* doc/invoke.texi: Add cpu_type power6.
[official-gcc.git] / gcc / fold-const.c
blob19058b2f1431b302d707d52a283f9066e752cf30
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
46 #include "config.h"
47 #include "system.h"
48 #include "coretypes.h"
49 #include "tm.h"
50 #include "flags.h"
51 #include "tree.h"
52 #include "real.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "toplev.h"
57 #include "ggc.h"
58 #include "hashtab.h"
59 #include "langhooks.h"
60 #include "md5.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.

   The encoding is one bit per elementary outcome:
     bit 0 (value 1) = "less than"
     bit 1 (value 2) = "equal"
     bit 2 (value 4) = "greater than"
     bit 3 (value 8) = "unordered"
   so e.g. COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ, and AND/OR of two
   comparisons of the same operands is bitwise AND/OR of their codes.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum comparison_code comparison_to_compcode (enum tree_code);
93 static enum tree_code compcode_to_comparison (enum comparison_code);
94 static tree combine_comparisons (enum tree_code, enum tree_code,
95 enum tree_code, tree, tree, tree);
96 static int truth_value_p (enum tree_code);
97 static int operand_equal_for_comparison_p (tree, tree, tree);
98 static int twoval_comparison_p (tree, tree *, tree *, int *);
99 static tree eval_subst (tree, tree, tree, tree, tree);
100 static tree pedantic_omit_one_operand (tree, tree, tree);
101 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
102 static tree make_bit_field_ref (tree, tree, int, int, int);
103 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
104 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
105 enum machine_mode *, int *, int *,
106 tree *, tree *);
107 static int all_ones_mask_p (tree, int);
108 static tree sign_bit_p (tree, tree);
109 static int simple_operand_p (tree);
110 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
111 static tree range_predecessor (tree);
112 static tree range_successor (tree);
113 static tree make_range (tree, int *, tree *, tree *);
114 static tree build_range_check (tree, tree, int, tree, tree);
115 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
116 tree);
117 static tree fold_range_test (enum tree_code, tree, tree, tree);
118 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
119 static tree unextend (tree, int, int, tree);
120 static tree fold_truthop (enum tree_code, tree, tree, tree);
121 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
122 static tree extract_muldiv (tree, tree, enum tree_code, tree);
123 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
124 static int multiple_of_p (tree, tree, tree);
125 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
126 tree, tree,
127 tree, tree, int);
128 static bool fold_real_zero_addition_p (tree, tree, int);
129 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
130 tree, tree, tree);
131 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
132 static tree fold_div_compare (enum tree_code, tree, tree, tree);
133 static bool reorder_operands_p (tree, tree);
134 static tree fold_negate_const (tree, tree);
135 static tree fold_not_const (tree, tree);
136 static tree fold_relational_const (enum tree_code, tree, tree, tree);
137 static int native_encode_expr (tree, unsigned char *, int);
138 static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

/* Low half-word of X, as a nonnegative value.  */
#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
/* High half-word of X; the unsigned cast makes the shift logical.  */
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
/* The radix of the half-word representation: 2^(HOST_BITS_PER_WIDE_INT/2).  */
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
162 /* Unpack a two-word integer into 4 words.
163 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
164 WORDS points to the array of HOST_WIDE_INTs. */
166 static void
167 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
169 words[0] = LOWPART (low);
170 words[1] = HIGHPART (low);
171 words[2] = LOWPART (hi);
172 words[3] = HIGHPART (hi);
175 /* Pack an array of 4 words into a two-word integer.
176 WORDS points to the array of words.
177 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
179 static void
180 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
181 HOST_WIDE_INT *hi)
183 *low = words[0] + words[1] * BASE;
184 *hi = words[2] + words[3] * BASE;
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We set TREE_CONSTANT_OVERFLOWED if,
	CONST_OVERFLOWED is nonzero
	or we set TREE_OVERFLOWED.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
		bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  /* Pointer and offset types carry POINTER_SIZE significant bits,
     regardless of what TYPE_PRECISION would report.  */
  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;				/* All HIGH:LOW bits are significant.  */
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then, for sign-extended types, copy the sign bit (bit PREC-1 of the
     value) into every bit above the precision.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
	high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  high = -1;
	  low |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  /* Copy before mutating: build_int_cst_wide may return a
	     shared node.  */
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (overflowed_const)
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }

  return t;
}
289 /* Add two doubleword integers with doubleword result.
290 Each argument is given as two `HOST_WIDE_INT' pieces.
291 One argument is L1 and H1; the other, L2 and H2.
292 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
295 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
296 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
297 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
299 unsigned HOST_WIDE_INT l;
300 HOST_WIDE_INT h;
302 l = l1 + l2;
303 h = h1 + h2 + (l < l1);
305 *lv = l;
306 *hv = h;
307 return OVERFLOW_SUM_SIGN (h1, h2, h);
310 /* Negate a doubleword integer with doubleword result.
311 Return nonzero if the operation overflows, assuming it's signed.
312 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
313 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
316 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
317 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
319 if (l1 == 0)
321 *lv = 0;
322 *hv = - h1;
323 return (*hv & h1) < 0;
325 else
327 *lv = -l1;
328 *hv = ~h1;
329 return 0;
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.

   The schoolbook product is computed on the 4-half-word representation
   (see encode/decode above); the upper 4 half-words of the 8-half-word
   product are then used only for the overflow check.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  /* The half-word product above is unsigned; correct the top half for a
     negative operand by subtracting the other operand from it
     (signed a * b = unsigned a * b - (a<0 ? b<<N : 0) - (b<0 ? a<<N : 0)).  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  /* No overflow iff the top half is all copies of the result's sign bit:
     all ones for a negative result, all zeros otherwise.  */
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      /* The double shift `>> (N - count - 1) >> 1' moves the carried-out
	 bits of L1 into *HV while avoiding the undefined shift by N that
	 a plain `>> (N - count)' would perform when COUNT is 0.  */
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  /* SIGNMASK is all ones if bit PREC-1 of the result is set, else zero.  */
  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  /* All ones when shifting arithmetically with a negative H1, else zero.  */
  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      /* The double shift `<< (N - count - 1) << 1' moves the bits of H1
	 that cross into *LV while avoiding the undefined shift by N
	 that would occur for COUNT == 0.  */
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      /* Every value bit was shifted out; only the extension remains.  */
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
515 /* Rotate the doubleword integer in L1, H1 left by COUNT places
516 keeping only PREC bits of result.
517 Rotate right if COUNT is negative.
518 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
520 void
521 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
522 HOST_WIDE_INT count, unsigned int prec,
523 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
525 unsigned HOST_WIDE_INT s1l, s2l;
526 HOST_WIDE_INT s1h, s2h;
528 count %= prec;
529 if (count < 0)
530 count += prec;
532 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
533 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
534 *lv = s1l | s2l;
535 *hv = s1h | s2h;
538 /* Rotate the doubleword integer in L1, H1 left by COUNT places
539 keeping only PREC bits of result. COUNT must be positive.
540 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
542 void
543 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
544 HOST_WIDE_INT count, unsigned int prec,
545 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
547 unsigned HOST_WIDE_INT s1l, s2l;
548 HOST_WIDE_INT s1h, s2h;
550 count %= prec;
551 if (count < 0)
552 count += prec;
554 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
555 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
556 *lv = s1l | s2l;
557 *hv = s1h | s2h;
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.

   The general case is Knuth's classical multiword division (Algorithm D,
   "Seminumerical Algorithms") carried out on the 4-half-word encoding
   used throughout this file.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  /* Division by zero: flag overflow and divide by 1 instead so the
     code below still produces well-defined output.  */
  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      /* Single-digit divisor: plain long division, one half-word at a
	 time from most to least significant.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  /* Adjust the truncated quotient computed above according to the
     requested rounding mode, then recompute the matching remainder.  */
  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1; */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1; */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
838 /* If ARG2 divides ARG1 with zero remainder, carries out the division
839 of type CODE and returns the quotient.
840 Otherwise returns NULL_TREE. */
842 static tree
843 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
845 unsigned HOST_WIDE_INT int1l, int2l;
846 HOST_WIDE_INT int1h, int2h;
847 unsigned HOST_WIDE_INT quol, reml;
848 HOST_WIDE_INT quoh, remh;
849 tree type = TREE_TYPE (arg1);
850 int uns = TYPE_UNSIGNED (type);
852 int1l = TREE_INT_CST_LOW (arg1);
853 int1h = TREE_INT_CST_HIGH (arg1);
854 int2l = TREE_INT_CST_LOW (arg2);
855 int2h = TREE_INT_CST_HIGH (arg2);
857 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
858 &quol, &quoh, &reml, &remh);
859 if (remh != 0 || reml != 0)
860 return NULL_TREE;
862 return build_int_cst_wide (type, quol, quoh);
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).

   Each CASE_FLT_FN entry presumably covers the float, double and long
   double variants of the function -- confirm against builtins.def.  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  /* Anything not listed above is not known to be odd.  */
  return false;
}
890 /* Check whether we may negate an integer constant T without causing
891 overflow. */
893 bool
894 may_negate_without_overflow_p (tree t)
896 unsigned HOST_WIDE_INT val;
897 unsigned int prec;
898 tree type;
900 gcc_assert (TREE_CODE (t) == INTEGER_CST);
902 type = TREE_TYPE (t);
903 if (TYPE_UNSIGNED (type))
904 return false;
906 prec = TYPE_PRECISION (type);
907 if (prec > HOST_BITS_PER_WIDE_INT)
909 if (TREE_INT_CST_LOW (t) != 0)
910 return true;
911 prec -= HOST_BITS_PER_WIDE_INT;
912 val = TREE_INT_CST_HIGH (t);
914 else
915 val = TREE_INT_CST_LOW (t);
916 if (prec < HOST_BITS_PER_WIDE_INT)
917 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
918 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      /* Unsigned negation wraps; without flag_trapv, signed wraparound
	 is tolerated here too.  */
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      /* -(~A) is A + 1, valid for integral types only.  */
      return INTEGRAL_TYPE_P (type);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* -(A * B) is A * -B (or -A * B); likewise for division.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Not safe for unsigned or wrapping signed division.  */
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  /* Only when the shift count is exactly precision - 1, i.e. the
	     expression extracts the sign bit.  */
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
1027 /* Given T, an expression, return the negation of T. Allow for T to be
1028 null, in which case return null. */
1030 static tree
1031 negate_expr (tree t)
1033 tree type;
1034 tree tem;
1036 if (t == 0)
1037 return 0;
1039 type = TREE_TYPE (t);
1040 STRIP_SIGN_NOPS (t);
1042 switch (TREE_CODE (t))
1044 /* Convert - (~A) to A + 1. */
1045 case BIT_NOT_EXPR:
1046 if (INTEGRAL_TYPE_P (type))
1047 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1048 build_int_cst (type, 1));
1049 break;
1051 case INTEGER_CST:
1052 tem = fold_negate_const (t, type);
1053 if (! TREE_OVERFLOW (tem)
1054 || TYPE_UNSIGNED (type)
1055 || ! flag_trapv)
1056 return tem;
1057 break;
1059 case REAL_CST:
1060 tem = fold_negate_const (t, type);
1061 /* Two's complement FP formats, such as c4x, may overflow. */
1062 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1063 return fold_convert (type, tem);
1064 break;
1066 case COMPLEX_CST:
1068 tree rpart = negate_expr (TREE_REALPART (t));
1069 tree ipart = negate_expr (TREE_IMAGPART (t));
1071 if ((TREE_CODE (rpart) == REAL_CST
1072 && TREE_CODE (ipart) == REAL_CST)
1073 || (TREE_CODE (rpart) == INTEGER_CST
1074 && TREE_CODE (ipart) == INTEGER_CST))
1075 return build_complex (type, rpart, ipart);
1077 break;
1079 case NEGATE_EXPR:
1080 return fold_convert (type, TREE_OPERAND (t, 0));
1082 case PLUS_EXPR:
1083 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1085 /* -(A + B) -> (-B) - A. */
1086 if (negate_expr_p (TREE_OPERAND (t, 1))
1087 && reorder_operands_p (TREE_OPERAND (t, 0),
1088 TREE_OPERAND (t, 1)))
1090 tem = negate_expr (TREE_OPERAND (t, 1));
1091 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1092 tem, TREE_OPERAND (t, 0));
1093 return fold_convert (type, tem);
1096 /* -(A + B) -> (-A) - B. */
1097 if (negate_expr_p (TREE_OPERAND (t, 0)))
1099 tem = negate_expr (TREE_OPERAND (t, 0));
1100 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1101 tem, TREE_OPERAND (t, 1));
1102 return fold_convert (type, tem);
1105 break;
1107 case MINUS_EXPR:
1108 /* - (A - B) -> B - A */
1109 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1110 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1111 return fold_convert (type,
1112 fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1113 TREE_OPERAND (t, 1),
1114 TREE_OPERAND (t, 0)));
1115 break;
1117 case MULT_EXPR:
1118 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1119 break;
1121 /* Fall through. */
1123 case RDIV_EXPR:
1124 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1126 tem = TREE_OPERAND (t, 1);
1127 if (negate_expr_p (tem))
1128 return fold_convert (type,
1129 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1130 TREE_OPERAND (t, 0),
1131 negate_expr (tem)));
1132 tem = TREE_OPERAND (t, 0);
1133 if (negate_expr_p (tem))
1134 return fold_convert (type,
1135 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1136 negate_expr (tem),
1137 TREE_OPERAND (t, 1)));
1139 break;
1141 case TRUNC_DIV_EXPR:
1142 case ROUND_DIV_EXPR:
1143 case FLOOR_DIV_EXPR:
1144 case CEIL_DIV_EXPR:
1145 case EXACT_DIV_EXPR:
1146 if (!TYPE_UNSIGNED (TREE_TYPE (t)) && !flag_wrapv)
1148 tem = TREE_OPERAND (t, 1);
1149 if (negate_expr_p (tem))
1150 return fold_convert (type,
1151 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1152 TREE_OPERAND (t, 0),
1153 negate_expr (tem)));
1154 tem = TREE_OPERAND (t, 0);
1155 if (negate_expr_p (tem))
1156 return fold_convert (type,
1157 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1158 negate_expr (tem),
1159 TREE_OPERAND (t, 1)));
1161 break;
1163 case NOP_EXPR:
1164 /* Convert -((double)float) into (double)(-float). */
1165 if (TREE_CODE (type) == REAL_TYPE)
1167 tem = strip_float_extensions (t);
1168 if (tem != t && negate_expr_p (tem))
1169 return fold_convert (type, negate_expr (tem));
1171 break;
1173 case CALL_EXPR:
1174 /* Negate -f(x) as f(-x). */
1175 if (negate_mathfn_p (builtin_mathfn_code (t))
1176 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1178 tree fndecl, arg, arglist;
1180 fndecl = get_callee_fndecl (t);
1181 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1182 arglist = build_tree_list (NULL_TREE, arg);
1183 return build_function_call_expr (fndecl, arglist);
1185 break;
1187 case RSHIFT_EXPR:
1188 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1189 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1191 tree op1 = TREE_OPERAND (t, 1);
1192 if (TREE_INT_CST_HIGH (op1) == 0
1193 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1194 == TREE_INT_CST_LOW (op1))
1196 tree ntype = TYPE_UNSIGNED (type)
1197 ? lang_hooks.types.signed_type (type)
1198 : lang_hooks.types.unsigned_type (type);
1199 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1200 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1201 return fold_convert (type, temp);
1204 break;
1206 default:
1207 break;
1210 tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1211 return fold_convert (type, tem);
1214 /* Split a tree IN into a constant, literal and variable parts that could be
1215 combined with CODE to make IN. "constant" means an expression with
1216 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1217 commutative arithmetic operation. Store the constant part into *CONP,
1218 the literal in *LITP and return the variable part. If a part isn't
1219 present, set it to null. If the tree does not decompose in this way,
1220 return the entire tree as the variable part and the other parts as null.
1222 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1223 case, we negate an operand that was subtracted. Except if it is a
1224 literal for which we use *MINUS_LITP instead.
1226 If NEGATE_P is true, we are negating all of IN, again except a literal
1227 for which we use *MINUS_LITP instead.
1229 If IN is itself a literal or constant, return it as appropriate.
1231 Note that we do not guarantee that any of the three values will be the
1232 same type as IN, but they will have the same signedness and mode. */
1234 static tree
1235 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1236 tree *minus_litp, int negate_p)
1238 tree var = 0;
1240 *conp = 0;
1241 *litp = 0;
1242 *minus_litp = 0;
1244 /* Strip any conversions that don't change the machine mode or signedness. */
1245 STRIP_SIGN_NOPS (in);
1247 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1248 *litp = in;
1249 else if (TREE_CODE (in) == code
1250 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1251 /* We can associate addition and subtraction together (even
1252 though the C standard doesn't say so) for integers because
1253 the value is not affected. For reals, the value might be
1254 affected, so we can't. */
1255 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1256 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1258 tree op0 = TREE_OPERAND (in, 0);
1259 tree op1 = TREE_OPERAND (in, 1);
1260 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1261 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1263 /* First see if either of the operands is a literal, then a constant. */
1264 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1265 *litp = op0, op0 = 0;
1266 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1267 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1269 if (op0 != 0 && TREE_CONSTANT (op0))
1270 *conp = op0, op0 = 0;
1271 else if (op1 != 0 && TREE_CONSTANT (op1))
1272 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1274 /* If we haven't dealt with either operand, this is not a case we can
1275 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1276 if (op0 != 0 && op1 != 0)
1277 var = in;
1278 else if (op0 != 0)
1279 var = op0;
1280 else
1281 var = op1, neg_var_p = neg1_p;
1283 /* Now do any needed negations. */
1284 if (neg_litp_p)
1285 *minus_litp = *litp, *litp = 0;
1286 if (neg_conp_p)
1287 *conp = negate_expr (*conp);
1288 if (neg_var_p)
1289 var = negate_expr (var);
1291 else if (TREE_CONSTANT (in))
1292 *conp = in;
1293 else
1294 var = in;
1296 if (negate_p)
1298 if (*litp)
1299 *minus_litp = *litp, *litp = 0;
1300 else if (*minus_litp)
1301 *litp = *minus_litp, *minus_litp = 0;
1302 *conp = negate_expr (*conp);
1303 var = negate_expr (var);
1306 return var;
1309 /* Re-associate trees split by the above function. T1 and T2 are either
1310 expressions to associate or null. Return the new expression, if any. If
1311 we build an operation, do it in TYPE and with CODE. */
1313 static tree
1314 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1316 if (t1 == 0)
1317 return t2;
1318 else if (t2 == 0)
1319 return t1;
1321 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1322 try to fold this since we will have infinite recursion. But do
1323 deal with any NEGATE_EXPRs. */
1324 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1325 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1327 if (code == PLUS_EXPR)
1329 if (TREE_CODE (t1) == NEGATE_EXPR)
1330 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1331 fold_convert (type, TREE_OPERAND (t1, 0)));
1332 else if (TREE_CODE (t2) == NEGATE_EXPR)
1333 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1334 fold_convert (type, TREE_OPERAND (t2, 0)));
1335 else if (integer_zerop (t2))
1336 return fold_convert (type, t1);
1338 else if (code == MINUS_EXPR)
1340 if (integer_zerop (t2))
1341 return fold_convert (type, t1);
1344 return build2 (code, type, fold_convert (type, t1),
1345 fold_convert (type, t2));
1348 return fold_build2 (code, type, fold_convert (type, t1),
1349 fold_convert (type, t2));
1352 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1353 to produce a new constant. Return NULL_TREE if we don't know how
1354 to evaluate CODE at compile-time.
1356 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1358 tree
1359 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1361 unsigned HOST_WIDE_INT int1l, int2l;
1362 HOST_WIDE_INT int1h, int2h;
1363 unsigned HOST_WIDE_INT low;
1364 HOST_WIDE_INT hi;
1365 unsigned HOST_WIDE_INT garbagel;
1366 HOST_WIDE_INT garbageh;
1367 tree t;
1368 tree type = TREE_TYPE (arg1);
1369 int uns = TYPE_UNSIGNED (type);
1370 int is_sizetype
1371 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1372 int overflow = 0;
1374 int1l = TREE_INT_CST_LOW (arg1);
1375 int1h = TREE_INT_CST_HIGH (arg1);
1376 int2l = TREE_INT_CST_LOW (arg2);
1377 int2h = TREE_INT_CST_HIGH (arg2);
1379 switch (code)
1381 case BIT_IOR_EXPR:
1382 low = int1l | int2l, hi = int1h | int2h;
1383 break;
1385 case BIT_XOR_EXPR:
1386 low = int1l ^ int2l, hi = int1h ^ int2h;
1387 break;
1389 case BIT_AND_EXPR:
1390 low = int1l & int2l, hi = int1h & int2h;
1391 break;
1393 case RSHIFT_EXPR:
1394 int2l = -int2l;
1395 case LSHIFT_EXPR:
1396 /* It's unclear from the C standard whether shifts can overflow.
1397 The following code ignores overflow; perhaps a C standard
1398 interpretation ruling is needed. */
1399 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1400 &low, &hi, !uns);
1401 break;
1403 case RROTATE_EXPR:
1404 int2l = - int2l;
1405 case LROTATE_EXPR:
1406 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1407 &low, &hi);
1408 break;
1410 case PLUS_EXPR:
1411 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1412 break;
1414 case MINUS_EXPR:
1415 neg_double (int2l, int2h, &low, &hi);
1416 add_double (int1l, int1h, low, hi, &low, &hi);
1417 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1418 break;
1420 case MULT_EXPR:
1421 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1422 break;
1424 case TRUNC_DIV_EXPR:
1425 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1426 case EXACT_DIV_EXPR:
1427 /* This is a shortcut for a common special case. */
1428 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1429 && ! TREE_CONSTANT_OVERFLOW (arg1)
1430 && ! TREE_CONSTANT_OVERFLOW (arg2)
1431 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1433 if (code == CEIL_DIV_EXPR)
1434 int1l += int2l - 1;
1436 low = int1l / int2l, hi = 0;
1437 break;
1440 /* ... fall through ... */
1442 case ROUND_DIV_EXPR:
1443 if (int2h == 0 && int2l == 0)
1444 return NULL_TREE;
1445 if (int2h == 0 && int2l == 1)
1447 low = int1l, hi = int1h;
1448 break;
1450 if (int1l == int2l && int1h == int2h
1451 && ! (int1l == 0 && int1h == 0))
1453 low = 1, hi = 0;
1454 break;
1456 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1457 &low, &hi, &garbagel, &garbageh);
1458 break;
1460 case TRUNC_MOD_EXPR:
1461 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1462 /* This is a shortcut for a common special case. */
1463 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1464 && ! TREE_CONSTANT_OVERFLOW (arg1)
1465 && ! TREE_CONSTANT_OVERFLOW (arg2)
1466 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1468 if (code == CEIL_MOD_EXPR)
1469 int1l += int2l - 1;
1470 low = int1l % int2l, hi = 0;
1471 break;
1474 /* ... fall through ... */
1476 case ROUND_MOD_EXPR:
1477 if (int2h == 0 && int2l == 0)
1478 return NULL_TREE;
1479 overflow = div_and_round_double (code, uns,
1480 int1l, int1h, int2l, int2h,
1481 &garbagel, &garbageh, &low, &hi);
1482 break;
1484 case MIN_EXPR:
1485 case MAX_EXPR:
1486 if (uns)
1487 low = (((unsigned HOST_WIDE_INT) int1h
1488 < (unsigned HOST_WIDE_INT) int2h)
1489 || (((unsigned HOST_WIDE_INT) int1h
1490 == (unsigned HOST_WIDE_INT) int2h)
1491 && int1l < int2l));
1492 else
1493 low = (int1h < int2h
1494 || (int1h == int2h && int1l < int2l));
1496 if (low == (code == MIN_EXPR))
1497 low = int1l, hi = int1h;
1498 else
1499 low = int2l, hi = int2h;
1500 break;
1502 default:
1503 return NULL_TREE;
1506 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1508 if (notrunc)
1510 /* Propagate overflow flags ourselves. */
1511 if (((!uns || is_sizetype) && overflow)
1512 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1514 t = copy_node (t);
1515 TREE_OVERFLOW (t) = 1;
1516 TREE_CONSTANT_OVERFLOW (t) = 1;
1518 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1520 t = copy_node (t);
1521 TREE_CONSTANT_OVERFLOW (t) = 1;
1524 else
1525 t = force_fit_type (t, 1,
1526 ((!uns || is_sizetype) && overflow)
1527 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1528 TREE_CONSTANT_OVERFLOW (arg1)
1529 | TREE_CONSTANT_OVERFLOW (arg2));
1531 return t;
1534 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1535 constant. We assume ARG1 and ARG2 have the same data type, or at least
1536 are the same kind of constant and the same machine mode.
1538 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1540 static tree
1541 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1543 STRIP_NOPS (arg1);
1544 STRIP_NOPS (arg2);
1546 if (TREE_CODE (arg1) == INTEGER_CST)
1547 return int_const_binop (code, arg1, arg2, notrunc);
1549 if (TREE_CODE (arg1) == REAL_CST)
1551 enum machine_mode mode;
1552 REAL_VALUE_TYPE d1;
1553 REAL_VALUE_TYPE d2;
1554 REAL_VALUE_TYPE value;
1555 REAL_VALUE_TYPE result;
1556 bool inexact;
1557 tree t, type;
1559 /* The following codes are handled by real_arithmetic. */
1560 switch (code)
1562 case PLUS_EXPR:
1563 case MINUS_EXPR:
1564 case MULT_EXPR:
1565 case RDIV_EXPR:
1566 case MIN_EXPR:
1567 case MAX_EXPR:
1568 break;
1570 default:
1571 return NULL_TREE;
1574 d1 = TREE_REAL_CST (arg1);
1575 d2 = TREE_REAL_CST (arg2);
1577 type = TREE_TYPE (arg1);
1578 mode = TYPE_MODE (type);
1580 /* Don't perform operation if we honor signaling NaNs and
1581 either operand is a NaN. */
1582 if (HONOR_SNANS (mode)
1583 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1584 return NULL_TREE;
1586 /* Don't perform operation if it would raise a division
1587 by zero exception. */
1588 if (code == RDIV_EXPR
1589 && REAL_VALUES_EQUAL (d2, dconst0)
1590 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1591 return NULL_TREE;
1593 /* If either operand is a NaN, just return it. Otherwise, set up
1594 for floating-point trap; we return an overflow. */
1595 if (REAL_VALUE_ISNAN (d1))
1596 return arg1;
1597 else if (REAL_VALUE_ISNAN (d2))
1598 return arg2;
1600 inexact = real_arithmetic (&value, code, &d1, &d2);
1601 real_convert (&result, mode, &value);
1603 /* Don't constant fold this floating point operation if
1604 the result has overflowed and flag_trapping_math. */
1606 if (flag_trapping_math
1607 && MODE_HAS_INFINITIES (mode)
1608 && REAL_VALUE_ISINF (result)
1609 && !REAL_VALUE_ISINF (d1)
1610 && !REAL_VALUE_ISINF (d2))
1611 return NULL_TREE;
1613 /* Don't constant fold this floating point operation if the
1614 result may dependent upon the run-time rounding mode and
1615 flag_rounding_math is set, or if GCC's software emulation
1616 is unable to accurately represent the result. */
1618 if ((flag_rounding_math
1619 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1620 && !flag_unsafe_math_optimizations))
1621 && (inexact || !real_identical (&result, &value)))
1622 return NULL_TREE;
1624 t = build_real (type, result);
1626 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1627 TREE_CONSTANT_OVERFLOW (t)
1628 = TREE_OVERFLOW (t)
1629 | TREE_CONSTANT_OVERFLOW (arg1)
1630 | TREE_CONSTANT_OVERFLOW (arg2);
1631 return t;
1634 if (TREE_CODE (arg1) == COMPLEX_CST)
1636 tree type = TREE_TYPE (arg1);
1637 tree r1 = TREE_REALPART (arg1);
1638 tree i1 = TREE_IMAGPART (arg1);
1639 tree r2 = TREE_REALPART (arg2);
1640 tree i2 = TREE_IMAGPART (arg2);
1641 tree t;
1643 switch (code)
1645 case PLUS_EXPR:
1646 t = build_complex (type,
1647 const_binop (PLUS_EXPR, r1, r2, notrunc),
1648 const_binop (PLUS_EXPR, i1, i2, notrunc));
1649 break;
1651 case MINUS_EXPR:
1652 t = build_complex (type,
1653 const_binop (MINUS_EXPR, r1, r2, notrunc),
1654 const_binop (MINUS_EXPR, i1, i2, notrunc));
1655 break;
1657 case MULT_EXPR:
1658 t = build_complex (type,
1659 const_binop (MINUS_EXPR,
1660 const_binop (MULT_EXPR,
1661 r1, r2, notrunc),
1662 const_binop (MULT_EXPR,
1663 i1, i2, notrunc),
1664 notrunc),
1665 const_binop (PLUS_EXPR,
1666 const_binop (MULT_EXPR,
1667 r1, i2, notrunc),
1668 const_binop (MULT_EXPR,
1669 i1, r2, notrunc),
1670 notrunc));
1671 break;
1673 case RDIV_EXPR:
1675 tree t1, t2, real, imag;
1676 tree magsquared
1677 = const_binop (PLUS_EXPR,
1678 const_binop (MULT_EXPR, r2, r2, notrunc),
1679 const_binop (MULT_EXPR, i2, i2, notrunc),
1680 notrunc);
1682 t1 = const_binop (PLUS_EXPR,
1683 const_binop (MULT_EXPR, r1, r2, notrunc),
1684 const_binop (MULT_EXPR, i1, i2, notrunc),
1685 notrunc);
1686 t2 = const_binop (MINUS_EXPR,
1687 const_binop (MULT_EXPR, i1, r2, notrunc),
1688 const_binop (MULT_EXPR, r1, i2, notrunc),
1689 notrunc);
1691 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1693 real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
1694 imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
1696 else
1698 real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
1699 imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
1700 if (!real || !imag)
1701 return NULL_TREE;
1704 t = build_complex (type, real, imag);
1706 break;
1708 default:
1709 return NULL_TREE;
1711 return t;
1713 return NULL_TREE;
1716 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1717 indicates which particular sizetype to create. */
1719 tree
1720 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1722 return build_int_cst (sizetype_tab[(int) kind], number);
1725 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1726 is a tree code. The type of the result is taken from the operands.
1727 Both must be the same type integer type and it must be a size type.
1728 If the operands are constant, so is the result. */
1730 tree
1731 size_binop (enum tree_code code, tree arg0, tree arg1)
1733 tree type = TREE_TYPE (arg0);
1735 if (arg0 == error_mark_node || arg1 == error_mark_node)
1736 return error_mark_node;
1738 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1739 && type == TREE_TYPE (arg1));
1741 /* Handle the special case of two integer constants faster. */
1742 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1744 /* And some specific cases even faster than that. */
1745 if (code == PLUS_EXPR && integer_zerop (arg0))
1746 return arg1;
1747 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1748 && integer_zerop (arg1))
1749 return arg0;
1750 else if (code == MULT_EXPR && integer_onep (arg0))
1751 return arg1;
1753 /* Handle general case of two integer constants. */
1754 return int_const_binop (code, arg0, arg1, 0);
1757 return fold_build2 (code, type, arg0, arg1);
1760 /* Given two values, either both of sizetype or both of bitsizetype,
1761 compute the difference between the two values. Return the value
1762 in signed type corresponding to the type of the operands. */
1764 tree
1765 size_diffop (tree arg0, tree arg1)
1767 tree type = TREE_TYPE (arg0);
1768 tree ctype;
1770 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1771 && type == TREE_TYPE (arg1));
1773 /* If the type is already signed, just do the simple thing. */
1774 if (!TYPE_UNSIGNED (type))
1775 return size_binop (MINUS_EXPR, arg0, arg1);
1777 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1779 /* If either operand is not a constant, do the conversions to the signed
1780 type and subtract. The hardware will do the right thing with any
1781 overflow in the subtraction. */
1782 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1783 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1784 fold_convert (ctype, arg1));
1786 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1787 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1788 overflow) and negate (which can't either). Special-case a result
1789 of zero while we're here. */
1790 if (tree_int_cst_equal (arg0, arg1))
1791 return build_int_cst (ctype, 0);
1792 else if (tree_int_cst_lt (arg1, arg0))
1793 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1794 else
1795 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1796 fold_convert (ctype, size_binop (MINUS_EXPR,
1797 arg1, arg0)));
1800 /* A subroutine of fold_convert_const handling conversions of an
1801 INTEGER_CST to another integer type. */
1803 static tree
1804 fold_convert_const_int_from_int (tree type, tree arg1)
1806 tree t;
1808 /* Given an integer constant, make new constant with new type,
1809 appropriately sign-extended or truncated. */
1810 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1811 TREE_INT_CST_HIGH (arg1));
1813 t = force_fit_type (t,
1814 /* Don't set the overflow when
1815 converting a pointer */
1816 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1817 (TREE_INT_CST_HIGH (arg1) < 0
1818 && (TYPE_UNSIGNED (type)
1819 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1820 | TREE_OVERFLOW (arg1),
1821 TREE_CONSTANT_OVERFLOW (arg1));
1823 return t;
1826 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1827 to an integer type. */
1829 static tree
1830 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1832 int overflow = 0;
1833 tree t;
1835 /* The following code implements the floating point to integer
1836 conversion rules required by the Java Language Specification,
1837 that IEEE NaNs are mapped to zero and values that overflow
1838 the target precision saturate, i.e. values greater than
1839 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1840 are mapped to INT_MIN. These semantics are allowed by the
1841 C and C++ standards that simply state that the behavior of
1842 FP-to-integer conversion is unspecified upon overflow. */
1844 HOST_WIDE_INT high, low;
1845 REAL_VALUE_TYPE r;
1846 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1848 switch (code)
1850 case FIX_TRUNC_EXPR:
1851 real_trunc (&r, VOIDmode, &x);
1852 break;
1854 case FIX_CEIL_EXPR:
1855 real_ceil (&r, VOIDmode, &x);
1856 break;
1858 case FIX_FLOOR_EXPR:
1859 real_floor (&r, VOIDmode, &x);
1860 break;
1862 case FIX_ROUND_EXPR:
1863 real_round (&r, VOIDmode, &x);
1864 break;
1866 default:
1867 gcc_unreachable ();
1870 /* If R is NaN, return zero and show we have an overflow. */
1871 if (REAL_VALUE_ISNAN (r))
1873 overflow = 1;
1874 high = 0;
1875 low = 0;
1878 /* See if R is less than the lower bound or greater than the
1879 upper bound. */
1881 if (! overflow)
1883 tree lt = TYPE_MIN_VALUE (type);
1884 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1885 if (REAL_VALUES_LESS (r, l))
1887 overflow = 1;
1888 high = TREE_INT_CST_HIGH (lt);
1889 low = TREE_INT_CST_LOW (lt);
1893 if (! overflow)
1895 tree ut = TYPE_MAX_VALUE (type);
1896 if (ut)
1898 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1899 if (REAL_VALUES_LESS (u, r))
1901 overflow = 1;
1902 high = TREE_INT_CST_HIGH (ut);
1903 low = TREE_INT_CST_LOW (ut);
1908 if (! overflow)
1909 REAL_VALUE_TO_INT (&low, &high, r);
1911 t = build_int_cst_wide (type, low, high);
1913 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1914 TREE_CONSTANT_OVERFLOW (arg1));
1915 return t;
1918 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1919 to another floating point type. */
1921 static tree
1922 fold_convert_const_real_from_real (tree type, tree arg1)
1924 REAL_VALUE_TYPE value;
1925 tree t;
1927 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1928 t = build_real (type, value);
1930 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1931 TREE_CONSTANT_OVERFLOW (t)
1932 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1933 return t;
1936 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1937 type TYPE. If no simplification can be done return NULL_TREE. */
1939 static tree
1940 fold_convert_const (enum tree_code code, tree type, tree arg1)
1942 if (TREE_TYPE (arg1) == type)
1943 return arg1;
1945 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1947 if (TREE_CODE (arg1) == INTEGER_CST)
1948 return fold_convert_const_int_from_int (type, arg1);
1949 else if (TREE_CODE (arg1) == REAL_CST)
1950 return fold_convert_const_int_from_real (code, type, arg1);
1952 else if (TREE_CODE (type) == REAL_TYPE)
1954 if (TREE_CODE (arg1) == INTEGER_CST)
1955 return build_real_from_int_cst (type, arg1);
1956 if (TREE_CODE (arg1) == REAL_CST)
1957 return fold_convert_const_real_from_real (type, arg1);
1959 return NULL_TREE;
1962 /* Construct a vector of zero elements of vector type TYPE. */
1964 static tree
1965 build_zero_vector (tree type)
1967 tree elem, list;
1968 int i, units;
1970 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1971 units = TYPE_VECTOR_SUBPARTS (type);
1973 list = NULL_TREE;
1974 for (i = 0; i < units; i++)
1975 list = tree_cons (NULL_TREE, elem, list);
1976 return build_vector (type, list);
1979 /* Convert expression ARG to type TYPE. Used by the middle-end for
1980 simple conversions in preference to calling the front-end's convert. */
1982 tree
1983 fold_convert (tree type, tree arg)
1985 tree orig = TREE_TYPE (arg);
1986 tree tem;
1988 if (type == orig)
1989 return arg;
1991 if (TREE_CODE (arg) == ERROR_MARK
1992 || TREE_CODE (type) == ERROR_MARK
1993 || TREE_CODE (orig) == ERROR_MARK)
1994 return error_mark_node;
1996 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1997 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1998 TYPE_MAIN_VARIANT (orig)))
1999 return fold_build1 (NOP_EXPR, type, arg);
2001 switch (TREE_CODE (type))
2003 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2004 case POINTER_TYPE: case REFERENCE_TYPE:
2005 case OFFSET_TYPE:
2006 if (TREE_CODE (arg) == INTEGER_CST)
2008 tem = fold_convert_const (NOP_EXPR, type, arg);
2009 if (tem != NULL_TREE)
2010 return tem;
2012 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2013 || TREE_CODE (orig) == OFFSET_TYPE)
2014 return fold_build1 (NOP_EXPR, type, arg);
2015 if (TREE_CODE (orig) == COMPLEX_TYPE)
2017 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2018 return fold_convert (type, tem);
2020 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2021 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2022 return fold_build1 (NOP_EXPR, type, arg);
2024 case REAL_TYPE:
2025 if (TREE_CODE (arg) == INTEGER_CST)
2027 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2028 if (tem != NULL_TREE)
2029 return tem;
2031 else if (TREE_CODE (arg) == REAL_CST)
2033 tem = fold_convert_const (NOP_EXPR, type, arg);
2034 if (tem != NULL_TREE)
2035 return tem;
2038 switch (TREE_CODE (orig))
2040 case INTEGER_TYPE:
2041 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2042 case POINTER_TYPE: case REFERENCE_TYPE:
2043 return fold_build1 (FLOAT_EXPR, type, arg);
2045 case REAL_TYPE:
2046 return fold_build1 (NOP_EXPR, type, arg);
2048 case COMPLEX_TYPE:
2049 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2050 return fold_convert (type, tem);
2052 default:
2053 gcc_unreachable ();
2056 case COMPLEX_TYPE:
2057 switch (TREE_CODE (orig))
2059 case INTEGER_TYPE:
2060 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2061 case POINTER_TYPE: case REFERENCE_TYPE:
2062 case REAL_TYPE:
2063 return build2 (COMPLEX_EXPR, type,
2064 fold_convert (TREE_TYPE (type), arg),
2065 fold_convert (TREE_TYPE (type), integer_zero_node));
2066 case COMPLEX_TYPE:
2068 tree rpart, ipart;
2070 if (TREE_CODE (arg) == COMPLEX_EXPR)
2072 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2073 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2074 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2077 arg = save_expr (arg);
2078 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2079 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2080 rpart = fold_convert (TREE_TYPE (type), rpart);
2081 ipart = fold_convert (TREE_TYPE (type), ipart);
2082 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2085 default:
2086 gcc_unreachable ();
2089 case VECTOR_TYPE:
2090 if (integer_zerop (arg))
2091 return build_zero_vector (type);
2092 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2093 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2094 || TREE_CODE (orig) == VECTOR_TYPE);
2095 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2097 case VOID_TYPE:
2098 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2100 default:
2101 gcc_unreachable ();
2105 /* Return false if expr can be assumed not to be an lvalue, true
2106 otherwise. */
2108 static bool
2109 maybe_lvalue_p (tree x)
2111 /* We only need to wrap lvalue tree codes. */
2112 switch (TREE_CODE (x))
2114 case VAR_DECL:
2115 case PARM_DECL:
2116 case RESULT_DECL:
2117 case LABEL_DECL:
2118 case FUNCTION_DECL:
2119 case SSA_NAME:
2121 case COMPONENT_REF:
2122 case INDIRECT_REF:
2123 case ALIGN_INDIRECT_REF:
2124 case MISALIGNED_INDIRECT_REF:
2125 case ARRAY_REF:
2126 case ARRAY_RANGE_REF:
2127 case BIT_FIELD_REF:
2128 case OBJ_TYPE_REF:
2130 case REALPART_EXPR:
2131 case IMAGPART_EXPR:
2132 case PREINCREMENT_EXPR:
2133 case PREDECREMENT_EXPR:
2134 case SAVE_EXPR:
2135 case TRY_CATCH_EXPR:
2136 case WITH_CLEANUP_EXPR:
2137 case COMPOUND_EXPR:
2138 case MODIFY_EXPR:
2139 case TARGET_EXPR:
2140 case COND_EXPR:
2141 case BIND_EXPR:
2142 case MIN_EXPR:
2143 case MAX_EXPR:
2144 break;
2146 default:
2147 /* Assume the worst for front-end tree codes. */
2148 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2149 break;
2150 return false;
2153 return true;
2156 /* Return an expr equal to X but certainly not valid as an lvalue. */
2158 tree
2159 non_lvalue (tree x)
2161 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2162 us. */
2163 if (in_gimple_form)
2164 return x;
2166 if (! maybe_lvalue_p (x))
2167 return x;
2168 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2171 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2172 Zero means allow extended lvalues. */
2174 int pedantic_lvalues;
2176 /* When pedantic, return an expr equal to X but certainly not valid as a
2177 pedantic lvalue. Otherwise, return X. */
2179 static tree
2180 pedantic_non_lvalue (tree x)
2182 if (pedantic_lvalues)
2183 return non_lvalue (x);
2184 else
2185 return x;
2188 /* Given a tree comparison code, return the code that is the logical inverse
2189 of the given code. It is not safe to do this for floating-point
2190 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2191 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2193 enum tree_code
2194 invert_tree_comparison (enum tree_code code, bool honor_nans)
2196 if (honor_nans && flag_trapping_math)
2197 return ERROR_MARK;
2199 switch (code)
2201 case EQ_EXPR:
2202 return NE_EXPR;
2203 case NE_EXPR:
2204 return EQ_EXPR;
2205 case GT_EXPR:
2206 return honor_nans ? UNLE_EXPR : LE_EXPR;
2207 case GE_EXPR:
2208 return honor_nans ? UNLT_EXPR : LT_EXPR;
2209 case LT_EXPR:
2210 return honor_nans ? UNGE_EXPR : GE_EXPR;
2211 case LE_EXPR:
2212 return honor_nans ? UNGT_EXPR : GT_EXPR;
2213 case LTGT_EXPR:
2214 return UNEQ_EXPR;
2215 case UNEQ_EXPR:
2216 return LTGT_EXPR;
2217 case UNGT_EXPR:
2218 return LE_EXPR;
2219 case UNGE_EXPR:
2220 return LT_EXPR;
2221 case UNLT_EXPR:
2222 return GE_EXPR;
2223 case UNLE_EXPR:
2224 return GT_EXPR;
2225 case ORDERED_EXPR:
2226 return UNORDERED_EXPR;
2227 case UNORDERED_EXPR:
2228 return ORDERED_EXPR;
2229 default:
2230 gcc_unreachable ();
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    /* Symmetric comparisons are unchanged when their operands swap.  */
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    /* The relational codes map to their mirror images.  */
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      /* Not a comparison code at all.  */
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.

   The COMPCODE_* values form a bitmask encoding, so the conjunction or
   disjunction of two comparisons on the same operands can be computed
   with bitwise AND/OR of their encodings (see combine_comparisons).  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      /* Not a comparison code at all.  */
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.

   Note that COMPCODE_FALSE and COMPCODE_TRUE have no tree_code
   equivalent; callers (see combine_comparisons) must handle those
   before calling here.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      /* No tree code corresponds to the remaining encodings.  */
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  /* In the compcode bit encoding, the AND and OR of two comparisons on
     identical operands are simply the bitwise AND and OR of their
     encodings.  */
  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      /* Only AND/OR combinations are handled.  */
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  A comparison traps on unordered
	 operands unless it is EQ, NE (via UNORD bit) or ORD.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  /* COMPCODE_TRUE/FALSE have no tree_code equivalent; fold them to
     boolean constants directly.  */
  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
			truth_type, ll_arg, lr_arg);
}
2439 /* Return nonzero if CODE is a tree code that represents a truth value. */
2441 static int
2442 truth_value_p (enum tree_code code)
2444 return (TREE_CODE_CLASS (code) == tcc_comparison
2445 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2446 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2447 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
	/* REAL_VALUES_IDENTICAL, not C '==': see the header comment
	   about -0.0 and NaNs.  */
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
					  TREE_REAL_CST (arg1)));

      case VECTOR_CST:
	{
	  tree v1, v2;

	  if (TREE_CONSTANT_OVERFLOW (arg0)
	      || TREE_CONSTANT_OVERFLOW (arg1))
	    return 0;

	  /* Walk both element chains in lockstep; equal vectors must
	     also exhaust their chains together (checked by the final
	     pointer comparison).  */
	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    flags))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return v1 == v2;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	/* Note the zero FLAGS: the addressed objects are compared
	   structurally, without the caller's modifying flags.  */
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case FIX_CEIL_EXPR:
	case FIX_TRUNC_EXPR:
	case FIX_FLOOR_EXPR:
	case FIX_ROUND_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case MISALIGNED_INDIRECT_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.  */
	  return (OP_SAME (0)
		  && OP_SAME (1)
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  return OP_SAME_WITH_NULL (0)
		 && OP_SAME (1)
		 && OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  /* Short-circuit operators are not commutative here: the
	     evaluation order of the operands matters.  */
	  return OP_SAME (0) && OP_SAME (1);

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (!OP_SAME (0))
	    return 0;

	  {
	    /* Only const (or, with OEP_PURE_SAME, pure) calls can be
	       considered equal; other calls may have side effects.  */
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  operand_equal_p
	     does not handle TREE_LIST, so we walk the operands here
	     feeding them to operand_equal_p.  */
	  arg0 = TREE_OPERAND (arg0, 1);
	  arg1 = TREE_OPERAND (arg1, 1);
	  while (arg0 && arg1)
	    {
	      if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
				     flags))
		return 0;

	      arg0 = TREE_CHAIN (arg0);
	      arg1 = TREE_CHAIN (arg1);
	    }

	  /* If we get here and both argument lists are exhausted
	     then the CALL_EXPRs are equal.  */
	  return ! (arg0 || arg1);

	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  /* Exact structural equality is the easy case.  */
  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  /* The shortening logic below only applies to integral types.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  /* Both narrowed operands must agree in signedness and be strictly
     narrower than ARG1's type for the shortened comparison to be
     equivalent.  */
  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;	/* First operand already recorded as *CVAL1.  */
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;	/* First operand already recorded as *CVAL2.  */
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;	/* Second operand matches *CVAL1.  */
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;	/* Second operand matches *CVAL2.  */
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      /* Rebuild the node with its operand recursively substituted.  */
      return fold_build1 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  /* Only the value operand (the second) is relevant.  */
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold_build3 (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2 (code, type, arg0, arg1);
      }

    default:
      /* Anything else is returned unchanged.  */
      return arg;
    }
}
2984 /* Return a tree for the case when the result of an expression is RESULT
2985 converted to TYPE and OMITTED was previously an operand of the expression
2986 but is now not needed (e.g., we folded OMITTED * 0).
2988 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2989 the conversion of RESULT to TYPE. */
2991 tree
2992 omit_one_operand (tree type, tree result, tree omitted)
2994 tree t = fold_convert (type, result);
2996 if (TREE_SIDE_EFFECTS (omitted))
2997 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2999 return non_lvalue (t);
3002 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3004 static tree
3005 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3007 tree t = fold_convert (type, result);
3009 if (TREE_SIDE_EFFECTS (omitted))
3010 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3012 return pedantic_non_lvalue (t);
3015 /* Return a tree for the case when the result of an expression is RESULT
3016 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3017 of the expression but are now not needed.
3019 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3020 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3021 evaluated before OMITTED2. Otherwise, if neither has side effects,
3022 just do the conversion of RESULT to TYPE. */
3024 tree
3025 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3027 tree t = fold_convert (type, result);
3029 if (TREE_SIDE_EFFECTS (omitted2))
3030 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3031 if (TREE_SIDE_EFFECTS (omitted1))
3032 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3034 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */
tree
invert_truthvalue (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return build1 (TRUTH_NOT_EXPR, type, arg);
      else
	{
	  code = invert_tree_comparison (code,
					 HONOR_NANS (TYPE_MODE (op_type)));
	  /* invert_tree_comparison returns ERROR_MARK when the
	     inversion is unsafe; keep an explicit NOT in that case.  */
	  if (code == ERROR_MARK)
	    return build1 (TRUTH_NOT_EXPR, type, arg);
	  else
	    return build2 (code, type,
			   TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
	}
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      /* De Morgan: !(a && b) == !a || !b.  */
      return build2 (TRUTH_OR_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2 (TRUTH_XOR_EXPR, type,
		       invert_truthvalue (TREE_OPERAND (arg, 0)),
		       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation cancels.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);
	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
		       VOID_TYPE_P (TREE_TYPE (arg1))
		       ? arg1 : invert_truthvalue (arg1),
		       VOID_TYPE_P (TREE_TYPE (arg2))
		       ? arg2 : invert_truthvalue (arg2));
      }

    case COMPOUND_EXPR:
      /* Only the value operand (the second) is negated.  */
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      /* A boolean-typed NOP is itself a truth value; fall out to the
	 final TRUTH_NOT_EXPR below.  */
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	break;

      /* ... fall through ...  */
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      /* Push the negation inside the conversion.  */
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      /* !(x & 1) is x & 1 == 0, only valid for a single-bit mask.  */
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      return build2 (EQ_EXPR, type, arg,
		     build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  /* Anything reaching here must already be boolean-typed, per the
     function's precondition.  */
  gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
3169 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3170 operands are another bit-wise operation with a common input. If so,
3171 distribute the bit operations to save an operation and possibly two if
3172 constants are involved. For example, convert
3173 (A | B) & (A | C) into A | (B & C)
3174 Further simplification will occur if B and C are constants.
3176 If this optimization cannot be done, 0 will be returned. */
3178 static tree
3179 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3181 tree common;
3182 tree left, right;
3184 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3185 || TREE_CODE (arg0) == code
3186 || (TREE_CODE (arg0) != BIT_AND_EXPR
3187 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3188 return 0;
3190 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3192 common = TREE_OPERAND (arg0, 0);
3193 left = TREE_OPERAND (arg0, 1);
3194 right = TREE_OPERAND (arg1, 1);
3196 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3198 common = TREE_OPERAND (arg0, 0);
3199 left = TREE_OPERAND (arg0, 1);
3200 right = TREE_OPERAND (arg1, 0);
3202 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3204 common = TREE_OPERAND (arg0, 1);
3205 left = TREE_OPERAND (arg0, 0);
3206 right = TREE_OPERAND (arg1, 1);
3208 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3210 common = TREE_OPERAND (arg0, 1);
3211 left = TREE_OPERAND (arg0, 0);
3212 right = TREE_OPERAND (arg1, 0);
3214 else
3215 return 0;
3217 return fold_build2 (TREE_CODE (arg0), type, common,
3218 fold_build2 (code, type, left, right));
3221 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3222 with code CODE. This optimization is unsafe. */
3223 static tree
3224 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3226 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3227 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3229 /* (A / C) +- (B / C) -> (A +- B) / C. */
3230 if (mul0 == mul1
3231 && operand_equal_p (TREE_OPERAND (arg0, 1),
3232 TREE_OPERAND (arg1, 1), 0))
3233 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3234 fold_build2 (code, type,
3235 TREE_OPERAND (arg0, 0),
3236 TREE_OPERAND (arg1, 0)),
3237 TREE_OPERAND (arg0, 1));
3239 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3240 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3241 TREE_OPERAND (arg1, 0), 0)
3242 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3243 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3245 REAL_VALUE_TYPE r0, r1;
3246 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3247 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3248 if (!mul0)
3249 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3250 if (!mul1)
3251 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3252 real_arithmetic (&r0, code, &r0, &r1);
3253 return fold_build2 (MULT_EXPR, type,
3254 TREE_OPERAND (arg0, 0),
3255 build_real (type, r0));
3258 return NULL_TREE;
3261 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3262 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3264 static tree
3265 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3266 int unsignedp)
3268 tree result;
3270 if (bitpos == 0)
3272 tree size = TYPE_SIZE (TREE_TYPE (inner));
3273 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3274 || POINTER_TYPE_P (TREE_TYPE (inner)))
3275 && host_integerp (size, 0)
3276 && tree_low_cst (size, 0) == bitsize)
3277 return fold_convert (type, inner);
3280 result = build3 (BIT_FIELD_REF, type, inner,
3281 size_int (bitsize), bitsize_int (bitpos));
3283 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3285 return result;
3288 /* Optimize a bit-field compare.
3290 There are two cases: First is a compare against a constant and the
3291 second is a comparison of two items where the fields are at the same
3292 bit position relative to the start of a chunk (byte, halfword, word)
3293 large enough to contain it. In these cases we can avoid the shift
3294 implicit in bitfield extractions.
3296 For constants, we emit a compare of the shifted constant with the
3297 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3298 compared. For two fields at the same position, we do the ANDs with the
3299 similar mask and compare the result of the ANDs.
3301 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3302 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3303 are the left and right operands of the comparison, respectively.
3305 If the optimization described above can be done, we return the resulting
3306 tree. Otherwise we return zero. */
3308 static tree
3309 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3310 tree lhs, tree rhs)
3312 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3313 tree type = TREE_TYPE (lhs);
3314 tree signed_type, unsigned_type;
3315 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3316 enum machine_mode lmode, rmode, nmode;
3317 int lunsignedp, runsignedp;
3318 int lvolatilep = 0, rvolatilep = 0;
3319 tree linner, rinner = NULL_TREE;
3320 tree mask;
3321 tree offset;
3323 /* Get all the information about the extractions being done. If the bit size
3324 if the same as the size of the underlying object, we aren't doing an
3325 extraction at all and so can do nothing. We also don't want to
3326 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3327 then will no longer be able to replace it. */
3328 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3329 &lunsignedp, &lvolatilep, false);
3330 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3331 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3332 return 0;
3334 if (!const_p)
3336 /* If this is not a constant, we can only do something if bit positions,
3337 sizes, and signedness are the same. */
3338 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3339 &runsignedp, &rvolatilep, false);
3341 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3342 || lunsignedp != runsignedp || offset != 0
3343 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3344 return 0;
3347 /* See if we can find a mode to refer to this field. We should be able to,
3348 but fail if we can't. */
3349 nmode = get_best_mode (lbitsize, lbitpos,
3350 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3351 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3352 TYPE_ALIGN (TREE_TYPE (rinner))),
3353 word_mode, lvolatilep || rvolatilep);
3354 if (nmode == VOIDmode)
3355 return 0;
3357 /* Set signed and unsigned types of the precision of this mode for the
3358 shifts below. */
3359 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3360 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3362 /* Compute the bit position and size for the new reference and our offset
3363 within it. If the new reference is the same size as the original, we
3364 won't optimize anything, so return zero. */
3365 nbitsize = GET_MODE_BITSIZE (nmode);
3366 nbitpos = lbitpos & ~ (nbitsize - 1);
3367 lbitpos -= nbitpos;
3368 if (nbitsize == lbitsize)
3369 return 0;
3371 if (BYTES_BIG_ENDIAN)
3372 lbitpos = nbitsize - lbitsize - lbitpos;
3374 /* Make the mask to be used against the extracted field. */
3375 mask = build_int_cst (unsigned_type, -1);
3376 mask = force_fit_type (mask, 0, false, false);
3377 mask = fold_convert (unsigned_type, mask);
3378 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3379 mask = const_binop (RSHIFT_EXPR, mask,
3380 size_int (nbitsize - lbitsize - lbitpos), 0);
3382 if (! const_p)
3383 /* If not comparing with constant, just rework the comparison
3384 and return. */
3385 return build2 (code, compare_type,
3386 build2 (BIT_AND_EXPR, unsigned_type,
3387 make_bit_field_ref (linner, unsigned_type,
3388 nbitsize, nbitpos, 1),
3389 mask),
3390 build2 (BIT_AND_EXPR, unsigned_type,
3391 make_bit_field_ref (rinner, unsigned_type,
3392 nbitsize, nbitpos, 1),
3393 mask));
3395 /* Otherwise, we are handling the constant case. See if the constant is too
3396 big for the field. Warn and return a tree of for 0 (false) if so. We do
3397 this not only for its own sake, but to avoid having to test for this
3398 error case below. If we didn't, we might generate wrong code.
3400 For unsigned fields, the constant shifted right by the field length should
3401 be all zero. For signed fields, the high-order bits should agree with
3402 the sign bit. */
3404 if (lunsignedp)
3406 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3407 fold_convert (unsigned_type, rhs),
3408 size_int (lbitsize), 0)))
3410 warning (0, "comparison is always %d due to width of bit-field",
3411 code == NE_EXPR);
3412 return constant_boolean_node (code == NE_EXPR, compare_type);
3415 else
3417 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3418 size_int (lbitsize - 1), 0);
3419 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3421 warning (0, "comparison is always %d due to width of bit-field",
3422 code == NE_EXPR);
3423 return constant_boolean_node (code == NE_EXPR, compare_type);
3427 /* Single-bit compares should always be against zero. */
3428 if (lbitsize == 1 && ! integer_zerop (rhs))
3430 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3431 rhs = build_int_cst (type, 0);
3434 /* Make a new bitfield reference, shift the constant over the
3435 appropriate number of bits and mask it with the computed mask
3436 (in case this was a signed field). If we changed it, make a new one. */
3437 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3438 if (lvolatilep)
3440 TREE_SIDE_EFFECTS (lhs) = 1;
3441 TREE_THIS_VOLATILE (lhs) = 1;
3444 rhs = const_binop (BIT_AND_EXPR,
3445 const_binop (LSHIFT_EXPR,
3446 fold_convert (unsigned_type, rhs),
3447 size_int (lbitpos), 0),
3448 mask, 0);
3450 return build2 (code, compare_type,
3451 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3452 rhs);
3455 /* Subroutine for fold_truthop: decode a field reference.
3457 If EXP is a comparison reference, we return the innermost reference.
3459 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3460 set to the starting bit number.
3462 If the innermost field can be completely contained in a mode-sized
3463 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3465 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3466 otherwise it is not changed.
3468 *PUNSIGNEDP is set to the signedness of the field.
3470 *PMASK is set to the mask used. This is either contained in a
3471 BIT_AND_EXPR or derived from the width of the field.
3473 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3475 Return 0 if this is not a component reference or is one that we can't
3476 do anything with. */
3478 static tree
3479 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3480 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3481 int *punsignedp, int *pvolatilep,
3482 tree *pmask, tree *pand_mask)
3484 tree outer_type = 0;
3485 tree and_mask = 0;
3486 tree mask, inner, offset;
3487 tree unsigned_type;
3488 unsigned int precision;
3490 /* All the optimizations using this function assume integer fields.
3491 There are problems with FP fields since the type_for_size call
3492 below can fail for, e.g., XFmode. */
3493 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3494 return 0;
3496 /* We are interested in the bare arrangement of bits, so strip everything
3497 that doesn't affect the machine mode. However, record the type of the
3498 outermost expression if it may matter below. */
3499 if (TREE_CODE (exp) == NOP_EXPR
3500 || TREE_CODE (exp) == CONVERT_EXPR
3501 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3502 outer_type = TREE_TYPE (exp);
3503 STRIP_NOPS (exp);
3505 if (TREE_CODE (exp) == BIT_AND_EXPR)
3507 and_mask = TREE_OPERAND (exp, 1);
3508 exp = TREE_OPERAND (exp, 0);
3509 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3510 if (TREE_CODE (and_mask) != INTEGER_CST)
3511 return 0;
3514 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3515 punsignedp, pvolatilep, false);
3516 if ((inner == exp && and_mask == 0)
3517 || *pbitsize < 0 || offset != 0
3518 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3519 return 0;
3521 /* If the number of bits in the reference is the same as the bitsize of
3522 the outer type, then the outer type gives the signedness. Otherwise
3523 (in case of a small bitfield) the signedness is unchanged. */
3524 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3525 *punsignedp = TYPE_UNSIGNED (outer_type);
3527 /* Compute the mask to access the bitfield. */
3528 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3529 precision = TYPE_PRECISION (unsigned_type);
3531 mask = build_int_cst (unsigned_type, -1);
3532 mask = force_fit_type (mask, 0, false, false);
3534 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3535 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3537 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3538 if (and_mask != 0)
3539 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3540 fold_convert (unsigned_type, and_mask), mask);
3542 *pmask = mask;
3543 *pand_mask = and_mask;
3544 return inner;
3547 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3548 bit positions. */
3550 static int
3551 all_ones_mask_p (tree mask, int size)
3553 tree type = TREE_TYPE (mask);
3554 unsigned int precision = TYPE_PRECISION (type);
3555 tree tmask;
3557 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3558 tmask = force_fit_type (tmask, 0, false, false);
3560 return
3561 tree_int_cst_equal (mask,
3562 const_binop (RSHIFT_EXPR,
3563 const_binop (LSHIFT_EXPR, tmask,
3564 size_int (precision - size),
3566 size_int (precision - size), 0));
3569 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3570 represents the sign bit of EXP's type. If EXP represents a sign
3571 or zero extension, also test VAL against the unextended type.
3572 The return value is the (sub)expression whose sign bit is VAL,
3573 or NULL_TREE otherwise. */
3575 static tree
3576 sign_bit_p (tree exp, tree val)
3578 unsigned HOST_WIDE_INT mask_lo, lo;
3579 HOST_WIDE_INT mask_hi, hi;
3580 int width;
3581 tree t;
3583 /* Tree EXP must have an integral type. */
3584 t = TREE_TYPE (exp);
3585 if (! INTEGRAL_TYPE_P (t))
3586 return NULL_TREE;
3588 /* Tree VAL must be an integer constant. */
3589 if (TREE_CODE (val) != INTEGER_CST
3590 || TREE_CONSTANT_OVERFLOW (val))
3591 return NULL_TREE;
3593 width = TYPE_PRECISION (t);
3594 if (width > HOST_BITS_PER_WIDE_INT)
3596 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3597 lo = 0;
3599 mask_hi = ((unsigned HOST_WIDE_INT) -1
3600 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3601 mask_lo = -1;
3603 else
3605 hi = 0;
3606 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3608 mask_hi = 0;
3609 mask_lo = ((unsigned HOST_WIDE_INT) -1
3610 >> (HOST_BITS_PER_WIDE_INT - width));
3613 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3614 treat VAL as if it were unsigned. */
3615 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3616 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3617 return exp;
3619 /* Handle extension from a narrower type. */
3620 if (TREE_CODE (exp) == NOP_EXPR
3621 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3622 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3624 return NULL_TREE;
3627 /* Subroutine for fold_truthop: determine if an operand is simple enough
3628 to be evaluated unconditionally. */
3630 static int
3631 simple_operand_p (tree exp)
3633 /* Strip any conversions that don't change the machine mode. */
3634 STRIP_NOPS (exp);
3636 return (CONSTANT_CLASS_P (exp)
3637 || TREE_CODE (exp) == SSA_NAME
3638 || (DECL_P (exp)
3639 && ! TREE_ADDRESSABLE (exp)
3640 && ! TREE_THIS_VOLATILE (exp)
3641 && ! DECL_NONLOCAL (exp)
3642 /* Don't regard global variables as simple. They may be
3643 allocated in ways unknown to the compiler (shared memory,
3644 #pragma weak, etc). */
3645 && ! TREE_PUBLIC (exp)
3646 && ! DECL_EXTERNAL (exp)
3647 /* Loading a static variable is unduly expensive, but global
3648 registers aren't expensive. */
3649 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3652 /* The following functions are subroutines to fold_range_test and allow it to
3653 try to change a logical combination of comparisons into a range test.
   For example, both
     X == 2 || X == 3 || X == 4 || X == 5
   and
     X >= 2 && X <= 5
   are converted to
     (unsigned) (X - 2) <= 3
3662 We describe each set of comparisons as being either inside or outside
3663 a range, using a variable named like IN_P, and then describe the
3664 range with a lower and upper bound. If one of the bounds is omitted,
3665 it represents either the highest or lowest value of the type.
3667 In the comments below, we represent a range by two numbers in brackets
3668 preceded by a "+" to designate being inside that range, or a "-" to
3669 designate being outside that range, so the condition can be inverted by
3670 flipping the prefix. An omitted bound is represented by a "-". For
3671 example, "- [-, 10]" means being outside the range starting at the lowest
3672 possible value and ending at 10, in other words, being greater than 10.
3673 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3674 always false.
3676 We set up things so that the missing bounds are handled in a consistent
3677 manner so neither a missing bound nor "true" and "false" need to be
3678 handled using a special case. */
3680 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3681 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3682 and UPPER1_P are nonzero if the respective argument is an upper bound
3683 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3684 must be specified for a comparison. ARG1 will be converted to ARG0's
3685 type if both are specified. */
3687 static tree
3688 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3689 tree arg1, int upper1_p)
3691 tree tem;
3692 int result;
3693 int sgn0, sgn1;
3695 /* If neither arg represents infinity, do the normal operation.
3696 Else, if not a comparison, return infinity. Else handle the special
3697 comparison rules. Note that most of the cases below won't occur, but
3698 are handled for consistency. */
3700 if (arg0 != 0 && arg1 != 0)
3702 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3703 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3704 STRIP_NOPS (tem);
3705 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3708 if (TREE_CODE_CLASS (code) != tcc_comparison)
3709 return 0;
3711 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3712 for neither. In real maths, we cannot assume open ended ranges are
3713 the same. But, this is computer arithmetic, where numbers are finite.
3714 We can therefore make the transformation of any unbounded range with
3715 the value Z, Z being greater than any representable number. This permits
3716 us to treat unbounded ranges as equal. */
3717 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3718 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3719 switch (code)
3721 case EQ_EXPR:
3722 result = sgn0 == sgn1;
3723 break;
3724 case NE_EXPR:
3725 result = sgn0 != sgn1;
3726 break;
3727 case LT_EXPR:
3728 result = sgn0 < sgn1;
3729 break;
3730 case LE_EXPR:
3731 result = sgn0 <= sgn1;
3732 break;
3733 case GT_EXPR:
3734 result = sgn0 > sgn1;
3735 break;
3736 case GE_EXPR:
3737 result = sgn0 >= sgn1;
3738 break;
3739 default:
3740 gcc_unreachable ();
3743 return constant_boolean_node (result, type);
3746 /* Given EXP, a logical expression, set the range it is testing into
3747 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3748 actually being tested. *PLOW and *PHIGH will be made of the same type
3749 as the returned expression. If EXP is not a comparison, we will most
3750 likely not be returning a useful value and range. */
3752 static tree
3753 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3755 enum tree_code code;
3756 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3757 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3758 int in_p, n_in_p;
3759 tree low, high, n_low, n_high;
3761 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3762 and see if we can refine the range. Some of the cases below may not
3763 happen, but it doesn't seem worth worrying about this. We "continue"
3764 the outer loop when we've changed something; otherwise we "break"
3765 the switch, which will "break" the while. */
3767 in_p = 0;
3768 low = high = build_int_cst (TREE_TYPE (exp), 0);
3770 while (1)
3772 code = TREE_CODE (exp);
3773 exp_type = TREE_TYPE (exp);
3775 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3777 if (TREE_CODE_LENGTH (code) > 0)
3778 arg0 = TREE_OPERAND (exp, 0);
3779 if (TREE_CODE_CLASS (code) == tcc_comparison
3780 || TREE_CODE_CLASS (code) == tcc_unary
3781 || TREE_CODE_CLASS (code) == tcc_binary)
3782 arg0_type = TREE_TYPE (arg0);
3783 if (TREE_CODE_CLASS (code) == tcc_binary
3784 || TREE_CODE_CLASS (code) == tcc_comparison
3785 || (TREE_CODE_CLASS (code) == tcc_expression
3786 && TREE_CODE_LENGTH (code) > 1))
3787 arg1 = TREE_OPERAND (exp, 1);
3790 switch (code)
3792 case TRUTH_NOT_EXPR:
3793 in_p = ! in_p, exp = arg0;
3794 continue;
3796 case EQ_EXPR: case NE_EXPR:
3797 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3798 /* We can only do something if the range is testing for zero
3799 and if the second operand is an integer constant. Note that
3800 saying something is "in" the range we make is done by
3801 complementing IN_P since it will set in the initial case of
3802 being not equal to zero; "out" is leaving it alone. */
3803 if (low == 0 || high == 0
3804 || ! integer_zerop (low) || ! integer_zerop (high)
3805 || TREE_CODE (arg1) != INTEGER_CST)
3806 break;
3808 switch (code)
3810 case NE_EXPR: /* - [c, c] */
3811 low = high = arg1;
3812 break;
3813 case EQ_EXPR: /* + [c, c] */
3814 in_p = ! in_p, low = high = arg1;
3815 break;
3816 case GT_EXPR: /* - [-, c] */
3817 low = 0, high = arg1;
3818 break;
3819 case GE_EXPR: /* + [c, -] */
3820 in_p = ! in_p, low = arg1, high = 0;
3821 break;
3822 case LT_EXPR: /* - [c, -] */
3823 low = arg1, high = 0;
3824 break;
3825 case LE_EXPR: /* + [-, c] */
3826 in_p = ! in_p, low = 0, high = arg1;
3827 break;
3828 default:
3829 gcc_unreachable ();
3832 /* If this is an unsigned comparison, we also know that EXP is
3833 greater than or equal to zero. We base the range tests we make
3834 on that fact, so we record it here so we can parse existing
3835 range tests. We test arg0_type since often the return type
3836 of, e.g. EQ_EXPR, is boolean. */
3837 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3839 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3840 in_p, low, high, 1,
3841 build_int_cst (arg0_type, 0),
3842 NULL_TREE))
3843 break;
3845 in_p = n_in_p, low = n_low, high = n_high;
3847 /* If the high bound is missing, but we have a nonzero low
3848 bound, reverse the range so it goes from zero to the low bound
3849 minus 1. */
3850 if (high == 0 && low && ! integer_zerop (low))
3852 in_p = ! in_p;
3853 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3854 integer_one_node, 0);
3855 low = build_int_cst (arg0_type, 0);
3859 exp = arg0;
3860 continue;
3862 case NEGATE_EXPR:
3863 /* (-x) IN [a,b] -> x in [-b, -a] */
3864 n_low = range_binop (MINUS_EXPR, exp_type,
3865 build_int_cst (exp_type, 0),
3866 0, high, 1);
3867 n_high = range_binop (MINUS_EXPR, exp_type,
3868 build_int_cst (exp_type, 0),
3869 0, low, 0);
3870 low = n_low, high = n_high;
3871 exp = arg0;
3872 continue;
3874 case BIT_NOT_EXPR:
3875 /* ~ X -> -X - 1 */
3876 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3877 build_int_cst (exp_type, 1));
3878 continue;
3880 case PLUS_EXPR: case MINUS_EXPR:
3881 if (TREE_CODE (arg1) != INTEGER_CST)
3882 break;
3884 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3885 move a constant to the other side. */
3886 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3887 break;
3889 /* If EXP is signed, any overflow in the computation is undefined,
3890 so we don't worry about it so long as our computations on
3891 the bounds don't overflow. For unsigned, overflow is defined
3892 and this is exactly the right thing. */
3893 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3894 arg0_type, low, 0, arg1, 0);
3895 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3896 arg0_type, high, 1, arg1, 0);
3897 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3898 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3899 break;
3901 /* Check for an unsigned range which has wrapped around the maximum
3902 value thus making n_high < n_low, and normalize it. */
3903 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3905 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3906 integer_one_node, 0);
3907 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3908 integer_one_node, 0);
3910 /* If the range is of the form +/- [ x+1, x ], we won't
3911 be able to normalize it. But then, it represents the
3912 whole range or the empty set, so make it
3913 +/- [ -, - ]. */
3914 if (tree_int_cst_equal (n_low, low)
3915 && tree_int_cst_equal (n_high, high))
3916 low = high = 0;
3917 else
3918 in_p = ! in_p;
3920 else
3921 low = n_low, high = n_high;
3923 exp = arg0;
3924 continue;
3926 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3927 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3928 break;
3930 if (! INTEGRAL_TYPE_P (arg0_type)
3931 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3932 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3933 break;
3935 n_low = low, n_high = high;
3937 if (n_low != 0)
3938 n_low = fold_convert (arg0_type, n_low);
3940 if (n_high != 0)
3941 n_high = fold_convert (arg0_type, n_high);
3944 /* If we're converting arg0 from an unsigned type, to exp,
3945 a signed type, we will be doing the comparison as unsigned.
3946 The tests above have already verified that LOW and HIGH
3947 are both positive.
3949 So we have to ensure that we will handle large unsigned
3950 values the same way that the current signed bounds treat
3951 negative values. */
3953 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3955 tree high_positive;
3956 tree equiv_type = lang_hooks.types.type_for_mode
3957 (TYPE_MODE (arg0_type), 1);
3959 /* A range without an upper bound is, naturally, unbounded.
3960 Since convert would have cropped a very large value, use
3961 the max value for the destination type. */
3962 high_positive
3963 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3964 : TYPE_MAX_VALUE (arg0_type);
3966 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3967 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3968 fold_convert (arg0_type,
3969 high_positive),
3970 fold_convert (arg0_type,
3971 integer_one_node));
3973 /* If the low bound is specified, "and" the range with the
3974 range for which the original unsigned value will be
3975 positive. */
3976 if (low != 0)
3978 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3979 1, n_low, n_high, 1,
3980 fold_convert (arg0_type,
3981 integer_zero_node),
3982 high_positive))
3983 break;
3985 in_p = (n_in_p == in_p);
3987 else
3989 /* Otherwise, "or" the range with the range of the input
3990 that will be interpreted as negative. */
3991 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3992 0, n_low, n_high, 1,
3993 fold_convert (arg0_type,
3994 integer_zero_node),
3995 high_positive))
3996 break;
3998 in_p = (in_p != n_in_p);
4002 exp = arg0;
4003 low = n_low, high = n_high;
4004 continue;
4006 default:
4007 break;
4010 break;
4013 /* If EXP is a constant, we can evaluate whether this is true or false. */
4014 if (TREE_CODE (exp) == INTEGER_CST)
4016 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4017 exp, 0, low, 0))
4018 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4019 exp, 1, high, 1)));
4020 low = high = 0;
4021 exp = 0;
4024 *pin_p = in_p, *plow = low, *phigh = high;
4025 return exp;
4028 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4029 type, TYPE, return an expression to test if EXP is in (or out of, depending
4030 on IN_P) the range. Return 0 if the test couldn't be created. */
4032 static tree
4033 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4035 tree etype = TREE_TYPE (exp);
4036 tree value;
4038 #ifdef HAVE_canonicalize_funcptr_for_compare
4039 /* Disable this optimization for function pointer expressions
4040 on targets that require function pointer canonicalization. */
4041 if (HAVE_canonicalize_funcptr_for_compare
4042 && TREE_CODE (etype) == POINTER_TYPE
4043 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4044 return NULL_TREE;
4045 #endif
4047 if (! in_p)
4049 value = build_range_check (type, exp, 1, low, high);
4050 if (value != 0)
4051 return invert_truthvalue (value);
4053 return 0;
4056 if (low == 0 && high == 0)
4057 return build_int_cst (type, 1);
4059 if (low == 0)
4060 return fold_build2 (LE_EXPR, type, exp,
4061 fold_convert (etype, high));
4063 if (high == 0)
4064 return fold_build2 (GE_EXPR, type, exp,
4065 fold_convert (etype, low));
4067 if (operand_equal_p (low, high, 0))
4068 return fold_build2 (EQ_EXPR, type, exp,
4069 fold_convert (etype, low));
4071 if (integer_zerop (low))
4073 if (! TYPE_UNSIGNED (etype))
4075 etype = lang_hooks.types.unsigned_type (etype);
4076 high = fold_convert (etype, high);
4077 exp = fold_convert (etype, exp);
4079 return build_range_check (type, exp, 1, 0, high);
4082 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4083 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4085 unsigned HOST_WIDE_INT lo;
4086 HOST_WIDE_INT hi;
4087 int prec;
4089 prec = TYPE_PRECISION (etype);
4090 if (prec <= HOST_BITS_PER_WIDE_INT)
4092 hi = 0;
4093 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4095 else
4097 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4098 lo = (unsigned HOST_WIDE_INT) -1;
4101 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4103 if (TYPE_UNSIGNED (etype))
4105 etype = lang_hooks.types.signed_type (etype);
4106 exp = fold_convert (etype, exp);
4108 return fold_build2 (GT_EXPR, type, exp,
4109 build_int_cst (etype, 0));
4113 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4114 This requires wrap-around arithmetics for the type of the expression. */
4115 switch (TREE_CODE (etype))
4117 case INTEGER_TYPE:
4118 /* There is no requirement that LOW be within the range of ETYPE
4119 if the latter is a subtype. It must, however, be within the base
4120 type of ETYPE. So be sure we do the subtraction in that type. */
4121 if (TREE_TYPE (etype))
4122 etype = TREE_TYPE (etype);
4123 break;
4125 case ENUMERAL_TYPE:
4126 case BOOLEAN_TYPE:
4127 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4128 TYPE_UNSIGNED (etype));
4129 break;
4131 default:
4132 break;
4135 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4136 if (TREE_CODE (etype) == INTEGER_TYPE
4137 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4139 tree utype, minv, maxv;
4141 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4142 for the type in question, as we rely on this here. */
4143 utype = lang_hooks.types.unsigned_type (etype);
4144 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4145 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4146 integer_one_node, 1);
4147 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4149 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4150 minv, 1, maxv, 1)))
4151 etype = utype;
4152 else
4153 return 0;
4156 high = fold_convert (etype, high);
4157 low = fold_convert (etype, low);
4158 exp = fold_convert (etype, exp);
4160 value = const_binop (MINUS_EXPR, high, low, 0);
4162 if (value != 0 && !TREE_OVERFLOW (value))
4163 return build_range_check (type,
4164 fold_build2 (MINUS_EXPR, etype, exp, low),
4165 1, build_int_cst (etype, 0), value);
4167 return 0;
4170 /* Return the predecessor of VAL in its type, handling the infinite case. */
4172 static tree
4173 range_predecessor (tree val)
4175 tree type = TREE_TYPE (val);
4177 if (INTEGRAL_TYPE_P (type)
4178 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4179 return 0;
4180 else
4181 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4184 /* Return the successor of VAL in its type, handling the infinite case. */
4186 static tree
4187 range_successor (tree val)
4189 tree type = TREE_TYPE (val);
4191 if (INTEGRAL_TYPE_P (type)
4192 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4193 return 0;
4194 else
4195 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4198 /* Given two ranges, see if we can merge them into one. Return 1 if we
4199 can, 0 if we can't. Set the output range into the specified parameters. */
4201 static int
4202 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4203 tree high0, int in1_p, tree low1, tree high1)
4205 int no_overlap;
4206 int subset;
4207 int temp;
4208 tree tem;
4209 int in_p;
4210 tree low, high;
4211 int lowequal = ((low0 == 0 && low1 == 0)
4212 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4213 low0, 0, low1, 0)));
4214 int highequal = ((high0 == 0 && high1 == 0)
4215 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4216 high0, 1, high1, 1)));
4218 /* Make range 0 be the range that starts first, or ends last if they
4219 start at the same value. Swap them if it isn't. */
4220 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4221 low0, 0, low1, 0))
4222 || (lowequal
4223 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4224 high1, 1, high0, 1))))
4226 temp = in0_p, in0_p = in1_p, in1_p = temp;
4227 tem = low0, low0 = low1, low1 = tem;
4228 tem = high0, high0 = high1, high1 = tem;
4231 /* Now flag two cases, whether the ranges are disjoint or whether the
4232 second range is totally subsumed in the first. Note that the tests
4233 below are simplified by the ones above. */
4234 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4235 high0, 1, low1, 0));
4236 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4237 high1, 1, high0, 1));
4239 /* We now have four cases, depending on whether we are including or
4240 excluding the two ranges. */
4241 if (in0_p && in1_p)
4243 /* If they don't overlap, the result is false. If the second range
4244 is a subset it is the result. Otherwise, the range is from the start
4245 of the second to the end of the first. */
4246 if (no_overlap)
4247 in_p = 0, low = high = 0;
4248 else if (subset)
4249 in_p = 1, low = low1, high = high1;
4250 else
4251 in_p = 1, low = low1, high = high0;
4254 else if (in0_p && ! in1_p)
4256 /* If they don't overlap, the result is the first range. If they are
4257 equal, the result is false. If the second range is a subset of the
4258 first, and the ranges begin at the same place, we go from just after
4259 the end of the second range to the end of the first. If the second
4260 range is not a subset of the first, or if it is a subset and both
4261 ranges end at the same place, the range starts at the start of the
4262 first range and ends just before the second range.
4263 Otherwise, we can't describe this as a single range. */
4264 if (no_overlap)
4265 in_p = 1, low = low0, high = high0;
4266 else if (lowequal && highequal)
4267 in_p = 0, low = high = 0;
4268 else if (subset && lowequal)
4270 low = range_successor (high1);
4271 high = high0;
4272 in_p = (low != 0);
4274 else if (! subset || highequal)
4276 low = low0;
4277 high = range_predecessor (low1);
4278 in_p = (high != 0);
4280 else
4281 return 0;
4284 else if (! in0_p && in1_p)
4286 /* If they don't overlap, the result is the second range. If the second
4287 is a subset of the first, the result is false. Otherwise,
4288 the range starts just after the first range and ends at the
4289 end of the second. */
4290 if (no_overlap)
4291 in_p = 1, low = low1, high = high1;
4292 else if (subset || highequal)
4293 in_p = 0, low = high = 0;
4294 else
4296 low = range_successor (high0);
4297 high = high1;
4298 in_p = (low != 0);
4302 else
4304 /* The case where we are excluding both ranges. Here the complex case
4305 is if they don't overlap. In that case, the only time we have a
4306 range is if they are adjacent. If the second is a subset of the
4307 first, the result is the first. Otherwise, the range to exclude
4308 starts at the beginning of the first range and ends at the end of the
4309 second. */
4310 if (no_overlap)
4312 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4313 range_successor (high0),
4314 1, low1, 0)))
4315 in_p = 0, low = low0, high = high1;
4316 else
4318 /* Canonicalize - [min, x] into - [-, x]. */
4319 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4320 switch (TREE_CODE (TREE_TYPE (low0)))
4322 case ENUMERAL_TYPE:
4323 if (TYPE_PRECISION (TREE_TYPE (low0))
4324 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4325 break;
4326 /* FALLTHROUGH */
4327 case INTEGER_TYPE:
4328 if (tree_int_cst_equal (low0,
4329 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4330 low0 = 0;
4331 break;
4332 case POINTER_TYPE:
4333 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4334 && integer_zerop (low0))
4335 low0 = 0;
4336 break;
4337 default:
4338 break;
4341 /* Canonicalize - [x, max] into - [x, -]. */
4342 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4343 switch (TREE_CODE (TREE_TYPE (high1)))
4345 case ENUMERAL_TYPE:
4346 if (TYPE_PRECISION (TREE_TYPE (high1))
4347 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4348 break;
4349 /* FALLTHROUGH */
4350 case INTEGER_TYPE:
4351 if (tree_int_cst_equal (high1,
4352 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4353 high1 = 0;
4354 break;
4355 case POINTER_TYPE:
4356 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4357 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4358 high1, 1,
4359 integer_one_node, 1)))
4360 high1 = 0;
4361 break;
4362 default:
4363 break;
4366 /* The ranges might be also adjacent between the maximum and
4367 minimum values of the given type. For
4368 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4369 return + [x + 1, y - 1]. */
4370 if (low0 == 0 && high1 == 0)
4372 low = range_successor (high0);
4373 high = range_predecessor (low1);
4374 if (low == 0 || high == 0)
4375 return 0;
4377 in_p = 1;
4379 else
4380 return 0;
4383 else if (subset)
4384 in_p = 0, low = low0, high = high0;
4385 else
4386 in_p = 0, low = low0, high = high1;
4389 *pin_p = in_p, *plow = low, *phigh = high;
4390 return 1;
4394 /* Subroutine of fold, looking inside expressions of the form
4395 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4396 of the COND_EXPR. This function is being used also to optimize
4397 A op B ? C : A, by reversing the comparison first.
4399 Return a folded expression whose code is not a COND_EXPR
4400 anymore, or NULL_TREE if no folding opportunity is found. */
4402 static tree
4403 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4405 enum tree_code comp_code = TREE_CODE (arg0);
4406 tree arg00 = TREE_OPERAND (arg0, 0);
4407 tree arg01 = TREE_OPERAND (arg0, 1);
4408 tree arg1_type = TREE_TYPE (arg1);
4409 tree tem;
4411 STRIP_NOPS (arg1);
4412 STRIP_NOPS (arg2);
4414 /* If we have A op 0 ? A : -A, consider applying the following
4415 transformations:
4417 A == 0? A : -A same as -A
4418 A != 0? A : -A same as A
4419 A >= 0? A : -A same as abs (A)
4420 A > 0? A : -A same as abs (A)
4421 A <= 0? A : -A same as -abs (A)
4422 A < 0? A : -A same as -abs (A)
4424 None of these transformations work for modes with signed
4425 zeros. If A is +/-0, the first two transformations will
4426 change the sign of the result (from +0 to -0, or vice
4427 versa). The last four will fix the sign of the result,
4428 even though the original expressions could be positive or
4429 negative, depending on the sign of A.
4431 Note that all these transformations are correct if A is
4432 NaN, since the two alternatives (A and -A) are also NaNs. */
4433 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4434 ? real_zerop (arg01)
4435 : integer_zerop (arg01))
4436 && ((TREE_CODE (arg2) == NEGATE_EXPR
4437 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4438 /* In the case that A is of the form X-Y, '-A' (arg2) may
4439 have already been folded to Y-X, check for that. */
4440 || (TREE_CODE (arg1) == MINUS_EXPR
4441 && TREE_CODE (arg2) == MINUS_EXPR
4442 && operand_equal_p (TREE_OPERAND (arg1, 0),
4443 TREE_OPERAND (arg2, 1), 0)
4444 && operand_equal_p (TREE_OPERAND (arg1, 1),
4445 TREE_OPERAND (arg2, 0), 0))))
4446 switch (comp_code)
4448 case EQ_EXPR:
4449 case UNEQ_EXPR:
4450 tem = fold_convert (arg1_type, arg1);
4451 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4452 case NE_EXPR:
4453 case LTGT_EXPR:
4454 return pedantic_non_lvalue (fold_convert (type, arg1));
4455 case UNGE_EXPR:
4456 case UNGT_EXPR:
4457 if (flag_trapping_math)
4458 break;
4459 /* Fall through. */
4460 case GE_EXPR:
4461 case GT_EXPR:
4462 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4463 arg1 = fold_convert (lang_hooks.types.signed_type
4464 (TREE_TYPE (arg1)), arg1);
4465 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4466 return pedantic_non_lvalue (fold_convert (type, tem));
4467 case UNLE_EXPR:
4468 case UNLT_EXPR:
4469 if (flag_trapping_math)
4470 break;
/* Fall through.  (Intentional, mirroring the UNGE/UNGT case above:
   when trapping math is off, the unordered forms fold like LE/LT.) */
4471 case LE_EXPR:
4472 case LT_EXPR:
4473 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4474 arg1 = fold_convert (lang_hooks.types.signed_type
4475 (TREE_TYPE (arg1)), arg1);
4476 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
/* NOTE(review): unlike the GE/GT case, this negated result is not
   wrapped in pedantic_non_lvalue -- confirm the asymmetry is intended. */
4477 return negate_expr (fold_convert (type, tem));
4478 default:
4479 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4480 break;
4483 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4484 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4485 both transformations are correct when A is NaN: A != 0
4486 is then true, and A == 0 is false. */
4488 if (integer_zerop (arg01) && integer_zerop (arg2))
4490 if (comp_code == NE_EXPR)
4491 return pedantic_non_lvalue (fold_convert (type, arg1));
4492 else if (comp_code == EQ_EXPR)
4493 return build_int_cst (type, 0);
4496 /* Try some transformations of A op B ? A : B.
4498 A == B? A : B same as B
4499 A != B? A : B same as A
4500 A >= B? A : B same as max (A, B)
4501 A > B? A : B same as max (B, A)
4502 A <= B? A : B same as min (A, B)
4503 A < B? A : B same as min (B, A)
4505 As above, these transformations don't work in the presence
4506 of signed zeros. For example, if A and B are zeros of
4507 opposite sign, the first two transformations will change
4508 the sign of the result. In the last four, the original
4509 expressions give different results for (A=+0, B=-0) and
4510 (A=-0, B=+0), but the transformed expressions do not.
4512 The first two transformations are correct if either A or B
4513 is a NaN. In the first transformation, the condition will
4514 be false, and B will indeed be chosen. In the case of the
4515 second transformation, the condition A != B will be true,
4516 and A will be chosen.
4518 The conversions to max() and min() are not correct if B is
4519 a number and A is not. The conditions in the original
4520 expressions will be false, so all four give B. The min()
4521 and max() versions would give a NaN instead. */
4522 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4523 /* Avoid these transformations if the COND_EXPR may be used
4524 as an lvalue in the C++ front-end. PR c++/19199. */
4525 && (in_gimple_form
4526 || strcmp (lang_hooks.name, "GNU C++") != 0
4527 || ! maybe_lvalue_p (arg1)
4528 || ! maybe_lvalue_p (arg2)))
4530 tree comp_op0 = arg00;
4531 tree comp_op1 = arg01;
4532 tree comp_type = TREE_TYPE (comp_op0);
4534 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4535 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4537 comp_type = type;
4538 comp_op0 = arg1;
4539 comp_op1 = arg2;
4542 switch (comp_code)
4544 case EQ_EXPR:
4545 return pedantic_non_lvalue (fold_convert (type, arg2));
4546 case NE_EXPR:
4547 return pedantic_non_lvalue (fold_convert (type, arg1));
4548 case LE_EXPR:
4549 case LT_EXPR:
4550 case UNLE_EXPR:
4551 case UNLT_EXPR:
4552 /* In C++ a ?: expression can be an lvalue, so put the
4553 operand which will be used if they are equal first
4554 so that we can convert this back to the
4555 corresponding COND_EXPR. */
4556 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4558 comp_op0 = fold_convert (comp_type, comp_op0);
4559 comp_op1 = fold_convert (comp_type, comp_op1);
4560 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4561 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4562 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4563 return pedantic_non_lvalue (fold_convert (type, tem));
4565 break;
4566 case GE_EXPR:
4567 case GT_EXPR:
4568 case UNGE_EXPR:
4569 case UNGT_EXPR:
4570 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4572 comp_op0 = fold_convert (comp_type, comp_op0);
4573 comp_op1 = fold_convert (comp_type, comp_op1);
4574 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4575 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4576 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4577 return pedantic_non_lvalue (fold_convert (type, tem));
4579 break;
4580 case UNEQ_EXPR:
4581 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4582 return pedantic_non_lvalue (fold_convert (type, arg2));
4583 break;
4584 case LTGT_EXPR:
4585 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4586 return pedantic_non_lvalue (fold_convert (type, arg1));
4587 break;
4588 default:
4589 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4590 break;
4594 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4595 we might still be able to simplify this. For example,
4596 if C1 is one less or one more than C2, this might have started
4597 out as a MIN or MAX and been transformed by this function.
4598 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4600 if (INTEGRAL_TYPE_P (type)
4601 && TREE_CODE (arg01) == INTEGER_CST
4602 && TREE_CODE (arg2) == INTEGER_CST)
4603 switch (comp_code)
4605 case EQ_EXPR:
4606 /* We can replace A with C1 in this case. */
4607 arg1 = fold_convert (type, arg01);
4608 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4610 case LT_EXPR:
4611 /* If C1 is C2 + 1, this is min(A, C2). */
/* The TYPE_MAX_VALUE guard rejects the degenerate C2 == TYPE_MAX case,
   where C2 + 1 would wrap. */
4612 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4613 OEP_ONLY_CONST)
4614 && operand_equal_p (arg01,
4615 const_binop (PLUS_EXPR, arg2,
4616 integer_one_node, 0),
4617 OEP_ONLY_CONST))
4618 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4619 type, arg1, arg2));
4620 break;
4622 case LE_EXPR:
4623 /* If C1 is C2 - 1, this is min(A, C2). */
4624 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4625 OEP_ONLY_CONST)
4626 && operand_equal_p (arg01,
4627 const_binop (MINUS_EXPR, arg2,
4628 integer_one_node, 0),
4629 OEP_ONLY_CONST))
4630 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4631 type, arg1, arg2));
4632 break;
4634 case GT_EXPR:
4635 /* If C1 is C2 - 1, this is max(A, C2). */
4636 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4637 OEP_ONLY_CONST)
4638 && operand_equal_p (arg01,
4639 const_binop (MINUS_EXPR, arg2,
4640 integer_one_node, 0),
4641 OEP_ONLY_CONST))
4642 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4643 type, arg1, arg2));
4644 break;
4646 case GE_EXPR:
4647 /* If C1 is C2 + 1, this is max(A, C2). */
4648 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4649 OEP_ONLY_CONST)
4650 && operand_equal_p (arg01,
4651 const_binop (PLUS_EXPR, arg2,
4652 integer_one_node, 0),
4653 OEP_ONLY_CONST))
4654 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4655 type, arg1, arg2));
4656 break;
4657 case NE_EXPR:
4658 break;
4659 default:
4660 gcc_unreachable ();
4663 return NULL_TREE;
4668 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
/* Nonzero when a short-circuit logical AND/OR of two tests may be
   rewritten as its non-short-circuit form.  Default heuristic: do so
   when branches are expensive (BRANCH_COST >= 2).  The #ifndef guard
   lets a target predefine this macro to override the default.  */
4669 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4670 #endif
4672 /* EXP is some logical combination of boolean tests. See if we can
4673 merge it into some range test. Return the new tree if so. */
/* CODE is the truth operation (TRUTH_AND[IF]_EXPR / TRUTH_OR[IF]_EXPR),
   TYPE the type of the result, OP0/OP1 its operands.  Returns 0 when no
   merge applies.  */
4675 static tree
4676 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4678 int or_op = (code == TRUTH_ORIF_EXPR
4679 || code == TRUTH_OR_EXPR);
4680 int in0_p, in1_p, in_p;
4681 tree low0, low1, low, high0, high1, high;
/* make_range yields 0 when an operand cannot be expressed as a range
   test; such an operand is treated below as matching the other side. */
4682 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4683 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4684 tree tem;
4686 /* If this is an OR operation, invert both sides; we will invert
4687 again at the end. */
/* (De Morgan: A || B == !(!A && !B), so the OR case reuses the AND
   range-merging machinery on the inverted ranges.) */
4688 if (or_op)
4689 in0_p = ! in0_p, in1_p = ! in1_p;
4691 /* If both expressions are the same, if we can merge the ranges, and we
4692 can build the range test, return it or it inverted. If one of the
4693 ranges is always true or always false, consider it to be the same
4694 expression as the other. */
4695 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4696 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4697 in1_p, low1, high1)
4698 && 0 != (tem = (build_range_check (type,
4699 lhs != 0 ? lhs
4700 : rhs != 0 ? rhs : integer_zero_node,
4701 in_p, low, high))))
4702 return or_op ? invert_truthvalue (tem) : tem;
4704 /* On machines where the branch cost is expensive, if this is a
4705 short-circuited branch and the underlying object on both sides
4706 is the same, make a non-short-circuit operation. */
4707 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4708 && lhs != 0 && rhs != 0
4709 && (code == TRUTH_ANDIF_EXPR
4710 || code == TRUTH_ORIF_EXPR)
4711 && operand_equal_p (lhs, rhs, 0))
4713 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4714 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4715 which cases we can't do this. */
4716 if (simple_operand_p (lhs))
4717 return build2 (code == TRUTH_ANDIF_EXPR
4718 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4719 type, op0, op1);
4721 else if (lang_hooks.decls.global_bindings_p () == 0
4722 && ! CONTAINS_PLACEHOLDER_P (lhs))
4724 tree common = save_expr (lhs);
/* Rebuild each side as a range check on the shared SAVE_EXPR so the
   common operand is evaluated only once.  The or_op inversion from
   above is undone here by inverting the in/out sense per side. */
4726 if (0 != (lhs = build_range_check (type, common,
4727 or_op ? ! in0_p : in0_p,
4728 low0, high0))
4729 && (0 != (rhs = build_range_check (type, common,
4730 or_op ? ! in1_p : in1_p,
4731 low1, high1))))
4732 return build2 (code == TRUTH_ANDIF_EXPR
4733 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4734 type, lhs, rhs);
4738 return 0;
4741 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4742 bit value. Arrange things so the extra bits will be set to zero if and
4743 only if C is signed-extended to its full width. If MASK is nonzero,
4744 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4746 static tree
4747 unextend (tree c, int p, int unsignedp, tree mask)
4749 tree type = TREE_TYPE (c);
4750 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4751 tree temp;
/* If the field fills its whole mode, or the value is unsigned, there
   are no sign-extension bits to adjust. */
4753 if (p == modesize || unsignedp)
4754 return c;
4756 /* We work by getting just the sign bit into the low-order bit, then
4757 into the high-order bit, then sign-extend. We then XOR that value
4758 with C. */
4759 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4760 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
/* TEMP is now the sign bit of the P-bit value: either 0 or 1. */
4762 /* We must use a signed type in order to get an arithmetic right shift.
4763 However, we must also avoid introducing accidental overflows, so that
4764 a subsequent call to integer_zerop will work. Hence we must
4765 do the type conversion here. At this point, the constant is either
4766 zero or one, and the conversion to a signed type can never overflow.
4767 We could get an overflow if this conversion is done anywhere else. */
4768 if (TYPE_UNSIGNED (type))
4769 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4771 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4772 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4773 if (mask != 0)
4774 temp = const_binop (BIT_AND_EXPR, temp,
4775 fold_convert (TREE_TYPE (c), mask), 0);
4776 /* If necessary, convert the type back to match the type of C. */
4777 if (TYPE_UNSIGNED (type))
4778 temp = fold_convert (type, temp);
/* XOR the (possibly masked) extension bits into C: the extra bits end
   up zero exactly when C was already sign-extended to full width. */
4780 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4783 /* Find ways of folding logical expressions of LHS and RHS:
4784 Try to merge two comparisons to the same innermost item.
4785 Look for range tests like "ch >= '0' && ch <= '9'".
4786 Look for combinations of simple terms on machines with expensive branches
4787 and evaluate the RHS unconditionally.
4789 For example, if we have p->a == 2 && p->b == 4 and we can make an
4790 object large enough to span both A and B, we can do this with a comparison
4791 against the object ANDed with the a mask.
4793 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4794 operations to do this with one comparison.
4796 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4797 function and the one above.
4799 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4800 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4802 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4803 two operands.
4805 We return the simplified tree or 0 if no optimization is possible. */
4807 static tree
4808 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4810 /* If this is the "or" of two comparisons, we can do something if
4811 the comparisons are NE_EXPR. If this is the "and", we can do something
4812 if the comparisons are EQ_EXPR. I.e.,
4813 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4815 WANTED_CODE is this operation code. For single bit fields, we can
4816 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4817 comparison for one-bit fields. */
4819 enum tree_code wanted_code;
4820 enum tree_code lcode, rcode;
/* Naming convention for the locals below: the first letter is the
   comparison (l = LHS comparison, r = RHS comparison), the second is
   the operand within it (l = left operand, r = right operand).  So
   ll_* describes the left operand of LHS, rr_* the right operand of
   RHS, etc.  ln*/rn* describe the wider "merged" field on each side. */
4821 tree ll_arg, lr_arg, rl_arg, rr_arg;
4822 tree ll_inner, lr_inner, rl_inner, rr_inner;
4823 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4824 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4825 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4826 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4827 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4828 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4829 enum machine_mode lnmode, rnmode;
4830 tree ll_mask, lr_mask, rl_mask, rr_mask;
4831 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4832 tree l_const, r_const;
4833 tree lntype, rntype, result;
4834 int first_bit, end_bit;
4835 int volatilep;
4837 /* Start by getting the comparison codes. Fail if anything is volatile.
4838 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4839 it were surrounded with a NE_EXPR. */
4841 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4842 return 0;
4844 lcode = TREE_CODE (lhs);
4845 rcode = TREE_CODE (rhs);
4847 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4849 lhs = build2 (NE_EXPR, truth_type, lhs,
4850 build_int_cst (TREE_TYPE (lhs), 0));
4851 lcode = NE_EXPR;
4854 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4856 rhs = build2 (NE_EXPR, truth_type, rhs,
4857 build_int_cst (TREE_TYPE (rhs), 0));
4858 rcode = NE_EXPR;
4861 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4862 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4863 return 0;
4865 ll_arg = TREE_OPERAND (lhs, 0);
4866 lr_arg = TREE_OPERAND (lhs, 1);
4867 rl_arg = TREE_OPERAND (rhs, 0);
4868 rr_arg = TREE_OPERAND (rhs, 1);
4870 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4871 if (simple_operand_p (ll_arg)
4872 && simple_operand_p (lr_arg))
4874 tree result;
4875 if (operand_equal_p (ll_arg, rl_arg, 0)
4876 && operand_equal_p (lr_arg, rr_arg, 0))
4878 result = combine_comparisons (code, lcode, rcode,
4879 truth_type, ll_arg, lr_arg);
4880 if (result)
4881 return result;
4883 else if (operand_equal_p (ll_arg, rr_arg, 0)
4884 && operand_equal_p (lr_arg, rl_arg, 0))
/* Same operands but mirrored (x<y vs y>x): normalize by swapping the
   RHS comparison before combining. */
4886 result = combine_comparisons (code, lcode,
4887 swap_tree_comparison (rcode),
4888 truth_type, ll_arg, lr_arg);
4889 if (result)
4890 return result;
4894 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4895 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4897 /* If the RHS can be evaluated unconditionally and its operands are
4898 simple, it wins to evaluate the RHS unconditionally on machines
4899 with expensive branches. In this case, this isn't a comparison
4900 that can be merged. Avoid doing this if the RHS is a floating-point
4901 comparison since those can trap. */
4903 if (BRANCH_COST >= 2
4904 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4905 && simple_operand_p (rl_arg)
4906 && simple_operand_p (rr_arg))
4908 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4909 if (code == TRUTH_OR_EXPR
4910 && lcode == NE_EXPR && integer_zerop (lr_arg)
4911 && rcode == NE_EXPR && integer_zerop (rr_arg)
4912 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4913 return build2 (NE_EXPR, truth_type,
4914 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4915 ll_arg, rl_arg),
4916 build_int_cst (TREE_TYPE (ll_arg), 0));
4918 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4919 if (code == TRUTH_AND_EXPR
4920 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4921 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4922 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4923 return build2 (EQ_EXPR, truth_type,
4924 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4925 ll_arg, rl_arg),
4926 build_int_cst (TREE_TYPE (ll_arg), 0));
/* Otherwise fall back to the non-short-circuit form; per the
   simple_operand_p checks above, evaluating the RHS unconditionally
   is acceptable here. */
4928 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4929 return build2 (code, truth_type, lhs, rhs);
4932 /* See if the comparisons can be merged. Then get all the parameters for
4933 each side. */
4935 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4936 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4937 return 0;
4939 volatilep = 0;
4940 ll_inner = decode_field_reference (ll_arg,
4941 &ll_bitsize, &ll_bitpos, &ll_mode,
4942 &ll_unsignedp, &volatilep, &ll_mask,
4943 &ll_and_mask);
4944 lr_inner = decode_field_reference (lr_arg,
4945 &lr_bitsize, &lr_bitpos, &lr_mode,
4946 &lr_unsignedp, &volatilep, &lr_mask,
4947 &lr_and_mask);
4948 rl_inner = decode_field_reference (rl_arg,
4949 &rl_bitsize, &rl_bitpos, &rl_mode,
4950 &rl_unsignedp, &volatilep, &rl_mask,
4951 &rl_and_mask);
4952 rr_inner = decode_field_reference (rr_arg,
4953 &rr_bitsize, &rr_bitpos, &rr_mode,
4954 &rr_unsignedp, &volatilep, &rr_mask,
4955 &rr_and_mask);
4957 /* It must be true that the inner operation on the lhs of each
4958 comparison must be the same if we are to be able to do anything.
4959 Then see if we have constants. If not, the same must be true for
4960 the rhs's. */
4961 if (volatilep || ll_inner == 0 || rl_inner == 0
4962 || ! operand_equal_p (ll_inner, rl_inner, 0))
4963 return 0;
4965 if (TREE_CODE (lr_arg) == INTEGER_CST
4966 && TREE_CODE (rr_arg) == INTEGER_CST)
4967 l_const = lr_arg, r_const = rr_arg;
4968 else if (lr_inner == 0 || rr_inner == 0
4969 || ! operand_equal_p (lr_inner, rr_inner, 0))
4970 return 0;
4971 else
4972 l_const = r_const = 0;
4974 /* If either comparison code is not correct for our logical operation,
4975 fail. However, we can convert a one-bit comparison against zero into
4976 the opposite comparison against that bit being set in the field. */
4978 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4979 if (lcode != wanted_code)
4981 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4983 /* Make the left operand unsigned, since we are only interested
4984 in the value of one bit. Otherwise we are doing the wrong
4985 thing below. */
4986 ll_unsignedp = 1;
4987 l_const = ll_mask;
4989 else
4990 return 0;
4993 /* This is analogous to the code for l_const above. */
4994 if (rcode != wanted_code)
4996 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4998 rl_unsignedp = 1;
4999 r_const = rl_mask;
5001 else
5002 return 0;
5005 /* After this point all optimizations will generate bit-field
5006 references, which we might not want. */
5007 if (! lang_hooks.can_use_bit_fields_p ())
5008 return 0;
5010 /* See if we can find a mode that contains both fields being compared on
5011 the left. If we can't, fail. Otherwise, update all constants and masks
5012 to be relative to a field of that size. */
5013 first_bit = MIN (ll_bitpos, rl_bitpos);
5014 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5015 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5016 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5017 volatilep);
5018 if (lnmode == VOIDmode)
5019 return 0;
5021 lnbitsize = GET_MODE_BITSIZE (lnmode);
5022 lnbitpos = first_bit & ~ (lnbitsize - 1);
5023 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5024 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5026 if (BYTES_BIG_ENDIAN)
5028 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5029 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5032 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5033 size_int (xll_bitpos), 0);
5034 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5035 size_int (xrl_bitpos), 0);
5037 if (l_const)
5039 l_const = fold_convert (lntype, l_const);
5040 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5041 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
/* If the constant has bits set outside the field's mask, the
   comparison can never vary: fold it to a constant truth value. */
5042 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5043 fold_build1 (BIT_NOT_EXPR,
5044 lntype, ll_mask),
5045 0)))
5047 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5049 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5052 if (r_const)
5054 r_const = fold_convert (lntype, r_const);
5055 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5056 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5057 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5058 fold_build1 (BIT_NOT_EXPR,
5059 lntype, rl_mask),
5060 0)))
5062 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5064 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5068 /* If the right sides are not constant, do the same for it. Also,
5069 disallow this optimization if a size or signedness mismatch occurs
5070 between the left and right sides. */
5071 if (l_const == 0)
5073 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5074 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5075 /* Make sure the two fields on the right
5076 correspond to the left without being swapped. */
5077 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos
5078 return 0;
5080 first_bit = MIN (lr_bitpos, rr_bitpos);
5081 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5082 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5083 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5084 volatilep);
5085 if (rnmode == VOIDmode)
5086 return 0;
5088 rnbitsize = GET_MODE_BITSIZE (rnmode);
5089 rnbitpos = first_bit & ~ (rnbitsize - 1);
5090 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5091 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5093 if (BYTES_BIG_ENDIAN)
5095 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5096 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5099 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5100 size_int (xlr_bitpos), 0);
5101 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5102 size_int (xrr_bitpos), 0);
5104 /* Make a mask that corresponds to both fields being compared.
5105 Do this for both items being compared. If the operands are the
5106 same size and the bits being compared are in the same position
5107 then we can do this by masking both and comparing the masked
5108 results. */
5109 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5110 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5111 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5113 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5114 ll_unsignedp || rl_unsignedp);
5115 if (! all_ones_mask_p (ll_mask, lnbitsize))
5116 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5118 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5119 lr_unsignedp || rr_unsignedp);
5120 if (! all_ones_mask_p (lr_mask, rnbitsize))
5121 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5123 return build2 (wanted_code, truth_type, lhs, rhs);
5126 /* There is still another way we can do something: If both pairs of
5127 fields being compared are adjacent, we may be able to make a wider
5128 field containing them both.
5130 Note that we still must mask the lhs/rhs expressions. Furthermore,
5131 the mask must be shifted to account for the shift done by
5132 make_bit_field_ref. */
5133 if ((ll_bitsize + ll_bitpos == rl_bitpos
5134 && lr_bitsize + lr_bitpos == rr_bitpos)
5135 || (ll_bitpos == rl_bitpos + rl_bitsize
5136 && lr_bitpos == rr_bitpos + rr_bitsize))
5138 tree type;
5140 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5141 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5142 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5143 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5145 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5146 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5147 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5148 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5150 /* Convert to the smaller type before masking out unwanted bits. */
5151 type = lntype;
5152 if (lntype != rntype)
5154 if (lnbitsize > rnbitsize)
5156 lhs = fold_convert (rntype, lhs);
5157 ll_mask = fold_convert (rntype, ll_mask);
5158 type = rntype;
5160 else if (lnbitsize < rnbitsize)
5162 rhs = fold_convert (lntype, rhs);
5163 lr_mask = fold_convert (lntype, lr_mask);
5164 type = lntype;
5168 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5169 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5171 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5172 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5174 return build2 (wanted_code, truth_type, lhs, rhs);
5177 return 0;
5180 /* Handle the case of comparisons with constants. If there is something in
5181 common between the masks, those bits of the constants must be the same.
5182 If not, the condition is always false. Test for this to avoid generating
5183 incorrect code below. */
5184 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5185 if (! integer_zerop (result)
5186 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5187 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5189 if (wanted_code == NE_EXPR)
5191 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5192 return constant_boolean_node (true, truth_type);
5194 else
5196 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5197 return constant_boolean_node (false, truth_type);
5201 /* Construct the expression we will return. First get the component
5202 reference we will make. Unless the mask is all ones the width of
5203 that field, perform the mask operation. Then compare with the
5204 merged constant. */
5205 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5206 ll_unsignedp || rl_unsignedp);
5208 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5209 if (! all_ones_mask_p (ll_mask, lnbitsize))
5210 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5212 return build2 (wanted_code, truth_type, result,
5213 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  CODE is the comparison operator, TYPE the type of the
   result, OP0 the MIN_EXPR/MAX_EXPR side and OP1 the constant side.
   Returns the simplified comparison, or NULL_TREE if the form does not
   permit the optimization.  */

static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const = op1;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  /* Look through sign-preserving conversions so we see the MIN/MAX
     node itself.  */
  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  /* Relation between the MIN/MAX constant and the comparison constant
     decides which simplification applies below.  */
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	/* FIXME: We should be able to invert code without building a
	   scratch tree node, but doing so would require us to
	   duplicate a part of invert_truthvalue here.  */
	tree tem = invert_truthvalue (build2 (code, type, op0, op1));
	/* NOTE(review): this assumes invert_truthvalue yields a binary
	   comparison (so TREE_OPERAND (tem, 1) is valid) and that the
	   recursive call never returns NULL_TREE for the inverted
	   codes -- confirm against invert_truthvalue's behavior for
	   comparison nodes.  */
	tem = optimize_minmax_comparison (TREE_CODE (tem),
					  TREE_TYPE (tem),
					  TREE_OPERAND (tem, 0),
					  TREE_OPERAND (tem, 1));
	return invert_truthvalue (tem);
      }

    case GE_EXPR:
      /* a >= b  is  (a == b) || (a > b); recurse on the two handled
	 codes.  */
      return
	fold_build2 (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (EQ_EXPR, type, arg0, comp_const),
		     optimize_minmax_comparison
		     (GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2 (LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2 (GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
5320 /* T is an integer expression that is being multiplied, divided, or taken a
5321 modulus (CODE says which and what kind of divide or modulus) by a
5322 constant C. See if we can eliminate that operation by folding it with
5323 other operations already in T. WIDE_TYPE, if non-null, is a type that
5324 should be used for the computation if wider than our type.
5326 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5327 (X * 2) + (Y * 4). We must, however, be assured that either the original
5328 expression would not overflow or that overflow is undefined for the type
5329 in the language in question.
5331 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5332 the machine has a multiply-accumulate insn or that this is part of an
5333 addressing calculation.
5335 If we return a non-null expression, it is an equivalent form of the
5336 original computation, but need not be in the original type. */
5338 static tree
5339 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5341 /* To avoid exponential search depth, refuse to allow recursion past
5342 three levels. Beyond that (1) it's highly unlikely that we'll find
5343 something interesting and (2) we've probably processed it before
5344 when we built the inner expression. */
5346 static int depth;
5347 tree ret;
5349 if (depth > 3)
5350 return NULL;
5352 depth++;
5353 ret = extract_muldiv_1 (t, c, code, wide_type);
5354 depth--;
5356 return ret;
/* Worker for extract_muldiv.  Performs one level of the rewrite and
   recurses (via extract_muldiv, which bounds the depth) into
   sub-expressions.  See the comment before extract_muldiv for the full
   contract.  */

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  /* Compute in WIDE_TYPE only when it is strictly wider than T's own
     type; otherwise stay in T's type.  */
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  /* Nonzero when T's code is the same operation we are distributing.  */
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (GET_MODE_SIZE (TYPE_MODE (type))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && ! TREE_CONSTANT_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*lang_hooks.types.signed_type) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
	    {
	      /* Rebuild the ABS_EXPR in the signed variant of ctype,
		 then convert back.  */
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* Push the operation inside the unary node.  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  /* Multiplying or dividing by a negative constant reverses
	     the ordering, so MIN and MAX swap.  */
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, t2));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && ! flag_wrapv))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
				     fold_convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either
	 an operation of CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TYPE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ! flag_wrapv
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    /* OP1 is a multiple of C: keep TCODE with constant OP1/C.  */
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   op1, c, 0)));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    /* C is a multiple of OP1: keep CODE with constant C/OP1.  */
	    return fold_build2 (code, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   c, op1, 0)));
	}
      break;

    default:
      break;
    }

  return 0;
}
5640 /* Return a node which has the indicated constant VALUE (either 0 or
5641 1), and is of the indicated TYPE. */
5643 tree
5644 constant_boolean_node (int value, tree type)
5646 if (type == integer_type_node)
5647 return value ? integer_one_node : integer_zero_node;
5648 else if (type == boolean_type_node)
5649 return value ? boolean_true_node : boolean_false_node;
5650 else
5651 return build_int_cst (type, value);
/* Return true if expr looks like an ARRAY_REF and set base and
   offset to the appropriate trees.  If there is no offset,
   offset is set to NULL_TREE.  Base will be canonicalized to
   something you can get the element type from using
   TREE_TYPE (TREE_TYPE (base)).  Offset will be the offset
   in bytes to the base.  */

static bool
extract_array_ref (tree expr, tree *base, tree *offset)
{
  /* One canonical form is a PLUS_EXPR with the first
     argument being an ADDR_EXPR with a possible NOP_EXPR
     attached.  */
  if (TREE_CODE (expr) == PLUS_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      tree inner_base, dummy1;
      /* Strip NOP_EXPRs here because the C frontends and/or
	 folders present us (int *)&x.a + 4B possibly.  */
      STRIP_NOPS (op0);
      /* Recurse on the address operand; combine its offset (if any)
	 with the PLUS_EXPR's second operand.  */
      if (extract_array_ref (op0, &inner_base, &dummy1))
	{
	  *base = inner_base;
	  if (dummy1 == NULL_TREE)
	    *offset = TREE_OPERAND (expr, 1);
	  else
	    *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
				   dummy1, TREE_OPERAND (expr, 1));
	  return true;
	}
    }
  /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
     which we transform into an ADDR_EXPR with appropriate
     offset.  For other arguments to the ADDR_EXPR we assume
     zero offset and as such do not care about the ADDR_EXPR
     type and strip possible nops from it.  */
  else if (TREE_CODE (expr) == ADDR_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      if (TREE_CODE (op0) == ARRAY_REF)
	{
	  /* &a[i]: base is the array, offset is i * element size.  */
	  tree idx = TREE_OPERAND (op0, 1);
	  *base = TREE_OPERAND (op0, 0);
	  *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
				 array_ref_element_size (op0));
	}
      else
	{
	  /* Handle array-to-pointer decay as &a.  */
	  if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
	    *base = TREE_OPERAND (expr, 0);
	  else
	    *base = expr;
	  *offset = NULL_TREE;
	}
      return true;
    }
  /* The next canonical form is a VAR_DECL with POINTER_TYPE.  */
  else if (SSA_VAR_P (expr)
	   && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
    {
      *base = expr;
      *offset = NULL_TREE;
      return true;
    }

  return false;
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  /* Type of the conditional operand and of the other operand, as seen
     by the binary operation CODE.  */
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once its pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If a branch has void type (e.g. it throws), it does not make
	 sense to apply an arithmetic operation to it; keep that branch
	 as-is by pre-setting the corresponding result arm.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      /* COND is a bare comparison such as (x < y); treat it as
	 (x < y) ? true : false.  */
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert (arg_type, arg);
  /* Build each arm of the result unless it was pre-set above, keeping
     the operand order dictated by COND_FIRST_P.  */
  if (lhs == 0)
    {
      true_value = fold_convert (cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2 (code, type, true_value, arg);
      else
	lhs = fold_build2 (code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert (cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2 (code, type, false_value, arg);
      else
	rhs = fold_build2 (code, type, arg, false_value);
    }

  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
  return fold_convert (type, test);
}
5795 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5797 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5798 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5799 ADDEND is the same as X.
5801 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5802 and finite. The problematic cases are when X is zero, and its mode
5803 has signed zeros. In the case of rounding towards -infinity,
5804 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5805 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5807 static bool
5808 fold_real_zero_addition_p (tree type, tree addend, int negate)
5810 if (!real_zerop (addend))
5811 return false;
5813 /* Don't allow the fold with -fsignaling-nans. */
5814 if (HONOR_SNANS (TYPE_MODE (type)))
5815 return false;
5817 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5818 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5819 return true;
5821 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5822 if (TREE_CODE (addend) == REAL_CST
5823 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5824 negate = !negate;
5826 /* The mode has signed zeros, and we have to honor their sign.
5827 In this situation, there is only one case we can return true for.
5828 X - 0 is the same as X unless rounding towards -infinity is
5829 supported. */
5830 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      /* ARG0 is a call to sqrt; ARG is sqrt's argument.  */
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are all
	     false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand (type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand (type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2 (GE_EXPR, type, arg,
			      build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  /* Square the constant: sqrt(x) > c becomes x > c*c.  */
	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2 (EQ_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand (type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2 (code, type, arg,
			      build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand (type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2 (NE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2 (GE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.
		 ARG is evaluated twice below, so it must be safe to
		 wrap in a SAVE_EXPR; bail out at global scope or when
		 it contains a placeholder.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (NE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2 (code, type, arg,
				build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (code, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }
	}
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  ARG0 is
	 used twice, so it must be wrappable in a SAVE_EXPR.  */
      if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold_build2 (EQ_EXPR, type, arg0, arg0);
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
			    arg0, build_real (TREE_TYPE (arg0), max));

      /* The transformation below creates non-gimple code and thus is
	 not appropriate if we are in gimple form.  */
      if (in_gimple_form)
	return NULL_TREE;

      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST
   (the "TREE_REAL_CST" wording of the old comment was a copy-paste
   error: this routine manipulates ARG1 with TREE_INT_CST_LOW/HIGH).

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  /* True when the [lo, hi] range ran off the low end of the type, so
     an overflowed bound means "always below range" rather than
     "always above".  */
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double (TREE_INT_CST_LOW (arg01),
			 TREE_INT_CST_HIGH (arg01),
			 TREE_INT_CST_LOW (arg1),
			 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
  prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
  prod = force_fit_type (prod, -1, overflow, false);
  neg_overflow = false;

  /* Compute the range [lo, hi] of X values for which X/C1 == C2.
     The shape of the range depends on the signs of C1 and C2 because
     of truncating division.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double (TREE_INT_CST_LOW (prod),
			     TREE_INT_CST_HIGH (prod),
			     TREE_INT_CST_LOW (tmp),
			     TREE_INT_CST_HIGH (tmp),
			     &lpart, &hpart);
      hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
      hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
			   TREE_CONSTANT_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case  0:
	  /* X/C1 == 0 for X in (-C1, C1), i.e. [-(C1-1), C1-1].  */
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case  1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case  0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case  1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Rewrite the comparison in terms of the range [lo, hi].  A bound
     marked TREE_OVERFLOW lies outside the representable range, so the
     corresponding comparison degenerates (direction given by
     neg_overflow).  */
  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
6203 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6204 equality/inequality test, then return a simplified form of the test
6205 using a sign testing. Otherwise return NULL. TYPE is the desired
6206 result type. */
6208 static tree
6209 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6210 tree result_type)
6212 /* If this is testing a single bit, we can optimize the test. */
6213 if ((code == NE_EXPR || code == EQ_EXPR)
6214 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6215 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6217 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6218 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6219 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6221 if (arg00 != NULL_TREE
6222 /* This is only a win if casting to a signed type is cheap,
6223 i.e. when arg00's type is not a partial mode. */
6224 && TYPE_PRECISION (TREE_TYPE (arg00))
6225 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6227 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6228 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6229 result_type, fold_convert (stype, arg00),
6230 build_int_cst (stype, 0));
6234 return NULL_TREE;
6237 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6238 equality/inequality test, then return a simplified form of
6239 the test using shifts and logical operations. Otherwise return
6240 NULL. TYPE is the desired result type. */
tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
		      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test:
     only (A & C) ==/!= 0 with C a power of two qualifies.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);	/* A in (A & C).  */
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));	/* log2 (C).  */
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  NOTE(review): the
	 compare_tree_int call passes bitnum - TYPE_PRECISION (type),
	 which is negative and converts to a large unsigned value; the
	 test appears to rely on that wraparound — confirm against
	 compare_tree_int's signature before touching this.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      /* Do the shift/mask arithmetic in the chosen-signedness copy of
	 the operand's mode.  */
      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      /* For ==, flip the low bit so the final AND yields the inverted
	 test result.  */
      if (code == EQ_EXPR)
	inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
			     inner, integer_one_node);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type,
		      inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
6318 /* Check whether we are allowed to reorder operands arg0 and arg1,
6319 such that the evaluation of arg1 occurs before arg0. */
6321 static bool
6322 reorder_operands_p (tree arg0, tree arg1)
6324 if (! flag_evaluation_order)
6325 return true;
6326 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6327 return true;
6328 return ! TREE_SIDE_EFFECTS (arg0)
6329 && ! TREE_SIDE_EFFECTS (arg1);
6332 /* Test whether it is preferable two swap two operands, ARG0 and
6333 ARG1, for example because ARG0 is an integer constant and ARG1
6334 isn't. If REORDER is true, only recommend swapping if we can
6335 evaluate the operands in reverse order. */
6337 bool
6338 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6340 STRIP_SIGN_NOPS (arg0);
6341 STRIP_SIGN_NOPS (arg1);
6343 if (TREE_CODE (arg1) == INTEGER_CST)
6344 return 0;
6345 if (TREE_CODE (arg0) == INTEGER_CST)
6346 return 1;
6348 if (TREE_CODE (arg1) == REAL_CST)
6349 return 0;
6350 if (TREE_CODE (arg0) == REAL_CST)
6351 return 1;
6353 if (TREE_CODE (arg1) == COMPLEX_CST)
6354 return 0;
6355 if (TREE_CODE (arg0) == COMPLEX_CST)
6356 return 1;
6358 if (TREE_CONSTANT (arg1))
6359 return 0;
6360 if (TREE_CONSTANT (arg0))
6361 return 1;
6363 if (optimize_size)
6364 return 0;
6366 if (reorder && flag_evaluation_order
6367 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6368 return 0;
6370 if (DECL_P (arg1))
6371 return 0;
6372 if (DECL_P (arg0))
6373 return 1;
6375 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6376 for commutative and comparison operators. Ensuring a canonical
6377 form allows the optimizers to find additional redundancies without
6378 having to explicitly check for both orderings. */
6379 if (TREE_CODE (arg0) == SSA_NAME
6380 && TREE_CODE (arg1) == SSA_NAME
6381 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6382 return 1;
6384 return 0;
6387 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6388 ARG0 is extended to a wider type. */
6390 static tree
6391 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6393 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6394 tree arg1_unw;
6395 tree shorter_type, outer_type;
6396 tree min, max;
6397 bool above, below;
6399 if (arg0_unw == arg0)
6400 return NULL_TREE;
6401 shorter_type = TREE_TYPE (arg0_unw);
6403 #ifdef HAVE_canonicalize_funcptr_for_compare
6404 /* Disable this optimization if we're casting a function pointer
6405 type on targets that require function pointer canonicalization. */
6406 if (HAVE_canonicalize_funcptr_for_compare
6407 && TREE_CODE (shorter_type) == POINTER_TYPE
6408 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6409 return NULL_TREE;
6410 #endif
6412 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6413 return NULL_TREE;
6415 arg1_unw = get_unwidened (arg1, shorter_type);
6417 /* If possible, express the comparison in the shorter mode. */
6418 if ((code == EQ_EXPR || code == NE_EXPR
6419 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6420 && (TREE_TYPE (arg1_unw) == shorter_type
6421 || (TREE_CODE (arg1_unw) == INTEGER_CST
6422 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6423 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6424 && int_fits_type_p (arg1_unw, shorter_type))))
6425 return fold_build2 (code, type, arg0_unw,
6426 fold_convert (shorter_type, arg1_unw));
6428 if (TREE_CODE (arg1_unw) != INTEGER_CST
6429 || TREE_CODE (shorter_type) != INTEGER_TYPE
6430 || !int_fits_type_p (arg1_unw, shorter_type))
6431 return NULL_TREE;
6433 /* If we are comparing with the integer that does not fit into the range
6434 of the shorter type, the result is known. */
6435 outer_type = TREE_TYPE (arg1_unw);
6436 min = lower_bound_in_type (outer_type, shorter_type);
6437 max = upper_bound_in_type (outer_type, shorter_type);
6439 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6440 max, arg1_unw));
6441 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6442 arg1_unw, min));
6444 switch (code)
6446 case EQ_EXPR:
6447 if (above || below)
6448 return omit_one_operand (type, integer_zero_node, arg0);
6449 break;
6451 case NE_EXPR:
6452 if (above || below)
6453 return omit_one_operand (type, integer_one_node, arg0);
6454 break;
6456 case LT_EXPR:
6457 case LE_EXPR:
6458 if (above)
6459 return omit_one_operand (type, integer_one_node, arg0);
6460 else if (below)
6461 return omit_one_operand (type, integer_zero_node, arg0);
6463 case GT_EXPR:
6464 case GE_EXPR:
6465 if (above)
6466 return omit_one_operand (type, integer_zero_node, arg0);
6467 else if (below)
6468 return omit_one_operand (type, integer_one_node, arg0);
6470 default:
6471 break;
6474 return NULL_TREE;
6477 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6478 ARG0 just the signedness is changed. */
static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner, tmp;
  tree inner_type, outer_type;

  /* ARG0 must be a conversion for the transformation to apply.  */
  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  /* Only a pure signedness change (same precision) is handled.  */
  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  /* ARG1 must be a constant, or itself a conversion from the same
     inner type, so the comparison can be redone on the inner types.  */
  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
	    || TREE_CODE (arg1) == CONVERT_EXPR)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  /* When the signedness actually differs, only equality comparisons
     are insensitive to the change.  */
  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      /* Rebuild the constant in the inner type, preserving the
	 original overflow indicators.  */
      tmp = build_int_cst_wide (inner_type,
				TREE_INT_CST_LOW (arg1),
				TREE_INT_CST_HIGH (arg1));
      arg1 = force_fit_type (tmp, 0,
			     TREE_OVERFLOW (arg1),
			     TREE_CONSTANT_OVERFLOW (arg1));
    }
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
6533 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6534 step of the array. Reconstructs s and delta in the case of s * delta
6535 being an integer constant (and thus already folded).
6536 ADDR is the address. MULT is the multiplicative expression.
6537 If the function succeeds, the new address expression is returned. Otherwise
6538 NULL_TREE is returned. */
static tree
try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      /* A plain constant: leave S unknown so any element size whose
	 remainder divides DELTA exactly can match below.  */
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Walk inward through the component references looking for an
     ARRAY_REF whose element size matches S.  Note that `continue'
     executes the loop step, i.e. it skips past a non-matching
     ARRAY_REF to the next inner reference.  */
  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  /* ITYPE is kept from the matching ARRAY_REF and used below
	     to build the adjusted index.  */
	  itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! itype)
	    continue;

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  break;
	}

      /* Ran off the end of the component-reference chain without a
	 usable ARRAY_REF.  */
      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  /* Duplicate the chain of references down to (and including) REF so
     the original expression is left unmodified.  */
  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  /* POS now is the copy of REF; fold DELTA into its index.  */
  TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
				       fold_convert (itype,
						     TREE_OPERAND (pos, 1)),
				       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
6638 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6639 means A >= Y && A != MAX, but in this case we know that
6640 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6642 static tree
6643 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6645 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6647 if (TREE_CODE (bound) == LT_EXPR)
6648 a = TREE_OPERAND (bound, 0);
6649 else if (TREE_CODE (bound) == GT_EXPR)
6650 a = TREE_OPERAND (bound, 1);
6651 else
6652 return NULL_TREE;
6654 typea = TREE_TYPE (a);
6655 if (!INTEGRAL_TYPE_P (typea)
6656 && !POINTER_TYPE_P (typea))
6657 return NULL_TREE;
6659 if (TREE_CODE (ineq) == LT_EXPR)
6661 a1 = TREE_OPERAND (ineq, 1);
6662 y = TREE_OPERAND (ineq, 0);
6664 else if (TREE_CODE (ineq) == GT_EXPR)
6666 a1 = TREE_OPERAND (ineq, 0);
6667 y = TREE_OPERAND (ineq, 1);
6669 else
6670 return NULL_TREE;
6672 if (TREE_TYPE (a1) != typea)
6673 return NULL_TREE;
6675 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6676 if (!integer_onep (diff))
6677 return NULL_TREE;
6679 return fold_build2 (GE_EXPR, type, a, y);
6682 /* Fold a sum or difference of at least one multiplication.
6683 Returns the folded tree or NULL if no simplification could be made. */
static tree
fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  /* Decompose each operand into factors, treating a non-MULT operand
     as itself times one.  */
  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else
    {
      arg00 = arg0;
      arg01 = fold_convert (type, integer_one_node);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else
    {
      arg10 = arg1;
      arg11 = fold_convert (type, integer_one_node);
    }
  same = NULL_TREE;

  /* Look for a factor common to both products; SAME is the shared
     factor, ALT0/ALT1 the remaining ones.  */
  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
	  < (int11 >= 0 ? int11 : -int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = arg01;
	  swap = true;
	}
      else
	maybe_same = arg11;

      /* If the smaller constant is a power of two that divides the
	 larger, rewrite the larger product as
	 (ARG00 * (int01/int11)) * int11 so a factor is shared.  */
      if (exact_log2 (int11) > 0 && int01 % int11 == 0)
	{
	  alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
			      build_int_cst (TREE_TYPE (arg00),
					     int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  /* Undo the earlier operand swap so ALT0/ALT1 keep the
	     original left/right association.  */
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2 (MULT_EXPR, type,
			fold_build2 (code, type,
				     fold_convert (type, alt0),
				     fold_convert (type, alt1)),
			fold_convert (type, same));

  return NULL_TREE;
}
6774 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6775 specified by EXPR into the buffer PTR of length LEN bytes.
6776 Return the number of bytes placed in the buffer, or zero
6777 upon failure. */
static int
native_encode_int (tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  /* Emit the value byte by byte, mapping logical byte BYTE (little-
     endian numbering within the two HOST_WIDE_INT halves) to its
     target-endian position OFFSET in the buffer.  */
  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      /* Low half of the constant lives in TREE_INT_CST_LOW, the rest
	 in TREE_INT_CST_HIGH.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  /* Multi-word value: honor both word and byte endianness.  */
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
6819 /* Subroutine of native_encode_expr. Encode the REAL_CST
6820 specified by EXPR into the buffer PTR of length LEN bytes.
6821 Return the number of bytes placed in the buffer, or zero
6822 upon failure. */
static int
native_encode_real (tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the hosts long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  /* Get the target representation as an array of 32-bit chunks.  */
  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  /* Emit byte by byte; note floats use FLOAT_WORDS_BIG_ENDIAN for the
     word order, unlike the integer case.  */
  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (FLOAT_WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
6866 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6867 specified by EXPR into the buffer PTR of length LEN bytes.
6868 Return the number of bytes placed in the buffer, or zero
6869 upon failure. */
6871 static int
6872 native_encode_complex (tree expr, unsigned char *ptr, int len)
6874 int rsize, isize;
6875 tree part;
6877 part = TREE_REALPART (expr);
6878 rsize = native_encode_expr (part, ptr, len);
6879 if (rsize == 0)
6880 return 0;
6881 part = TREE_IMAGPART (expr);
6882 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6883 if (isize != rsize)
6884 return 0;
6885 return rsize + isize;
6889 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6890 specified by EXPR into the buffer PTR of length LEN bytes.
6891 Return the number of bytes placed in the buffer, or zero
6892 upon failure. */
static int
native_encode_vector (tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree elem, elements;

  size = 0;
  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  for (i = 0; i < count; i++)
    {
      /* The element list may be shorter than the vector; trailing
	 elements are implicitly zero.  */
      if (elements)
	{
	  elem = TREE_VALUE (elements);
	  elements = TREE_CHAIN (elements);
	}
      else
	elem = NULL_TREE;

      if (elem)
	{
	  size = native_encode_expr (elem, ptr+offset, len-offset);
	  if (size == 0)
	    return 0;
	}
      else if (size != 0)
	{
	  /* Implicit zero element: emit SIZE zero bytes, reusing the
	     size of the last explicitly encoded element.  */
	  if (offset + size > len)
	    return 0;
	  memset (ptr+offset, 0, size);
	}
      else
	/* No element and no prior size to copy — the very first
	   element was missing, so the element size is unknown.  */
	return 0;
      offset += size;
    }
  return offset;
}
6934 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
6935 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
6936 buffer PTR of length LEN bytes. Return the number of bytes
6937 placed in the buffer, or zero upon failure. */
6939 static int
6940 native_encode_expr (tree expr, unsigned char *ptr, int len)
6942 switch (TREE_CODE (expr))
6944 case INTEGER_CST:
6945 return native_encode_int (expr, ptr, len);
6947 case REAL_CST:
6948 return native_encode_real (expr, ptr, len);
6950 case COMPLEX_CST:
6951 return native_encode_complex (expr, ptr, len);
6953 case VECTOR_CST:
6954 return native_encode_vector (expr, ptr, len);
6956 default:
6957 return 0;
6962 /* Subroutine of native_interpret_expr. Interpret the contents of
6963 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
6964 If the buffer cannot be interpreted, return NULL_TREE. */
static tree
native_interpret_int (tree type, unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  /* Note: "unsigned int HOST_WIDE_INT" is valid C — specifier order is
     free — and means unsigned HOST_WIDE_INT.  */
  unsigned int HOST_WIDE_INT lo = 0;
  HOST_WIDE_INT hi = 0;

  if (total_bytes > len)
    return NULL_TREE;
  /* The value must fit in the LO/HI double-word pair.  */
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;
  words = total_bytes / UNITS_PER_WORD;

  /* Read byte by byte, inverting the target-endian mapping used by
     native_encode_int.  */
  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      /* Accumulate into the low or high half as appropriate.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	lo |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
	hi |= (unsigned HOST_WIDE_INT) value
	      << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  /* Normalize (sign-/zero-extend) the constant for TYPE.  */
  return force_fit_type (build_int_cst_wide (type, lo, hi),
			 0, false, false);
}
7011 /* Subroutine of native_interpret_expr. Interpret the contents of
7012 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7013 If the buffer cannot be interpreted, return NULL_TREE. */
7015 static tree
7016 native_interpret_real (tree type, unsigned char *ptr, int len)
7018 enum machine_mode mode = TYPE_MODE (type);
7019 int total_bytes = GET_MODE_SIZE (mode);
7020 int byte, offset, word, words;
7021 unsigned char value;
7022 /* There are always 32 bits in each long, no matter the size of
7023 the hosts long. We handle floating point representations with
7024 up to 192 bits. */
7025 REAL_VALUE_TYPE r;
7026 long tmp[6];
7028 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7029 if (total_bytes > len || total_bytes > 24)
7030 return NULL_TREE;
7031 words = total_bytes / UNITS_PER_WORD;
7033 memset (tmp, 0, sizeof (tmp));
7034 for (byte = 0; byte < total_bytes; byte++)
7036 int bitpos = byte * BITS_PER_UNIT;
7037 if (total_bytes > UNITS_PER_WORD)
7039 word = byte / UNITS_PER_WORD;
7040 if (FLOAT_WORDS_BIG_ENDIAN)
7041 word = (words - 1) - word;
7042 offset = word * UNITS_PER_WORD;
7043 if (BYTES_BIG_ENDIAN)
7044 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7045 else
7046 offset += byte % UNITS_PER_WORD;
7048 else
7049 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7050 value = ptr[offset];
7052 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7055 real_from_target (&r, tmp, mode);
7056 return build_real (type, r);
7060 /* Subroutine of native_interpret_expr. Interpret the contents of
7061 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7062 If the buffer cannot be interpreted, return NULL_TREE. */
7064 static tree
7065 native_interpret_complex (tree type, unsigned char *ptr, int len)
7067 tree etype, rpart, ipart;
7068 int size;
7070 etype = TREE_TYPE (type);
7071 size = GET_MODE_SIZE (TYPE_MODE (etype));
7072 if (size * 2 > len)
7073 return NULL_TREE;
7074 rpart = native_interpret_expr (etype, ptr, size);
7075 if (!rpart)
7076 return NULL_TREE;
7077 ipart = native_interpret_expr (etype, ptr+size, size);
7078 if (!ipart)
7079 return NULL_TREE;
7080 return build_complex (type, rpart, ipart);
7084 /* Subroutine of native_interpret_expr. Interpret the contents of
7085 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7086 If the buffer cannot be interpreted, return NULL_TREE. */
7088 static tree
7089 native_interpret_vector (tree type, unsigned char *ptr, int len)
7091 tree etype, elem, elements;
7092 int i, size, count;
7094 etype = TREE_TYPE (type);
7095 size = GET_MODE_SIZE (TYPE_MODE (etype));
7096 count = TYPE_VECTOR_SUBPARTS (type);
7097 if (size * count > len)
7098 return NULL_TREE;
7100 elements = NULL_TREE;
7101 for (i = count - 1; i >= 0; i--)
7103 elem = native_interpret_expr (etype, ptr+(i*size), size);
7104 if (!elem)
7105 return NULL_TREE;
7106 elements = tree_cons (NULL_TREE, elem, elements);
7108 return build_vector (type, elements);
7112 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7113 the buffer PTR of length LEN as a constant of type TYPE. For
7114 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7115 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7116 return NULL_TREE. */
7118 static tree
7119 native_interpret_expr (tree type, unsigned char *ptr, int len)
7121 switch (TREE_CODE (type))
7123 case INTEGER_TYPE:
7124 case ENUMERAL_TYPE:
7125 case BOOLEAN_TYPE:
7126 return native_interpret_int (type, ptr, len);
7128 case REAL_TYPE:
7129 return native_interpret_real (type, ptr, len);
7131 case COMPLEX_TYPE:
7132 return native_interpret_complex (type, ptr, len);
7134 case VECTOR_TYPE:
7135 return native_interpret_vector (type, ptr, len);
7137 default:
7138 return NULL_TREE;
7143 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7144 TYPE at compile-time. If we're unable to perform the conversion
7145 return NULL_TREE. */
7147 static tree
7148 fold_view_convert_expr (tree type, tree expr)
7150 /* We support up to 512-bit values (for V8DFmode). */
7151 unsigned char buffer[64];
7152 int len;
7154 /* Check that the host and target are sane. */
7155 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7156 return NULL_TREE;
7158 len = native_encode_expr (expr, buffer, sizeof (buffer));
7159 if (len == 0)
7160 return NULL_TREE;
7162 return native_interpret_expr (type, buffer, len);
7166 /* Fold a unary expression of code CODE and type TYPE with operand
7167 OP0. Return the folded expression if folding is successful.
7168 Otherwise, return NULL_TREE. */
7170 tree
7171 fold_unary (enum tree_code code, tree type, tree op0)
7173 tree tem;
7174 tree arg0;
7175 enum tree_code_class kind = TREE_CODE_CLASS (code);
7177 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7178 && TREE_CODE_LENGTH (code) == 1);
7180 arg0 = op0;
7181 if (arg0)
7183 if (code == NOP_EXPR || code == CONVERT_EXPR
7184 || code == FLOAT_EXPR || code == ABS_EXPR)
7186 /* Don't use STRIP_NOPS, because signedness of argument type
7187 matters. */
7188 STRIP_SIGN_NOPS (arg0);
7190 else
7192 /* Strip any conversions that don't change the mode. This
7193 is safe for every expression, except for a comparison
7194 expression because its signedness is derived from its
7195 operands.
7197 Note that this is done as an internal manipulation within
7198 the constant folder, in order to find the simplest
7199 representation of the arguments so that their form can be
7200 studied. In any cases, the appropriate type conversions
7201 should be put back in the tree that will get out of the
7202 constant folder. */
7203 STRIP_NOPS (arg0);
7207 if (TREE_CODE_CLASS (code) == tcc_unary)
7209 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7210 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7211 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7212 else if (TREE_CODE (arg0) == COND_EXPR)
7214 tree arg01 = TREE_OPERAND (arg0, 1);
7215 tree arg02 = TREE_OPERAND (arg0, 2);
7216 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7217 arg01 = fold_build1 (code, type, arg01);
7218 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7219 arg02 = fold_build1 (code, type, arg02);
7220 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7221 arg01, arg02);
7223 /* If this was a conversion, and all we did was to move into
7224 inside the COND_EXPR, bring it back out. But leave it if
7225 it is a conversion from integer to integer and the
7226 result precision is no wider than a word since such a
7227 conversion is cheap and may be optimized away by combine,
7228 while it couldn't if it were outside the COND_EXPR. Then return
7229 so we don't get into an infinite recursion loop taking the
7230 conversion out and then back in. */
7232 if ((code == NOP_EXPR || code == CONVERT_EXPR
7233 || code == NON_LVALUE_EXPR)
7234 && TREE_CODE (tem) == COND_EXPR
7235 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7236 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7237 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7238 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7239 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7240 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7241 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7242 && (INTEGRAL_TYPE_P
7243 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7244 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7245 || flag_syntax_only))
7246 tem = build1 (code, type,
7247 build3 (COND_EXPR,
7248 TREE_TYPE (TREE_OPERAND
7249 (TREE_OPERAND (tem, 1), 0)),
7250 TREE_OPERAND (tem, 0),
7251 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7252 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7253 return tem;
7255 else if (COMPARISON_CLASS_P (arg0))
7257 if (TREE_CODE (type) == BOOLEAN_TYPE)
7259 arg0 = copy_node (arg0);
7260 TREE_TYPE (arg0) = type;
7261 return arg0;
7263 else if (TREE_CODE (type) != INTEGER_TYPE)
7264 return fold_build3 (COND_EXPR, type, arg0,
7265 fold_build1 (code, type,
7266 integer_one_node),
7267 fold_build1 (code, type,
7268 integer_zero_node));
7272 switch (code)
7274 case NOP_EXPR:
7275 case FLOAT_EXPR:
7276 case CONVERT_EXPR:
7277 case FIX_TRUNC_EXPR:
7278 case FIX_CEIL_EXPR:
7279 case FIX_FLOOR_EXPR:
7280 case FIX_ROUND_EXPR:
7281 if (TREE_TYPE (op0) == type)
7282 return op0;
7284 /* If we have (type) (a CMP b) and type is an integral type, return
7285 new expression involving the new type. */
7286 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7287 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7288 TREE_OPERAND (op0, 1));
7290 /* Handle cases of two conversions in a row. */
7291 if (TREE_CODE (op0) == NOP_EXPR
7292 || TREE_CODE (op0) == CONVERT_EXPR)
7294 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7295 tree inter_type = TREE_TYPE (op0);
7296 int inside_int = INTEGRAL_TYPE_P (inside_type);
7297 int inside_ptr = POINTER_TYPE_P (inside_type);
7298 int inside_float = FLOAT_TYPE_P (inside_type);
7299 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7300 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7301 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7302 int inter_int = INTEGRAL_TYPE_P (inter_type);
7303 int inter_ptr = POINTER_TYPE_P (inter_type);
7304 int inter_float = FLOAT_TYPE_P (inter_type);
7305 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7306 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7307 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7308 int final_int = INTEGRAL_TYPE_P (type);
7309 int final_ptr = POINTER_TYPE_P (type);
7310 int final_float = FLOAT_TYPE_P (type);
7311 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7312 unsigned int final_prec = TYPE_PRECISION (type);
7313 int final_unsignedp = TYPE_UNSIGNED (type);
7315 /* In addition to the cases of two conversions in a row
7316 handled below, if we are converting something to its own
7317 type via an object of identical or wider precision, neither
7318 conversion is needed. */
7319 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7320 && (((inter_int || inter_ptr) && final_int)
7321 || (inter_float && final_float))
7322 && inter_prec >= final_prec)
7323 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7325 /* Likewise, if the intermediate and final types are either both
7326 float or both integer, we don't need the middle conversion if
7327 it is wider than the final type and doesn't change the signedness
7328 (for integers). Avoid this if the final type is a pointer
7329 since then we sometimes need the inner conversion. Likewise if
7330 the outer has a precision not equal to the size of its mode. */
7331 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7332 || (inter_float && inside_float)
7333 || (inter_vec && inside_vec))
7334 && inter_prec >= inside_prec
7335 && (inter_float || inter_vec
7336 || inter_unsignedp == inside_unsignedp)
7337 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7338 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7339 && ! final_ptr
7340 && (! final_vec || inter_prec == inside_prec))
7341 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7343 /* If we have a sign-extension of a zero-extended value, we can
7344 replace that by a single zero-extension. */
7345 if (inside_int && inter_int && final_int
7346 && inside_prec < inter_prec && inter_prec < final_prec
7347 && inside_unsignedp && !inter_unsignedp)
7348 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7350 /* Two conversions in a row are not needed unless:
7351 - some conversion is floating-point (overstrict for now), or
7352 - some conversion is a vector (overstrict for now), or
7353 - the intermediate type is narrower than both initial and
7354 final, or
7355 - the intermediate type and innermost type differ in signedness,
7356 and the outermost type is wider than the intermediate, or
7357 - the initial type is a pointer type and the precisions of the
7358 intermediate and final types differ, or
7359 - the final type is a pointer type and the precisions of the
7360 initial and intermediate types differ.
7361 - the final type is a pointer type and the initial type not
7362 - the initial type is a pointer to an array and the final type
7363 not. */
7364 if (! inside_float && ! inter_float && ! final_float
7365 && ! inside_vec && ! inter_vec && ! final_vec
7366 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7367 && ! (inside_int && inter_int
7368 && inter_unsignedp != inside_unsignedp
7369 && inter_prec < final_prec)
7370 && ((inter_unsignedp && inter_prec > inside_prec)
7371 == (final_unsignedp && final_prec > inter_prec))
7372 && ! (inside_ptr && inter_prec != final_prec)
7373 && ! (final_ptr && inside_prec != inter_prec)
7374 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7375 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7376 && final_ptr == inside_ptr
7377 && ! (inside_ptr
7378 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7379 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7380 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7383 /* Handle (T *)&A.B.C for A being of type T and B and C
7384 living at offset zero. This occurs frequently in
7385 C++ upcasting and then accessing the base. */
7386 if (TREE_CODE (op0) == ADDR_EXPR
7387 && POINTER_TYPE_P (type)
7388 && handled_component_p (TREE_OPERAND (op0, 0)))
7390 HOST_WIDE_INT bitsize, bitpos;
7391 tree offset;
7392 enum machine_mode mode;
7393 int unsignedp, volatilep;
7394 tree base = TREE_OPERAND (op0, 0);
7395 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7396 &mode, &unsignedp, &volatilep, false);
7397 /* If the reference was to a (constant) zero offset, we can use
7398 the address of the base if it has the same base type
7399 as the result type. */
7400 if (! offset && bitpos == 0
7401 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7402 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7403 return fold_convert (type, build_fold_addr_expr (base));
7406 if (TREE_CODE (op0) == MODIFY_EXPR
7407 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7408 /* Detect assigning a bitfield. */
7409 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7410 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7412 /* Don't leave an assignment inside a conversion
7413 unless assigning a bitfield. */
7414 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7415 /* First do the assignment, then return converted constant. */
7416 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7417 TREE_NO_WARNING (tem) = 1;
7418 TREE_USED (tem) = 1;
7419 return tem;
7422 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7423 constants (if x has signed type, the sign bit cannot be set
7424 in c). This folds extension into the BIT_AND_EXPR. */
7425 if (INTEGRAL_TYPE_P (type)
7426 && TREE_CODE (type) != BOOLEAN_TYPE
7427 && TREE_CODE (op0) == BIT_AND_EXPR
7428 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7430 tree and = op0;
7431 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7432 int change = 0;
7434 if (TYPE_UNSIGNED (TREE_TYPE (and))
7435 || (TYPE_PRECISION (type)
7436 <= TYPE_PRECISION (TREE_TYPE (and))))
7437 change = 1;
7438 else if (TYPE_PRECISION (TREE_TYPE (and1))
7439 <= HOST_BITS_PER_WIDE_INT
7440 && host_integerp (and1, 1))
7442 unsigned HOST_WIDE_INT cst;
7444 cst = tree_low_cst (and1, 1);
7445 cst &= (HOST_WIDE_INT) -1
7446 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7447 change = (cst == 0);
7448 #ifdef LOAD_EXTEND_OP
7449 if (change
7450 && !flag_syntax_only
7451 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7452 == ZERO_EXTEND))
7454 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7455 and0 = fold_convert (uns, and0);
7456 and1 = fold_convert (uns, and1);
7458 #endif
7460 if (change)
7462 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7463 TREE_INT_CST_HIGH (and1));
7464 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7465 TREE_CONSTANT_OVERFLOW (and1));
7466 return fold_build2 (BIT_AND_EXPR, type,
7467 fold_convert (type, and0), tem);
7471 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7472 T2 being pointers to types of the same size. */
7473 if (POINTER_TYPE_P (type)
7474 && BINARY_CLASS_P (arg0)
7475 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7476 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7478 tree arg00 = TREE_OPERAND (arg0, 0);
7479 tree t0 = type;
7480 tree t1 = TREE_TYPE (arg00);
7481 tree tt0 = TREE_TYPE (t0);
7482 tree tt1 = TREE_TYPE (t1);
7483 tree s0 = TYPE_SIZE (tt0);
7484 tree s1 = TYPE_SIZE (tt1);
7486 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7487 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7488 TREE_OPERAND (arg0, 1));
7491 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7492 of the same precision, and X is an integer type not narrower than
7493 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7494 if (INTEGRAL_TYPE_P (type)
7495 && TREE_CODE (op0) == BIT_NOT_EXPR
7496 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7497 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7498 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7499 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7501 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7502 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7503 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7504 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7507 tem = fold_convert_const (code, type, arg0);
7508 return tem ? tem : NULL_TREE;
7510 case VIEW_CONVERT_EXPR:
7511 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7512 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7513 return fold_view_convert_expr (type, op0);
7515 case NEGATE_EXPR:
7516 if (negate_expr_p (arg0))
7517 return fold_convert (type, negate_expr (arg0));
7518 return NULL_TREE;
7520 case ABS_EXPR:
7521 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7522 return fold_abs_const (arg0, type);
7523 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7524 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7525 /* Convert fabs((double)float) into (double)fabsf(float). */
7526 else if (TREE_CODE (arg0) == NOP_EXPR
7527 && TREE_CODE (type) == REAL_TYPE)
7529 tree targ0 = strip_float_extensions (arg0);
7530 if (targ0 != arg0)
7531 return fold_convert (type, fold_build1 (ABS_EXPR,
7532 TREE_TYPE (targ0),
7533 targ0));
7535 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7536 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7537 return arg0;
7539 /* Strip sign ops from argument. */
7540 if (TREE_CODE (type) == REAL_TYPE)
7542 tem = fold_strip_sign_ops (arg0);
7543 if (tem)
7544 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7546 return NULL_TREE;
7548 case CONJ_EXPR:
7549 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7550 return fold_convert (type, arg0);
7551 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7552 return build2 (COMPLEX_EXPR, type,
7553 TREE_OPERAND (arg0, 0),
7554 negate_expr (TREE_OPERAND (arg0, 1)));
7555 else if (TREE_CODE (arg0) == COMPLEX_CST)
7556 return build_complex (type, TREE_REALPART (arg0),
7557 negate_expr (TREE_IMAGPART (arg0)));
7558 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7559 return fold_build2 (TREE_CODE (arg0), type,
7560 fold_build1 (CONJ_EXPR, type,
7561 TREE_OPERAND (arg0, 0)),
7562 fold_build1 (CONJ_EXPR, type,
7563 TREE_OPERAND (arg0, 1)));
7564 else if (TREE_CODE (arg0) == CONJ_EXPR)
7565 return TREE_OPERAND (arg0, 0);
7566 return NULL_TREE;
7568 case BIT_NOT_EXPR:
7569 if (TREE_CODE (arg0) == INTEGER_CST)
7570 return fold_not_const (arg0, type);
7571 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7572 return TREE_OPERAND (arg0, 0);
7573 /* Convert ~ (-A) to A - 1. */
7574 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7575 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7576 build_int_cst (type, 1));
7577 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7578 else if (INTEGRAL_TYPE_P (type)
7579 && ((TREE_CODE (arg0) == MINUS_EXPR
7580 && integer_onep (TREE_OPERAND (arg0, 1)))
7581 || (TREE_CODE (arg0) == PLUS_EXPR
7582 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7583 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7584 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7585 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7586 && (tem = fold_unary (BIT_NOT_EXPR, type,
7587 fold_convert (type,
7588 TREE_OPERAND (arg0, 0)))))
7589 return fold_build2 (BIT_XOR_EXPR, type, tem,
7590 fold_convert (type, TREE_OPERAND (arg0, 1)));
7591 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7592 && (tem = fold_unary (BIT_NOT_EXPR, type,
7593 fold_convert (type,
7594 TREE_OPERAND (arg0, 1)))))
7595 return fold_build2 (BIT_XOR_EXPR, type,
7596 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7598 return NULL_TREE;
7600 case TRUTH_NOT_EXPR:
7601 /* The argument to invert_truthvalue must have Boolean type. */
7602 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7603 arg0 = fold_convert (boolean_type_node, arg0);
7605 /* Note that the operand of this must be an int
7606 and its values must be 0 or 1.
7607 ("true" is a fixed value perhaps depending on the language,
7608 but we don't handle values other than 1 correctly yet.) */
7609 tem = invert_truthvalue (arg0);
7610 /* Avoid infinite recursion. */
7611 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7612 return NULL_TREE;
7613 return fold_convert (type, tem);
7615 case REALPART_EXPR:
7616 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7617 return NULL_TREE;
7618 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7619 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7620 TREE_OPERAND (arg0, 1));
7621 else if (TREE_CODE (arg0) == COMPLEX_CST)
7622 return TREE_REALPART (arg0);
7623 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7624 return fold_build2 (TREE_CODE (arg0), type,
7625 fold_build1 (REALPART_EXPR, type,
7626 TREE_OPERAND (arg0, 0)),
7627 fold_build1 (REALPART_EXPR, type,
7628 TREE_OPERAND (arg0, 1)));
7629 return NULL_TREE;
7631 case IMAGPART_EXPR:
7632 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7633 return fold_convert (type, integer_zero_node);
7634 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7635 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7636 TREE_OPERAND (arg0, 0));
7637 else if (TREE_CODE (arg0) == COMPLEX_CST)
7638 return TREE_IMAGPART (arg0);
7639 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7640 return fold_build2 (TREE_CODE (arg0), type,
7641 fold_build1 (IMAGPART_EXPR, type,
7642 TREE_OPERAND (arg0, 0)),
7643 fold_build1 (IMAGPART_EXPR, type,
7644 TREE_OPERAND (arg0, 1)));
7645 return NULL_TREE;
7647 default:
7648 return NULL_TREE;
7649 } /* switch (code) */
7652 /* Fold a binary expression of code CODE and type TYPE with operands
7653 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7654 Return the folded expression if folding is successful. Otherwise,
7655 return NULL_TREE. */
7695 /* Subroutine of fold_binary. This routine performs all of the
7696 transformations that are common to the equality/inequality
7697 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7698 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7699 fold_binary itself should call fold_binary rather than invoking
this routine directly. Fold a comparison with
7700 tree code CODE and type TYPE with operands OP0 and OP1. Return
7701 the folded comparison or NULL_TREE. */
7703 static tree
7704 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
/* Each transformation below either returns a folded tree or falls
   through to the next one; order is significant.  */
7706 tree arg0, arg1, tem;
7708 arg0 = op0;
7709 arg1 = op1;
/* Work on copies with sign-preserving no-op conversions stripped.  */
7711 STRIP_SIGN_NOPS (arg0);
7712 STRIP_SIGN_NOPS (arg1);
/* Fold away comparisons of two constants first.  */
7714 tem = fold_relational_const (code, type, arg0, arg1);
7715 if (tem != NULL_TREE)
7716 return tem;
7718 /* If one arg is a real or integer constant, put it last. */
7719 if (tree_swap_operands_p (arg0, arg1, true))
7720 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7722 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
/* NOTE: restricted to signed ARG1 with neither -fwrapv nor -ftrapv,
   as the guard below checks, so the rewrite cannot change overflow
   behavior.  */
7723 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7724 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7725 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7726 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7727 && !(flag_wrapv || flag_trapv))
7728 && (TREE_CODE (arg1) == INTEGER_CST
7729 && !TREE_OVERFLOW (arg1)))
7731 tree const1 = TREE_OPERAND (arg0, 1);
7732 tree const2 = arg1;
7733 tree variable = TREE_OPERAND (arg0, 0);
7734 tree lhs;
7735 int lhs_add;
7736 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7738 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7739 TREE_TYPE (arg1), const2, const1);
/* Only use the combined constant if it did not overflow.  */
7740 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7741 && (TREE_CODE (lhs) != INTEGER_CST
7742 || !TREE_OVERFLOW (lhs)))
7743 return fold_build2 (code, type, variable, lhs);
/* Floating-point-specific simplifications.  */
7746 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7748 tree targ0 = strip_float_extensions (arg0);
7749 tree targ1 = strip_float_extensions (arg1);
7750 tree newtype = TREE_TYPE (targ0);
7752 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7753 newtype = TREE_TYPE (targ1);
7755 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7756 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7757 return fold_build2 (code, type, fold_convert (newtype, targ0),
7758 fold_convert (newtype, targ1));
7760 /* (-a) CMP (-b) -> b CMP a */
7761 if (TREE_CODE (arg0) == NEGATE_EXPR
7762 && TREE_CODE (arg1) == NEGATE_EXPR)
7763 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
7764 TREE_OPERAND (arg0, 0));
7766 if (TREE_CODE (arg1) == REAL_CST)
7768 REAL_VALUE_TYPE cst;
7769 cst = TREE_REAL_CST (arg1);
7771 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7772 if (TREE_CODE (arg0) == NEGATE_EXPR)
7773 return fold_build2 (swap_tree_comparison (code), type,
7774 TREE_OPERAND (arg0, 0),
7775 build_real (TREE_TYPE (arg1),
7776 REAL_VALUE_NEGATE (cst)));
7778 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7779 /* a CMP (-0) -> a CMP 0 */
7780 if (REAL_VALUE_MINUS_ZERO (cst))
7781 return fold_build2 (code, type, arg0,
7782 build_real (TREE_TYPE (arg1), dconst0));
7784 /* x != NaN is always true, other ops are always false. */
7785 if (REAL_VALUE_ISNAN (cst)
7786 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7788 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7789 return omit_one_operand (type, tem, arg0);
7792 /* Fold comparisons against infinity. */
7793 if (REAL_VALUE_ISINF (cst))
7795 tem = fold_inf_compare (code, type, arg0, arg1);
7796 if (tem != NULL_TREE)
7797 return tem;
7801 /* If this is a comparison of a real constant with a PLUS_EXPR
7802 or a MINUS_EXPR of a real constant, we can convert it into a
7803 comparison with a revised real constant as long as no overflow
7804 occurs when unsafe_math_optimizations are enabled. */
7805 if (flag_unsafe_math_optimizations
7806 && TREE_CODE (arg1) == REAL_CST
7807 && (TREE_CODE (arg0) == PLUS_EXPR
7808 || TREE_CODE (arg0) == MINUS_EXPR)
7809 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7810 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7811 ? MINUS_EXPR : PLUS_EXPR,
7812 arg1, TREE_OPERAND (arg0, 1), 0))
7813 && ! TREE_CONSTANT_OVERFLOW (tem))
7814 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
7816 /* Likewise, we can simplify a comparison of a real constant with
7817 a MINUS_EXPR whose first operand is also a real constant, i.e.
7818 (c1 - x) < c2 becomes x > c1-c2. */
7819 if (flag_unsafe_math_optimizations
7820 && TREE_CODE (arg1) == REAL_CST
7821 && TREE_CODE (arg0) == MINUS_EXPR
7822 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7823 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7824 arg1, 0))
7825 && ! TREE_CONSTANT_OVERFLOW (tem))
7826 return fold_build2 (swap_tree_comparison (code), type,
7827 TREE_OPERAND (arg0, 1), tem);
7829 /* Fold comparisons against built-in math functions. */
7830 if (TREE_CODE (arg1) == REAL_CST
7831 && flag_unsafe_math_optimizations
7832 && ! flag_errno_math)
7834 enum built_in_function fcode = builtin_mathfn_code (arg0);
7836 if (fcode != END_BUILTINS)
7838 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7839 if (tem != NULL_TREE)
7840 return tem;
7845 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7846 if (TREE_CONSTANT (arg1)
7847 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7848 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7849 /* This optimization is invalid for ordered comparisons
7850 if CONST+INCR overflows or if foo+incr might overflow.
7851 This optimization is invalid for floating point due to rounding.
7852 For pointer types we assume overflow doesn't happen. */
7853 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7854 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7855 && (code == EQ_EXPR || code == NE_EXPR))))
7857 tree varop, newconst;
7859 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7861 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
7862 arg1, TREE_OPERAND (arg0, 1));
7863 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7864 TREE_OPERAND (arg0, 0),
7865 TREE_OPERAND (arg0, 1));
7867 else
7869 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
7870 arg1, TREE_OPERAND (arg0, 1));
7871 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7872 TREE_OPERAND (arg0, 0),
7873 TREE_OPERAND (arg0, 1));
7877 /* If VAROP is a reference to a bitfield, we must mask
7878 the constant by the width of the field. */
7879 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7880 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
7881 && host_integerp (DECL_SIZE (TREE_OPERAND
7882 (TREE_OPERAND (varop, 0), 1)), 1))
7884 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7885 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
7886 tree folded_compare, shift;
7888 /* First check whether the comparison would come out
7889 always the same. If we don't do that we would
7890 change the meaning with the masking. */
7891 folded_compare = fold_build2 (code, type,
7892 TREE_OPERAND (varop, 0), arg1);
7893 if (TREE_CODE (folded_compare) == INTEGER_CST)
7894 return omit_one_operand (type, folded_compare, varop);
/* Mask NEWCONST to the field width by shifting left then right.  */
7896 shift = build_int_cst (NULL_TREE,
7897 TYPE_PRECISION (TREE_TYPE (varop)) - size);
7898 shift = fold_convert (TREE_TYPE (varop), shift);
7899 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7900 newconst, shift);
7901 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7902 newconst, shift);
7905 return fold_build2 (code, type, varop, newconst);
7908 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7909 && (TREE_CODE (arg0) == NOP_EXPR
7910 || TREE_CODE (arg0) == CONVERT_EXPR))
7912 /* If we are widening one operand of an integer comparison,
7913 see if the other operand is similarly being widened. Perhaps we
7914 can do the comparison in the narrower type. */
7915 tem = fold_widened_comparison (code, type, arg0, arg1);
7916 if (tem)
7917 return tem;
7919 /* Or if we are changing signedness. */
7920 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
7921 if (tem)
7922 return tem;
7925 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7926 constant, we can simplify it. */
7927 if (TREE_CODE (arg1) == INTEGER_CST
7928 && (TREE_CODE (arg0) == MIN_EXPR
7929 || TREE_CODE (arg0) == MAX_EXPR)
7930 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7932 tem = optimize_minmax_comparison (code, type, op0, op1);
7933 if (tem)
7934 return tem;
7937 /* Simplify comparison of something with itself. (For IEEE
7938 floating-point, we can only do some of these simplifications.) */
7939 if (operand_equal_p (arg0, arg1, 0))
7941 switch (code)
7943 case EQ_EXPR:
7944 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7945 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7946 return constant_boolean_node (1, type);
7947 break;
7949 case GE_EXPR:
7950 case LE_EXPR:
7951 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7952 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7953 return constant_boolean_node (1, type);
7954 return fold_build2 (EQ_EXPR, type, arg0, arg1);
7956 case NE_EXPR:
7957 /* For NE, we can only do this simplification if integer
7958 or we don't honor IEEE floating point NaNs. */
7959 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7960 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7961 break;
7962 /* ... fall through ... */
7963 case GT_EXPR:
7964 case LT_EXPR:
7965 return constant_boolean_node (0, type);
7966 default:
7967 gcc_unreachable ();
7971 /* If we are comparing an expression that just has comparisons
7972 of two integer values, arithmetic expressions of those comparisons,
7973 and constants, we can simplify it. There are only three cases
7974 to check: the two values can either be equal, the first can be
7975 greater, or the second can be greater. Fold the expression for
7976 those three values. Since each value must be 0 or 1, we have
7977 eight possibilities, each of which corresponds to the constant 0
7978 or 1 or one of the six possible comparisons.
7980 This handles common cases like (a > b) == 0 but also handles
7981 expressions like ((x > y) - (y > x)) > 0, which supposedly
7982 occur in macroized code. */
7984 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7986 tree cval1 = 0, cval2 = 0;
7987 int save_p = 0;
7989 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7990 /* Don't handle degenerate cases here; they should already
7991 have been handled anyway. */
7992 && cval1 != 0 && cval2 != 0
7993 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7994 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7995 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7996 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7997 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7998 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7999 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8001 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8002 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8004 /* We can't just pass T to eval_subst in case cval1 or cval2
8005 was the same as ARG1. */
8007 tree high_result
8008 = fold_build2 (code, type,
8009 eval_subst (arg0, cval1, maxval,
8010 cval2, minval),
8011 arg1);
8012 tree equal_result
8013 = fold_build2 (code, type,
8014 eval_subst (arg0, cval1, maxval,
8015 cval2, maxval),
8016 arg1);
8017 tree low_result
8018 = fold_build2 (code, type,
8019 eval_subst (arg0, cval1, minval,
8020 cval2, maxval),
8021 arg1);
8023 /* All three of these results should be 0 or 1. Confirm they are.
8024 Then use those values to select the proper code to use. */
8026 if (TREE_CODE (high_result) == INTEGER_CST
8027 && TREE_CODE (equal_result) == INTEGER_CST
8028 && TREE_CODE (low_result) == INTEGER_CST)
8030 /* Make a 3-bit mask with the high-order bit being the
8031 value for `>', the next for '=', and the low for '<'. */
8032 switch ((integer_onep (high_result) * 4)
8033 + (integer_onep (equal_result) * 2)
8034 + integer_onep (low_result))
8036 case 0:
8037 /* Always false. */
8038 return omit_one_operand (type, integer_zero_node, arg0);
8039 case 1:
8040 code = LT_EXPR;
8041 break;
8042 case 2:
8043 code = EQ_EXPR;
8044 break;
8045 case 3:
8046 code = LE_EXPR;
8047 break;
8048 case 4:
8049 code = GT_EXPR;
8050 break;
8051 case 5:
8052 code = NE_EXPR;
8053 break;
8054 case 6:
8055 code = GE_EXPR;
8056 break;
8057 case 7:
8058 /* Always true. */
8059 return omit_one_operand (type, integer_one_node, arg0);
/* NOTE(review): SAVE_P presumably means CVAL1/CVAL2 may be evaluated
   more than once — confirm against twoval_comparison_p.  */
8062 if (save_p)
8063 return save_expr (build2 (code, type, cval1, cval2));
8064 return fold_build2 (code, type, cval1, cval2);
8069 /* Fold a comparison of the address of COMPONENT_REFs with the same
8070 type and component to a comparison of the address of the base
8071 object. In short, &x->a OP &y->a to x OP y and
8072 &x->a OP &y.a to x OP &y */
8073 if (TREE_CODE (arg0) == ADDR_EXPR
8074 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8075 && TREE_CODE (arg1) == ADDR_EXPR
8076 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8078 tree cref0 = TREE_OPERAND (arg0, 0);
8079 tree cref1 = TREE_OPERAND (arg1, 0);
8080 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8082 tree op0 = TREE_OPERAND (cref0, 0);
8083 tree op1 = TREE_OPERAND (cref1, 0);
8084 return fold_build2 (code, type,
8085 build_fold_addr_expr (op0),
8086 build_fold_addr_expr (op1));
8090 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8091 into a single range test. */
8092 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8093 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8094 && TREE_CODE (arg1) == INTEGER_CST
8095 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8096 && !integer_zerop (TREE_OPERAND (arg0, 1))
8097 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8098 && !TREE_OVERFLOW (arg1))
8100 tem = fold_div_compare (code, type, arg0, arg1);
8101 if (tem != NULL_TREE)
8102 return tem;
/* No simplification applied.  */
8105 return NULL_TREE;
8109 /* Subroutine of fold_binary. Optimize complex multiplications of the
8110 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8111 argument EXPR represents the expression "z" of type TYPE. */
8113 static tree
8114 fold_mult_zconjz (tree type, tree expr)
8116 tree itype = TREE_TYPE (type);
8117 tree rpart, ipart, tem;
8119 if (TREE_CODE (expr) == COMPLEX_EXPR)
8121 rpart = TREE_OPERAND (expr, 0);
8122 ipart = TREE_OPERAND (expr, 1);
8124 else if (TREE_CODE (expr) == COMPLEX_CST)
8126 rpart = TREE_REALPART (expr);
8127 ipart = TREE_IMAGPART (expr);
8129 else
8131 expr = save_expr (expr);
8132 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8133 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8136 rpart = save_expr (rpart);
8137 ipart = save_expr (ipart);
8138 tem = fold_build2 (PLUS_EXPR, itype,
8139 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8140 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8141 return fold_build2 (COMPLEX_EXPR, type, tem,
8142 fold_convert (itype, integer_zero_node));
8146 /* Fold a binary expression of code CODE and type TYPE with operands
8147 OP0 and OP1. Return the folded expression if folding is
8148 successful. Otherwise, return NULL_TREE. */
8150 tree
8151 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8153 enum tree_code_class kind = TREE_CODE_CLASS (code);
8154 tree arg0, arg1, tem;
8155 tree t1 = NULL_TREE;
8157 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8158 && TREE_CODE_LENGTH (code) == 2
8159 && op0 != NULL_TREE
8160 && op1 != NULL_TREE);
8162 arg0 = op0;
8163 arg1 = op1;
8165 /* Strip any conversions that don't change the mode. This is
8166 safe for every expression, except for a comparison expression
8167 because its signedness is derived from its operands. So, in
8168 the latter case, only strip conversions that don't change the
8169 signedness.
8171 Note that this is done as an internal manipulation within the
8172 constant folder, in order to find the simplest representation
8173 of the arguments so that their form can be studied. In any
8174 cases, the appropriate type conversions should be put back in
8175 the tree that will get out of the constant folder. */
8177 if (kind == tcc_comparison)
8179 STRIP_SIGN_NOPS (arg0);
8180 STRIP_SIGN_NOPS (arg1);
8182 else
8184 STRIP_NOPS (arg0);
8185 STRIP_NOPS (arg1);
8188 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8189 constant but we can't do arithmetic on them. */
8190 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8191 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8192 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8193 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8195 if (kind == tcc_binary)
8196 tem = const_binop (code, arg0, arg1, 0);
8197 else if (kind == tcc_comparison)
8198 tem = fold_relational_const (code, type, arg0, arg1);
8199 else
8200 tem = NULL_TREE;
8202 if (tem != NULL_TREE)
8204 if (TREE_TYPE (tem) != type)
8205 tem = fold_convert (type, tem);
8206 return tem;
8210 /* If this is a commutative operation, and ARG0 is a constant, move it
8211 to ARG1 to reduce the number of tests below. */
8212 if (commutative_tree_code (code)
8213 && tree_swap_operands_p (arg0, arg1, true))
8214 return fold_build2 (code, type, op1, op0);
8216 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8218 First check for cases where an arithmetic operation is applied to a
8219 compound, conditional, or comparison operation. Push the arithmetic
8220 operation inside the compound or conditional to see if any folding
8221 can then be done. Convert comparison to conditional for this purpose.
8222 The also optimizes non-constant cases that used to be done in
8223 expand_expr.
8225 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8226 one of the operands is a comparison and the other is a comparison, a
8227 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8228 code below would make the expression more complex. Change it to a
8229 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8230 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8232 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8233 || code == EQ_EXPR || code == NE_EXPR)
8234 && ((truth_value_p (TREE_CODE (arg0))
8235 && (truth_value_p (TREE_CODE (arg1))
8236 || (TREE_CODE (arg1) == BIT_AND_EXPR
8237 && integer_onep (TREE_OPERAND (arg1, 1)))))
8238 || (truth_value_p (TREE_CODE (arg1))
8239 && (truth_value_p (TREE_CODE (arg0))
8240 || (TREE_CODE (arg0) == BIT_AND_EXPR
8241 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8243 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8244 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8245 : TRUTH_XOR_EXPR,
8246 boolean_type_node,
8247 fold_convert (boolean_type_node, arg0),
8248 fold_convert (boolean_type_node, arg1));
8250 if (code == EQ_EXPR)
8251 tem = invert_truthvalue (tem);
8253 return fold_convert (type, tem);
8256 if (TREE_CODE_CLASS (code) == tcc_binary
8257 || TREE_CODE_CLASS (code) == tcc_comparison)
8259 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8260 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8261 fold_build2 (code, type,
8262 TREE_OPERAND (arg0, 1), op1));
8263 if (TREE_CODE (arg1) == COMPOUND_EXPR
8264 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8265 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8266 fold_build2 (code, type,
8267 op0, TREE_OPERAND (arg1, 1)));
8269 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8271 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8272 arg0, arg1,
8273 /*cond_first_p=*/1);
8274 if (tem != NULL_TREE)
8275 return tem;
8278 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8280 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8281 arg1, arg0,
8282 /*cond_first_p=*/0);
8283 if (tem != NULL_TREE)
8284 return tem;
8288 switch (code)
8290 case PLUS_EXPR:
8291 /* A + (-B) -> A - B */
8292 if (TREE_CODE (arg1) == NEGATE_EXPR)
8293 return fold_build2 (MINUS_EXPR, type,
8294 fold_convert (type, arg0),
8295 fold_convert (type, TREE_OPERAND (arg1, 0)));
8296 /* (-A) + B -> B - A */
8297 if (TREE_CODE (arg0) == NEGATE_EXPR
8298 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8299 return fold_build2 (MINUS_EXPR, type,
8300 fold_convert (type, arg1),
8301 fold_convert (type, TREE_OPERAND (arg0, 0)));
8302 /* Convert ~A + 1 to -A. */
8303 if (INTEGRAL_TYPE_P (type)
8304 && TREE_CODE (arg0) == BIT_NOT_EXPR
8305 && integer_onep (arg1))
8306 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8308 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8309 same or one. */
8310 if ((TREE_CODE (arg0) == MULT_EXPR
8311 || TREE_CODE (arg1) == MULT_EXPR)
8312 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8314 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8315 if (tem)
8316 return tem;
8319 if (! FLOAT_TYPE_P (type))
8321 if (integer_zerop (arg1))
8322 return non_lvalue (fold_convert (type, arg0));
8324 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8325 with a constant, and the two constants have no bits in common,
8326 we should treat this as a BIT_IOR_EXPR since this may produce more
8327 simplifications. */
8328 if (TREE_CODE (arg0) == BIT_AND_EXPR
8329 && TREE_CODE (arg1) == BIT_AND_EXPR
8330 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8331 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8332 && integer_zerop (const_binop (BIT_AND_EXPR,
8333 TREE_OPERAND (arg0, 1),
8334 TREE_OPERAND (arg1, 1), 0)))
8336 code = BIT_IOR_EXPR;
8337 goto bit_ior;
8340 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8341 (plus (plus (mult) (mult)) (foo)) so that we can
8342 take advantage of the factoring cases below. */
8343 if (((TREE_CODE (arg0) == PLUS_EXPR
8344 || TREE_CODE (arg0) == MINUS_EXPR)
8345 && TREE_CODE (arg1) == MULT_EXPR)
8346 || ((TREE_CODE (arg1) == PLUS_EXPR
8347 || TREE_CODE (arg1) == MINUS_EXPR)
8348 && TREE_CODE (arg0) == MULT_EXPR))
8350 tree parg0, parg1, parg, marg;
8351 enum tree_code pcode;
8353 if (TREE_CODE (arg1) == MULT_EXPR)
8354 parg = arg0, marg = arg1;
8355 else
8356 parg = arg1, marg = arg0;
8357 pcode = TREE_CODE (parg);
8358 parg0 = TREE_OPERAND (parg, 0);
8359 parg1 = TREE_OPERAND (parg, 1);
8360 STRIP_NOPS (parg0);
8361 STRIP_NOPS (parg1);
8363 if (TREE_CODE (parg0) == MULT_EXPR
8364 && TREE_CODE (parg1) != MULT_EXPR)
8365 return fold_build2 (pcode, type,
8366 fold_build2 (PLUS_EXPR, type,
8367 fold_convert (type, parg0),
8368 fold_convert (type, marg)),
8369 fold_convert (type, parg1));
8370 if (TREE_CODE (parg0) != MULT_EXPR
8371 && TREE_CODE (parg1) == MULT_EXPR)
8372 return fold_build2 (PLUS_EXPR, type,
8373 fold_convert (type, parg0),
8374 fold_build2 (pcode, type,
8375 fold_convert (type, marg),
8376 fold_convert (type,
8377 parg1)));
8380 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8381 of the array. Loop optimizer sometimes produce this type of
8382 expressions. */
8383 if (TREE_CODE (arg0) == ADDR_EXPR)
8385 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8386 if (tem)
8387 return fold_convert (type, tem);
8389 else if (TREE_CODE (arg1) == ADDR_EXPR)
8391 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8392 if (tem)
8393 return fold_convert (type, tem);
8396 else
8398 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8399 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8400 return non_lvalue (fold_convert (type, arg0));
8402 /* Likewise if the operands are reversed. */
8403 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8404 return non_lvalue (fold_convert (type, arg1));
8406 /* Convert X + -C into X - C. */
8407 if (TREE_CODE (arg1) == REAL_CST
8408 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8410 tem = fold_negate_const (arg1, type);
8411 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8412 return fold_build2 (MINUS_EXPR, type,
8413 fold_convert (type, arg0),
8414 fold_convert (type, tem));
8417 if (flag_unsafe_math_optimizations
8418 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8419 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8420 && (tem = distribute_real_division (code, type, arg0, arg1)))
8421 return tem;
8423 /* Convert x+x into x*2.0. */
8424 if (operand_equal_p (arg0, arg1, 0)
8425 && SCALAR_FLOAT_TYPE_P (type))
8426 return fold_build2 (MULT_EXPR, type, arg0,
8427 build_real (type, dconst2));
8429 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8430 if (flag_unsafe_math_optimizations
8431 && TREE_CODE (arg1) == PLUS_EXPR
8432 && TREE_CODE (arg0) != MULT_EXPR)
8434 tree tree10 = TREE_OPERAND (arg1, 0);
8435 tree tree11 = TREE_OPERAND (arg1, 1);
8436 if (TREE_CODE (tree11) == MULT_EXPR
8437 && TREE_CODE (tree10) == MULT_EXPR)
8439 tree tree0;
8440 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8441 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8444 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8445 if (flag_unsafe_math_optimizations
8446 && TREE_CODE (arg0) == PLUS_EXPR
8447 && TREE_CODE (arg1) != MULT_EXPR)
8449 tree tree00 = TREE_OPERAND (arg0, 0);
8450 tree tree01 = TREE_OPERAND (arg0, 1);
8451 if (TREE_CODE (tree01) == MULT_EXPR
8452 && TREE_CODE (tree00) == MULT_EXPR)
8454 tree tree0;
8455 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8456 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8461 bit_rotate:
8462 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8463 is a rotate of A by C1 bits. */
8464 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8465 is a rotate of A by B bits. */
8467 enum tree_code code0, code1;
8468 code0 = TREE_CODE (arg0);
8469 code1 = TREE_CODE (arg1);
8470 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8471 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8472 && operand_equal_p (TREE_OPERAND (arg0, 0),
8473 TREE_OPERAND (arg1, 0), 0)
8474 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8476 tree tree01, tree11;
8477 enum tree_code code01, code11;
8479 tree01 = TREE_OPERAND (arg0, 1);
8480 tree11 = TREE_OPERAND (arg1, 1);
8481 STRIP_NOPS (tree01);
8482 STRIP_NOPS (tree11);
8483 code01 = TREE_CODE (tree01);
8484 code11 = TREE_CODE (tree11);
8485 if (code01 == INTEGER_CST
8486 && code11 == INTEGER_CST
8487 && TREE_INT_CST_HIGH (tree01) == 0
8488 && TREE_INT_CST_HIGH (tree11) == 0
8489 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8490 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8491 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8492 code0 == LSHIFT_EXPR ? tree01 : tree11);
8493 else if (code11 == MINUS_EXPR)
8495 tree tree110, tree111;
8496 tree110 = TREE_OPERAND (tree11, 0);
8497 tree111 = TREE_OPERAND (tree11, 1);
8498 STRIP_NOPS (tree110);
8499 STRIP_NOPS (tree111);
8500 if (TREE_CODE (tree110) == INTEGER_CST
8501 && 0 == compare_tree_int (tree110,
8502 TYPE_PRECISION
8503 (TREE_TYPE (TREE_OPERAND
8504 (arg0, 0))))
8505 && operand_equal_p (tree01, tree111, 0))
8506 return build2 ((code0 == LSHIFT_EXPR
8507 ? LROTATE_EXPR
8508 : RROTATE_EXPR),
8509 type, TREE_OPERAND (arg0, 0), tree01);
8511 else if (code01 == MINUS_EXPR)
8513 tree tree010, tree011;
8514 tree010 = TREE_OPERAND (tree01, 0);
8515 tree011 = TREE_OPERAND (tree01, 1);
8516 STRIP_NOPS (tree010);
8517 STRIP_NOPS (tree011);
8518 if (TREE_CODE (tree010) == INTEGER_CST
8519 && 0 == compare_tree_int (tree010,
8520 TYPE_PRECISION
8521 (TREE_TYPE (TREE_OPERAND
8522 (arg0, 0))))
8523 && operand_equal_p (tree11, tree011, 0))
8524 return build2 ((code0 != LSHIFT_EXPR
8525 ? LROTATE_EXPR
8526 : RROTATE_EXPR),
8527 type, TREE_OPERAND (arg0, 0), tree11);
8532 associate:
8533 /* In most languages, can't associate operations on floats through
8534 parentheses. Rather than remember where the parentheses were, we
8535 don't associate floats at all, unless the user has specified
8536 -funsafe-math-optimizations. */
8538 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8540 tree var0, con0, lit0, minus_lit0;
8541 tree var1, con1, lit1, minus_lit1;
8543 /* Split both trees into variables, constants, and literals. Then
8544 associate each group together, the constants with literals,
8545 then the result with variables. This increases the chances of
8546 literals being recombined later and of generating relocatable
8547 expressions for the sum of a constant and literal. */
8548 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8549 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8550 code == MINUS_EXPR);
8552 /* Only do something if we found more than two objects. Otherwise,
8553 nothing has changed and we risk infinite recursion. */
8554 if (2 < ((var0 != 0) + (var1 != 0)
8555 + (con0 != 0) + (con1 != 0)
8556 + (lit0 != 0) + (lit1 != 0)
8557 + (minus_lit0 != 0) + (minus_lit1 != 0)))
8559 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8560 if (code == MINUS_EXPR)
8561 code = PLUS_EXPR;
8563 var0 = associate_trees (var0, var1, code, type);
8564 con0 = associate_trees (con0, con1, code, type);
8565 lit0 = associate_trees (lit0, lit1, code, type);
8566 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8568 /* Preserve the MINUS_EXPR if the negative part of the literal is
8569 greater than the positive part. Otherwise, the multiplicative
8570 folding code (i.e extract_muldiv) may be fooled in case
8571 unsigned constants are subtracted, like in the following
8572 example: ((X*2 + 4) - 8U)/2. */
8573 if (minus_lit0 && lit0)
8575 if (TREE_CODE (lit0) == INTEGER_CST
8576 && TREE_CODE (minus_lit0) == INTEGER_CST
8577 && tree_int_cst_lt (lit0, minus_lit0))
8579 minus_lit0 = associate_trees (minus_lit0, lit0,
8580 MINUS_EXPR, type);
8581 lit0 = 0;
8583 else
8585 lit0 = associate_trees (lit0, minus_lit0,
8586 MINUS_EXPR, type);
8587 minus_lit0 = 0;
8590 if (minus_lit0)
8592 if (con0 == 0)
8593 return fold_convert (type,
8594 associate_trees (var0, minus_lit0,
8595 MINUS_EXPR, type));
8596 else
8598 con0 = associate_trees (con0, minus_lit0,
8599 MINUS_EXPR, type);
8600 return fold_convert (type,
8601 associate_trees (var0, con0,
8602 PLUS_EXPR, type));
8606 con0 = associate_trees (con0, lit0, code, type);
8607 return fold_convert (type, associate_trees (var0, con0,
8608 code, type));
8612 return NULL_TREE;
8614 case MINUS_EXPR:
8615 /* A - (-B) -> A + B */
8616 if (TREE_CODE (arg1) == NEGATE_EXPR)
8617 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8618 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8619 if (TREE_CODE (arg0) == NEGATE_EXPR
8620 && (FLOAT_TYPE_P (type)
8621 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8622 && negate_expr_p (arg1)
8623 && reorder_operands_p (arg0, arg1))
8624 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8625 TREE_OPERAND (arg0, 0));
8626 /* Convert -A - 1 to ~A. */
8627 if (INTEGRAL_TYPE_P (type)
8628 && TREE_CODE (arg0) == NEGATE_EXPR
8629 && integer_onep (arg1))
8630 return fold_build1 (BIT_NOT_EXPR, type,
8631 fold_convert (type, TREE_OPERAND (arg0, 0)));
8633 /* Convert -1 - A to ~A. */
8634 if (INTEGRAL_TYPE_P (type)
8635 && integer_all_onesp (arg0))
8636 return fold_build1 (BIT_NOT_EXPR, type, arg1);
8638 if (! FLOAT_TYPE_P (type))
8640 if (integer_zerop (arg0))
8641 return negate_expr (fold_convert (type, arg1));
8642 if (integer_zerop (arg1))
8643 return non_lvalue (fold_convert (type, arg0));
8645 /* Fold A - (A & B) into ~B & A. */
8646 if (!TREE_SIDE_EFFECTS (arg0)
8647 && TREE_CODE (arg1) == BIT_AND_EXPR)
8649 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8650 return fold_build2 (BIT_AND_EXPR, type,
8651 fold_build1 (BIT_NOT_EXPR, type,
8652 TREE_OPERAND (arg1, 0)),
8653 arg0);
8654 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8655 return fold_build2 (BIT_AND_EXPR, type,
8656 fold_build1 (BIT_NOT_EXPR, type,
8657 TREE_OPERAND (arg1, 1)),
8658 arg0);
8661 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8662 any power of 2 minus 1. */
8663 if (TREE_CODE (arg0) == BIT_AND_EXPR
8664 && TREE_CODE (arg1) == BIT_AND_EXPR
8665 && operand_equal_p (TREE_OPERAND (arg0, 0),
8666 TREE_OPERAND (arg1, 0), 0))
8668 tree mask0 = TREE_OPERAND (arg0, 1);
8669 tree mask1 = TREE_OPERAND (arg1, 1);
8670 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
8672 if (operand_equal_p (tem, mask1, 0))
8674 tem = fold_build2 (BIT_XOR_EXPR, type,
8675 TREE_OPERAND (arg0, 0), mask1);
8676 return fold_build2 (MINUS_EXPR, type, tem, mask1);
8681 /* See if ARG1 is zero and X - ARG1 reduces to X. */
8682 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
8683 return non_lvalue (fold_convert (type, arg0));
8685 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
8686 ARG0 is zero and X + ARG0 reduces to X, since that would mean
8687 (-ARG1 + ARG0) reduces to -ARG1. */
8688 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8689 return negate_expr (fold_convert (type, arg1));
8691 /* Fold &x - &x. This can happen from &x.foo - &x.
8692 This is unsafe for certain floats even in non-IEEE formats.
8693 In IEEE, it is unsafe because it does wrong for NaNs.
8694 Also note that operand_equal_p is always false if an operand
8695 is volatile. */
8697 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8698 && operand_equal_p (arg0, arg1, 0))
8699 return fold_convert (type, integer_zero_node);
8701 /* A - B -> A + (-B) if B is easily negatable. */
8702 if (negate_expr_p (arg1)
8703 && ((FLOAT_TYPE_P (type)
8704 /* Avoid this transformation if B is a positive REAL_CST. */
8705 && (TREE_CODE (arg1) != REAL_CST
8706 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
8707 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
8708 return fold_build2 (PLUS_EXPR, type,
8709 fold_convert (type, arg0),
8710 fold_convert (type, negate_expr (arg1)));
8712 /* Try folding difference of addresses. */
8714 HOST_WIDE_INT diff;
8716 if ((TREE_CODE (arg0) == ADDR_EXPR
8717 || TREE_CODE (arg1) == ADDR_EXPR)
8718 && ptr_difference_const (arg0, arg1, &diff))
8719 return build_int_cst_type (type, diff);
8722 /* Fold &a[i] - &a[j] to i-j. */
8723 if (TREE_CODE (arg0) == ADDR_EXPR
8724 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
8725 && TREE_CODE (arg1) == ADDR_EXPR
8726 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
8728 tree aref0 = TREE_OPERAND (arg0, 0);
8729 tree aref1 = TREE_OPERAND (arg1, 0);
8730 if (operand_equal_p (TREE_OPERAND (aref0, 0),
8731 TREE_OPERAND (aref1, 0), 0))
8733 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
8734 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
8735 tree esz = array_ref_element_size (aref0);
8736 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8737 return fold_build2 (MULT_EXPR, type, diff,
8738 fold_convert (type, esz));
8743 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
8744 of the array. Loop optimizer sometimes produce this type of
8745 expressions. */
8746 if (TREE_CODE (arg0) == ADDR_EXPR)
8748 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
8749 if (tem)
8750 return fold_convert (type, tem);
8753 if (flag_unsafe_math_optimizations
8754 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8755 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8756 && (tem = distribute_real_division (code, type, arg0, arg1)))
8757 return tem;
8759 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
8760 same or one. */
8761 if ((TREE_CODE (arg0) == MULT_EXPR
8762 || TREE_CODE (arg1) == MULT_EXPR)
8763 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8765 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8766 if (tem)
8767 return tem;
8770 goto associate;
8772 case MULT_EXPR:
8773 /* (-A) * (-B) -> A * B */
8774 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8775 return fold_build2 (MULT_EXPR, type,
8776 TREE_OPERAND (arg0, 0),
8777 negate_expr (arg1));
8778 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8779 return fold_build2 (MULT_EXPR, type,
8780 negate_expr (arg0),
8781 TREE_OPERAND (arg1, 0));
8783 if (! FLOAT_TYPE_P (type))
8785 if (integer_zerop (arg1))
8786 return omit_one_operand (type, arg1, arg0);
8787 if (integer_onep (arg1))
8788 return non_lvalue (fold_convert (type, arg0));
8789 /* Transform x * -1 into -x. */
8790 if (integer_all_onesp (arg1))
8791 return fold_convert (type, negate_expr (arg0));
8793 /* (a * (1 << b)) is (a << b) */
8794 if (TREE_CODE (arg1) == LSHIFT_EXPR
8795 && integer_onep (TREE_OPERAND (arg1, 0)))
8796 return fold_build2 (LSHIFT_EXPR, type, arg0,
8797 TREE_OPERAND (arg1, 1));
8798 if (TREE_CODE (arg0) == LSHIFT_EXPR
8799 && integer_onep (TREE_OPERAND (arg0, 0)))
8800 return fold_build2 (LSHIFT_EXPR, type, arg1,
8801 TREE_OPERAND (arg0, 1));
8803 if (TREE_CODE (arg1) == INTEGER_CST
8804 && 0 != (tem = extract_muldiv (op0,
8805 fold_convert (type, arg1),
8806 code, NULL_TREE)))
8807 return fold_convert (type, tem);
8809 /* Optimize z * conj(z) for integer complex numbers. */
8810 if (TREE_CODE (arg0) == CONJ_EXPR
8811 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8812 return fold_mult_zconjz (type, arg1);
8813 if (TREE_CODE (arg1) == CONJ_EXPR
8814 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8815 return fold_mult_zconjz (type, arg0);
8817 else
8819 /* Maybe fold x * 0 to 0. The expressions aren't the same
8820 when x is NaN, since x * 0 is also NaN. Nor are they the
8821 same in modes with signed zeros, since multiplying a
8822 negative value by 0 gives -0, not +0. */
8823 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8824 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8825 && real_zerop (arg1))
8826 return omit_one_operand (type, arg1, arg0);
8827 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
8828 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8829 && real_onep (arg1))
8830 return non_lvalue (fold_convert (type, arg0));
8832 /* Transform x * -1.0 into -x. */
8833 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8834 && real_minus_onep (arg1))
8835 return fold_convert (type, negate_expr (arg0));
8837 /* Convert (C1/X)*C2 into (C1*C2)/X. */
8838 if (flag_unsafe_math_optimizations
8839 && TREE_CODE (arg0) == RDIV_EXPR
8840 && TREE_CODE (arg1) == REAL_CST
8841 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
8843 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
8844 arg1, 0);
8845 if (tem)
8846 return fold_build2 (RDIV_EXPR, type, tem,
8847 TREE_OPERAND (arg0, 1));
8850 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
8851 if (operand_equal_p (arg0, arg1, 0))
8853 tree tem = fold_strip_sign_ops (arg0);
8854 if (tem != NULL_TREE)
8856 tem = fold_convert (type, tem);
8857 return fold_build2 (MULT_EXPR, type, tem, tem);
8861 /* Optimize z * conj(z) for floating point complex numbers.
8862 Guarded by flag_unsafe_math_optimizations as non-finite
8863 imaginary components don't produce scalar results. */
8864 if (flag_unsafe_math_optimizations
8865 && TREE_CODE (arg0) == CONJ_EXPR
8866 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8867 return fold_mult_zconjz (type, arg1);
8868 if (flag_unsafe_math_optimizations
8869 && TREE_CODE (arg1) == CONJ_EXPR
8870 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8871 return fold_mult_zconjz (type, arg0);
8873 if (flag_unsafe_math_optimizations)
8875 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8876 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8878 /* Optimizations of root(...)*root(...). */
8879 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
8881 tree rootfn, arg, arglist;
8882 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8883 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8885 /* Optimize sqrt(x)*sqrt(x) as x. */
8886 if (BUILTIN_SQRT_P (fcode0)
8887 && operand_equal_p (arg00, arg10, 0)
8888 && ! HONOR_SNANS (TYPE_MODE (type)))
8889 return arg00;
8891 /* Optimize root(x)*root(y) as root(x*y). */
8892 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8893 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8894 arglist = build_tree_list (NULL_TREE, arg);
8895 return build_function_call_expr (rootfn, arglist);
8898 /* Optimize expN(x)*expN(y) as expN(x+y). */
8899 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
8901 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8902 tree arg = fold_build2 (PLUS_EXPR, type,
8903 TREE_VALUE (TREE_OPERAND (arg0, 1)),
8904 TREE_VALUE (TREE_OPERAND (arg1, 1)));
8905 tree arglist = build_tree_list (NULL_TREE, arg);
8906 return build_function_call_expr (expfn, arglist);
8909 /* Optimizations of pow(...)*pow(...). */
8910 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
8911 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
8912 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
8914 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8915 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8916 1)));
8917 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8918 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8919 1)));
8921 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
8922 if (operand_equal_p (arg01, arg11, 0))
8924 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8925 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8926 tree arglist = tree_cons (NULL_TREE, arg,
8927 build_tree_list (NULL_TREE,
8928 arg01));
8929 return build_function_call_expr (powfn, arglist);
8932 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
8933 if (operand_equal_p (arg00, arg10, 0))
8935 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8936 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
8937 tree arglist = tree_cons (NULL_TREE, arg00,
8938 build_tree_list (NULL_TREE,
8939 arg));
8940 return build_function_call_expr (powfn, arglist);
8944 /* Optimize tan(x)*cos(x) as sin(x). */
8945 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
8946 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
8947 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
8948 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
8949 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
8950 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
8951 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8952 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8954 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
8956 if (sinfn != NULL_TREE)
8957 return build_function_call_expr (sinfn,
8958 TREE_OPERAND (arg0, 1));
8961 /* Optimize x*pow(x,c) as pow(x,c+1). */
8962 if (fcode1 == BUILT_IN_POW
8963 || fcode1 == BUILT_IN_POWF
8964 || fcode1 == BUILT_IN_POWL)
8966 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8967 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8968 1)));
8969 if (TREE_CODE (arg11) == REAL_CST
8970 && ! TREE_CONSTANT_OVERFLOW (arg11)
8971 && operand_equal_p (arg0, arg10, 0))
8973 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8974 REAL_VALUE_TYPE c;
8975 tree arg, arglist;
8977 c = TREE_REAL_CST (arg11);
8978 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8979 arg = build_real (type, c);
8980 arglist = build_tree_list (NULL_TREE, arg);
8981 arglist = tree_cons (NULL_TREE, arg0, arglist);
8982 return build_function_call_expr (powfn, arglist);
8986 /* Optimize pow(x,c)*x as pow(x,c+1). */
8987 if (fcode0 == BUILT_IN_POW
8988 || fcode0 == BUILT_IN_POWF
8989 || fcode0 == BUILT_IN_POWL)
8991 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8992 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8993 1)));
8994 if (TREE_CODE (arg01) == REAL_CST
8995 && ! TREE_CONSTANT_OVERFLOW (arg01)
8996 && operand_equal_p (arg1, arg00, 0))
8998 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8999 REAL_VALUE_TYPE c;
9000 tree arg, arglist;
9002 c = TREE_REAL_CST (arg01);
9003 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9004 arg = build_real (type, c);
9005 arglist = build_tree_list (NULL_TREE, arg);
9006 arglist = tree_cons (NULL_TREE, arg1, arglist);
9007 return build_function_call_expr (powfn, arglist);
9011 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9012 if (! optimize_size
9013 && operand_equal_p (arg0, arg1, 0))
9015 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9017 if (powfn)
9019 tree arg = build_real (type, dconst2);
9020 tree arglist = build_tree_list (NULL_TREE, arg);
9021 arglist = tree_cons (NULL_TREE, arg0, arglist);
9022 return build_function_call_expr (powfn, arglist);
9027 goto associate;
9029 case BIT_IOR_EXPR:
9030 bit_ior:
9031 if (integer_all_onesp (arg1))
9032 return omit_one_operand (type, arg1, arg0);
9033 if (integer_zerop (arg1))
9034 return non_lvalue (fold_convert (type, arg0));
9035 if (operand_equal_p (arg0, arg1, 0))
9036 return non_lvalue (fold_convert (type, arg0));
9038 /* ~X | X is -1. */
9039 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9040 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9042 t1 = build_int_cst (type, -1);
9043 t1 = force_fit_type (t1, 0, false, false);
9044 return omit_one_operand (type, t1, arg1);
9047 /* X | ~X is -1. */
9048 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9049 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9051 t1 = build_int_cst (type, -1);
9052 t1 = force_fit_type (t1, 0, false, false);
9053 return omit_one_operand (type, t1, arg0);
9056 /* Canonicalize (X & C1) | C2. */
9057 if (TREE_CODE (arg0) == BIT_AND_EXPR
9058 && TREE_CODE (arg1) == INTEGER_CST
9059 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9061 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9062 int width = TYPE_PRECISION (type);
9063 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9064 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9065 hi2 = TREE_INT_CST_HIGH (arg1);
9066 lo2 = TREE_INT_CST_LOW (arg1);
9068 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9069 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9070 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9072 if (width > HOST_BITS_PER_WIDE_INT)
9074 mhi = (unsigned HOST_WIDE_INT) -1
9075 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9076 mlo = -1;
9078 else
9080 mhi = 0;
9081 mlo = (unsigned HOST_WIDE_INT) -1
9082 >> (HOST_BITS_PER_WIDE_INT - width);
9085 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9086 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9087 return fold_build2 (BIT_IOR_EXPR, type,
9088 TREE_OPERAND (arg0, 0), arg1);
9090 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9091 hi1 &= mhi;
9092 lo1 &= mlo;
9093 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9094 return fold_build2 (BIT_IOR_EXPR, type,
9095 fold_build2 (BIT_AND_EXPR, type,
9096 TREE_OPERAND (arg0, 0),
9097 build_int_cst_wide (type,
9098 lo1 & ~lo2,
9099 hi1 & ~hi2)),
9100 arg1);
9103 /* (X & Y) | Y is (X, Y). */
9104 if (TREE_CODE (arg0) == BIT_AND_EXPR
9105 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9106 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9107 /* (X & Y) | X is (Y, X). */
9108 if (TREE_CODE (arg0) == BIT_AND_EXPR
9109 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9110 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9111 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9112 /* X | (X & Y) is (Y, X). */
9113 if (TREE_CODE (arg1) == BIT_AND_EXPR
9114 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9115 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9116 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9117 /* X | (Y & X) is (Y, X). */
9118 if (TREE_CODE (arg1) == BIT_AND_EXPR
9119 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9120 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9121 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9123 t1 = distribute_bit_expr (code, type, arg0, arg1);
9124 if (t1 != NULL_TREE)
9125 return t1;
9127 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9129 This results in more efficient code for machines without a NAND
9130 instruction. Combine will canonicalize to the first form
9131 which will allow use of NAND instructions provided by the
9132 backend if they exist. */
9133 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9134 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9136 return fold_build1 (BIT_NOT_EXPR, type,
9137 build2 (BIT_AND_EXPR, type,
9138 TREE_OPERAND (arg0, 0),
9139 TREE_OPERAND (arg1, 0)));
9142 /* See if this can be simplified into a rotate first. If that
9143 is unsuccessful continue in the association code. */
9144 goto bit_rotate;
9146 case BIT_XOR_EXPR:
9147 if (integer_zerop (arg1))
9148 return non_lvalue (fold_convert (type, arg0));
9149 if (integer_all_onesp (arg1))
9150 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9151 if (operand_equal_p (arg0, arg1, 0))
9152 return omit_one_operand (type, integer_zero_node, arg0);
9154 /* ~X ^ X is -1. */
9155 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9156 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9158 t1 = build_int_cst (type, -1);
9159 t1 = force_fit_type (t1, 0, false, false);
9160 return omit_one_operand (type, t1, arg1);
9163 /* X ^ ~X is -1. */
9164 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9165 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9167 t1 = build_int_cst (type, -1);
9168 t1 = force_fit_type (t1, 0, false, false);
9169 return omit_one_operand (type, t1, arg0);
9172 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9173 with a constant, and the two constants have no bits in common,
9174 we should treat this as a BIT_IOR_EXPR since this may produce more
9175 simplifications. */
9176 if (TREE_CODE (arg0) == BIT_AND_EXPR
9177 && TREE_CODE (arg1) == BIT_AND_EXPR
9178 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9179 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9180 && integer_zerop (const_binop (BIT_AND_EXPR,
9181 TREE_OPERAND (arg0, 1),
9182 TREE_OPERAND (arg1, 1), 0)))
9184 code = BIT_IOR_EXPR;
9185 goto bit_ior;
9188 /* (X | Y) ^ X -> Y & ~ X*/
9189 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9190 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9192 tree t2 = TREE_OPERAND (arg0, 1);
9193 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9194 arg1);
9195 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9196 fold_convert (type, t1));
9197 return t1;
9200 /* (Y | X) ^ X -> Y & ~ X*/
9201 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9202 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9204 tree t2 = TREE_OPERAND (arg0, 0);
9205 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9206 arg1);
9207 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9208 fold_convert (type, t1));
9209 return t1;
9212 /* X ^ (X | Y) -> Y & ~ X*/
9213 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9214 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9216 tree t2 = TREE_OPERAND (arg1, 1);
9217 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9218 arg0);
9219 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9220 fold_convert (type, t1));
9221 return t1;
9224 /* X ^ (Y | X) -> Y & ~ X*/
9225 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9226 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9228 tree t2 = TREE_OPERAND (arg1, 0);
9229 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9230 arg0);
9231 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9232 fold_convert (type, t1));
9233 return t1;
9236 /* Convert ~X ^ ~Y to X ^ Y. */
9237 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9238 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9239 return fold_build2 (code, type,
9240 fold_convert (type, TREE_OPERAND (arg0, 0)),
9241 fold_convert (type, TREE_OPERAND (arg1, 0)));
9243 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9244 if (TREE_CODE (arg0) == BIT_AND_EXPR
9245 && integer_onep (TREE_OPERAND (arg0, 1))
9246 && integer_onep (arg1))
9247 return fold_build2 (EQ_EXPR, type, arg0,
9248 build_int_cst (TREE_TYPE (arg0), 0));
9250 /* Fold (X & Y) ^ Y as ~X & Y. */
9251 if (TREE_CODE (arg0) == BIT_AND_EXPR
9252 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9254 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9255 return fold_build2 (BIT_AND_EXPR, type,
9256 fold_build1 (BIT_NOT_EXPR, type, tem),
9257 fold_convert (type, arg1));
9259 /* Fold (X & Y) ^ X as ~Y & X. */
9260 if (TREE_CODE (arg0) == BIT_AND_EXPR
9261 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9262 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9264 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9265 return fold_build2 (BIT_AND_EXPR, type,
9266 fold_build1 (BIT_NOT_EXPR, type, tem),
9267 fold_convert (type, arg1));
9269 /* Fold X ^ (X & Y) as X & ~Y. */
9270 if (TREE_CODE (arg1) == BIT_AND_EXPR
9271 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9273 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9274 return fold_build2 (BIT_AND_EXPR, type,
9275 fold_convert (type, arg0),
9276 fold_build1 (BIT_NOT_EXPR, type, tem));
9278 /* Fold X ^ (Y & X) as ~Y & X. */
9279 if (TREE_CODE (arg1) == BIT_AND_EXPR
9280 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9281 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9283 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9284 return fold_build2 (BIT_AND_EXPR, type,
9285 fold_build1 (BIT_NOT_EXPR, type, tem),
9286 fold_convert (type, arg0));
9289 /* See if this can be simplified into a rotate first. If that
9290 is unsuccessful continue in the association code. */
9291 goto bit_rotate;
9293 case BIT_AND_EXPR:
9294 if (integer_all_onesp (arg1))
9295 return non_lvalue (fold_convert (type, arg0));
9296 if (integer_zerop (arg1))
9297 return omit_one_operand (type, arg1, arg0);
9298 if (operand_equal_p (arg0, arg1, 0))
9299 return non_lvalue (fold_convert (type, arg0));
9301 /* ~X & X is always zero. */
9302 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9303 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9304 return omit_one_operand (type, integer_zero_node, arg1);
9306 /* X & ~X is always zero. */
9307 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9308 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9309 return omit_one_operand (type, integer_zero_node, arg0);
9311 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9312 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9313 && TREE_CODE (arg1) == INTEGER_CST
9314 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9315 return fold_build2 (BIT_IOR_EXPR, type,
9316 fold_build2 (BIT_AND_EXPR, type,
9317 TREE_OPERAND (arg0, 0), arg1),
9318 fold_build2 (BIT_AND_EXPR, type,
9319 TREE_OPERAND (arg0, 1), arg1));
9321 /* (X | Y) & Y is (X, Y). */
9322 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9323 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9324 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9325 /* (X | Y) & X is (Y, X). */
9326 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9327 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9328 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9329 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9330 /* X & (X | Y) is (Y, X). */
9331 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9332 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9333 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9334 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9335 /* X & (Y | X) is (Y, X). */
9336 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9337 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9338 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9339 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9341 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9342 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9343 && integer_onep (TREE_OPERAND (arg0, 1))
9344 && integer_onep (arg1))
9346 tem = TREE_OPERAND (arg0, 0);
9347 return fold_build2 (EQ_EXPR, type,
9348 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9349 build_int_cst (TREE_TYPE (tem), 1)),
9350 build_int_cst (TREE_TYPE (tem), 0));
9352 /* Fold ~X & 1 as (X & 1) == 0. */
9353 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9354 && integer_onep (arg1))
9356 tem = TREE_OPERAND (arg0, 0);
9357 return fold_build2 (EQ_EXPR, type,
9358 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9359 build_int_cst (TREE_TYPE (tem), 1)),
9360 build_int_cst (TREE_TYPE (tem), 0));
9363 /* Fold (X ^ Y) & Y as ~X & Y. */
9364 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9365 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9367 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9368 return fold_build2 (BIT_AND_EXPR, type,
9369 fold_build1 (BIT_NOT_EXPR, type, tem),
9370 fold_convert (type, arg1));
9372 /* Fold (X ^ Y) & X as ~Y & X. */
9373 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9374 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9375 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9377 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9378 return fold_build2 (BIT_AND_EXPR, type,
9379 fold_build1 (BIT_NOT_EXPR, type, tem),
9380 fold_convert (type, arg1));
9382 /* Fold X & (X ^ Y) as X & ~Y. */
9383 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9384 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9386 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9387 return fold_build2 (BIT_AND_EXPR, type,
9388 fold_convert (type, arg0),
9389 fold_build1 (BIT_NOT_EXPR, type, tem));
9391 /* Fold X & (Y ^ X) as ~Y & X. */
9392 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9393 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9394 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9396 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9397 return fold_build2 (BIT_AND_EXPR, type,
9398 fold_build1 (BIT_NOT_EXPR, type, tem),
9399 fold_convert (type, arg0));
9402 t1 = distribute_bit_expr (code, type, arg0, arg1);
9403 if (t1 != NULL_TREE)
9404 return t1;
9405 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9406 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9407 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9409 unsigned int prec
9410 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9412 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9413 && (~TREE_INT_CST_LOW (arg1)
9414 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9415 return fold_convert (type, TREE_OPERAND (arg0, 0));
9418 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9420 This results in more efficient code for machines without a NOR
9421 instruction. Combine will canonicalize to the first form
9422 which will allow use of NOR instructions provided by the
9423 backend if they exist. */
9424 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9425 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9427 return fold_build1 (BIT_NOT_EXPR, type,
9428 build2 (BIT_IOR_EXPR, type,
9429 TREE_OPERAND (arg0, 0),
9430 TREE_OPERAND (arg1, 0)));
9433 goto associate;
9435 case RDIV_EXPR:
9436 /* Don't touch a floating-point divide by zero unless the mode
9437 of the constant can represent infinity. */
9438 if (TREE_CODE (arg1) == REAL_CST
9439 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9440 && real_zerop (arg1))
9441 return NULL_TREE;
9443 /* Optimize A / A to 1.0 if we don't care about
9444 NaNs or Infinities. Skip the transformation
9445 for non-real operands. */
9446 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9447 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9448 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9449 && operand_equal_p (arg0, arg1, 0))
9451 tree r = build_real (TREE_TYPE (arg0), dconst1);
9453 return omit_two_operands (type, r, arg0, arg1);
9456 /* The complex version of the above A / A optimization. */
9457 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9458 && operand_equal_p (arg0, arg1, 0))
9460 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9461 if (! HONOR_NANS (TYPE_MODE (elem_type))
9462 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9464 tree r = build_real (elem_type, dconst1);
9465 /* omit_two_operands will call fold_convert for us. */
9466 return omit_two_operands (type, r, arg0, arg1);
9470 /* (-A) / (-B) -> A / B */
9471 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9472 return fold_build2 (RDIV_EXPR, type,
9473 TREE_OPERAND (arg0, 0),
9474 negate_expr (arg1));
9475 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9476 return fold_build2 (RDIV_EXPR, type,
9477 negate_expr (arg0),
9478 TREE_OPERAND (arg1, 0));
9480 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9481 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9482 && real_onep (arg1))
9483 return non_lvalue (fold_convert (type, arg0));
9485 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9486 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9487 && real_minus_onep (arg1))
9488 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9490 /* If ARG1 is a constant, we can convert this to a multiply by the
9491 reciprocal. This does not have the same rounding properties,
9492 so only do this if -funsafe-math-optimizations. We can actually
9493 always safely do it if ARG1 is a power of two, but it's hard to
9494 tell if it is or not in a portable manner. */
9495 if (TREE_CODE (arg1) == REAL_CST)
9497 if (flag_unsafe_math_optimizations
9498 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9499 arg1, 0)))
9500 return fold_build2 (MULT_EXPR, type, arg0, tem);
9501 /* Find the reciprocal if optimizing and the result is exact. */
9502 if (optimize)
9504 REAL_VALUE_TYPE r;
9505 r = TREE_REAL_CST (arg1);
9506 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
9508 tem = build_real (type, r);
9509 return fold_build2 (MULT_EXPR, type,
9510 fold_convert (type, arg0), tem);
9514 /* Convert A/B/C to A/(B*C). */
9515 if (flag_unsafe_math_optimizations
9516 && TREE_CODE (arg0) == RDIV_EXPR)
9517 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9518 fold_build2 (MULT_EXPR, type,
9519 TREE_OPERAND (arg0, 1), arg1));
9521 /* Convert A/(B/C) to (A/B)*C. */
9522 if (flag_unsafe_math_optimizations
9523 && TREE_CODE (arg1) == RDIV_EXPR)
9524 return fold_build2 (MULT_EXPR, type,
9525 fold_build2 (RDIV_EXPR, type, arg0,
9526 TREE_OPERAND (arg1, 0)),
9527 TREE_OPERAND (arg1, 1));
9529 /* Convert C1/(X*C2) into (C1/C2)/X. */
9530 if (flag_unsafe_math_optimizations
9531 && TREE_CODE (arg1) == MULT_EXPR
9532 && TREE_CODE (arg0) == REAL_CST
9533 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9535 tree tem = const_binop (RDIV_EXPR, arg0,
9536 TREE_OPERAND (arg1, 1), 0);
9537 if (tem)
9538 return fold_build2 (RDIV_EXPR, type, tem,
9539 TREE_OPERAND (arg1, 0));
9542 if (flag_unsafe_math_optimizations)
9544 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9545 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9547 /* Optimize sin(x)/cos(x) as tan(x). */
9548 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9549 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9550 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9551 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9552 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9554 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9556 if (tanfn != NULL_TREE)
9557 return build_function_call_expr (tanfn,
9558 TREE_OPERAND (arg0, 1));
9561 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9562 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9563 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9564 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9565 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9566 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9568 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9570 if (tanfn != NULL_TREE)
9572 tree tmp = TREE_OPERAND (arg0, 1);
9573 tmp = build_function_call_expr (tanfn, tmp);
9574 return fold_build2 (RDIV_EXPR, type,
9575 build_real (type, dconst1), tmp);
9579 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9580 NaNs or Infinities. */
9581 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9582 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9583 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9585 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9586 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9588 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9589 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9590 && operand_equal_p (arg00, arg01, 0))
9592 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9594 if (cosfn != NULL_TREE)
9595 return build_function_call_expr (cosfn,
9596 TREE_OPERAND (arg0, 1));
9600 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9601 NaNs or Infinities. */
9602 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9603 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9604 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9606 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9607 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9609 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9610 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9611 && operand_equal_p (arg00, arg01, 0))
9613 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9615 if (cosfn != NULL_TREE)
9617 tree tmp = TREE_OPERAND (arg0, 1);
9618 tmp = build_function_call_expr (cosfn, tmp);
9619 return fold_build2 (RDIV_EXPR, type,
9620 build_real (type, dconst1),
9621 tmp);
9626 /* Optimize pow(x,c)/x as pow(x,c-1). */
9627 if (fcode0 == BUILT_IN_POW
9628 || fcode0 == BUILT_IN_POWF
9629 || fcode0 == BUILT_IN_POWL)
9631 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9632 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9633 if (TREE_CODE (arg01) == REAL_CST
9634 && ! TREE_CONSTANT_OVERFLOW (arg01)
9635 && operand_equal_p (arg1, arg00, 0))
9637 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9638 REAL_VALUE_TYPE c;
9639 tree arg, arglist;
9641 c = TREE_REAL_CST (arg01);
9642 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9643 arg = build_real (type, c);
9644 arglist = build_tree_list (NULL_TREE, arg);
9645 arglist = tree_cons (NULL_TREE, arg1, arglist);
9646 return build_function_call_expr (powfn, arglist);
9650 /* Optimize x/expN(y) into x*expN(-y). */
9651 if (BUILTIN_EXPONENT_P (fcode1))
9653 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9654 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
9655 tree arglist = build_tree_list (NULL_TREE,
9656 fold_convert (type, arg));
9657 arg1 = build_function_call_expr (expfn, arglist);
9658 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9661 /* Optimize x/pow(y,z) into x*pow(y,-z). */
9662 if (fcode1 == BUILT_IN_POW
9663 || fcode1 == BUILT_IN_POWF
9664 || fcode1 == BUILT_IN_POWL)
9666 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9667 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9668 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
9669 tree neg11 = fold_convert (type, negate_expr (arg11));
9670 tree arglist = tree_cons(NULL_TREE, arg10,
9671 build_tree_list (NULL_TREE, neg11));
9672 arg1 = build_function_call_expr (powfn, arglist);
9673 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9676 return NULL_TREE;
9678 case TRUNC_DIV_EXPR:
9679 case FLOOR_DIV_EXPR:
9680 /* Simplify A / (B << N) where A and B are positive and B is
9681 a power of 2, to A >> (N + log2(B)). */
9682 if (TREE_CODE (arg1) == LSHIFT_EXPR
9683 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9685 tree sval = TREE_OPERAND (arg1, 0);
9686 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
9688 tree sh_cnt = TREE_OPERAND (arg1, 1);
9689 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
9691 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
9692 sh_cnt, build_int_cst (NULL_TREE, pow2));
9693 return fold_build2 (RSHIFT_EXPR, type,
9694 fold_convert (type, arg0), sh_cnt);
9697 /* Fall thru */
9699 case ROUND_DIV_EXPR:
9700 case CEIL_DIV_EXPR:
9701 case EXACT_DIV_EXPR:
9702 if (integer_onep (arg1))
9703 return non_lvalue (fold_convert (type, arg0));
9704 if (integer_zerop (arg1))
9705 return NULL_TREE;
9706 /* X / -1 is -X. */
9707 if (!TYPE_UNSIGNED (type)
9708 && TREE_CODE (arg1) == INTEGER_CST
9709 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9710 && TREE_INT_CST_HIGH (arg1) == -1)
9711 return fold_convert (type, negate_expr (arg0));
9713 /* Convert -A / -B to A / B when the type is signed and overflow is
9714 undefined. */
9715 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9716 && TREE_CODE (arg0) == NEGATE_EXPR
9717 && negate_expr_p (arg1))
9718 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9719 negate_expr (arg1));
9720 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9721 && TREE_CODE (arg1) == NEGATE_EXPR
9722 && negate_expr_p (arg0))
9723 return fold_build2 (code, type, negate_expr (arg0),
9724 TREE_OPERAND (arg1, 0));
9726 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
9727 operation, EXACT_DIV_EXPR.
9729 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
9730 At one time others generated faster code, it's not clear if they do
9731 after the last round to changes to the DIV code in expmed.c. */
9732 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
9733 && multiple_of_p (type, arg0, arg1))
9734 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
9736 if (TREE_CODE (arg1) == INTEGER_CST
9737 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9738 return fold_convert (type, tem);
9740 return NULL_TREE;
9742 case CEIL_MOD_EXPR:
9743 case FLOOR_MOD_EXPR:
9744 case ROUND_MOD_EXPR:
9745 case TRUNC_MOD_EXPR:
9746 /* X % 1 is always zero, but be sure to preserve any side
9747 effects in X. */
9748 if (integer_onep (arg1))
9749 return omit_one_operand (type, integer_zero_node, arg0);
9751 /* X % 0, return X % 0 unchanged so that we can get the
9752 proper warnings and errors. */
9753 if (integer_zerop (arg1))
9754 return NULL_TREE;
9756 /* 0 % X is always zero, but be sure to preserve any side
9757 effects in X. Place this after checking for X == 0. */
9758 if (integer_zerop (arg0))
9759 return omit_one_operand (type, integer_zero_node, arg1);
9761 /* X % -1 is zero. */
9762 if (!TYPE_UNSIGNED (type)
9763 && TREE_CODE (arg1) == INTEGER_CST
9764 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9765 && TREE_INT_CST_HIGH (arg1) == -1)
9766 return omit_one_operand (type, integer_zero_node, arg0);
9768 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
9769 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
9770 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
9771 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9773 tree c = arg1;
9774 /* Also optimize A % (C << N) where C is a power of 2,
9775 to A & ((C << N) - 1). */
9776 if (TREE_CODE (arg1) == LSHIFT_EXPR)
9777 c = TREE_OPERAND (arg1, 0);
9779 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
9781 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
9782 arg1, integer_one_node);
9783 return fold_build2 (BIT_AND_EXPR, type,
9784 fold_convert (type, arg0),
9785 fold_convert (type, mask));
9789 /* X % -C is the same as X % C. */
9790 if (code == TRUNC_MOD_EXPR
9791 && !TYPE_UNSIGNED (type)
9792 && TREE_CODE (arg1) == INTEGER_CST
9793 && !TREE_CONSTANT_OVERFLOW (arg1)
9794 && TREE_INT_CST_HIGH (arg1) < 0
9795 && !flag_trapv
9796 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
9797 && !sign_bit_p (arg1, arg1))
9798 return fold_build2 (code, type, fold_convert (type, arg0),
9799 fold_convert (type, negate_expr (arg1)));
9801 /* X % -Y is the same as X % Y. */
9802 if (code == TRUNC_MOD_EXPR
9803 && !TYPE_UNSIGNED (type)
9804 && TREE_CODE (arg1) == NEGATE_EXPR
9805 && !flag_trapv)
9806 return fold_build2 (code, type, fold_convert (type, arg0),
9807 fold_convert (type, TREE_OPERAND (arg1, 0)));
9809 if (TREE_CODE (arg1) == INTEGER_CST
9810 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9811 return fold_convert (type, tem);
9813 return NULL_TREE;
9815 case LROTATE_EXPR:
9816 case RROTATE_EXPR:
9817 if (integer_all_onesp (arg0))
9818 return omit_one_operand (type, arg0, arg1);
9819 goto shift;
9821 case RSHIFT_EXPR:
9822 /* Optimize -1 >> x for arithmetic right shifts. */
9823 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
9824 return omit_one_operand (type, arg0, arg1);
9825 /* ... fall through ... */
9827 case LSHIFT_EXPR:
9828 shift:
9829 if (integer_zerop (arg1))
9830 return non_lvalue (fold_convert (type, arg0));
9831 if (integer_zerop (arg0))
9832 return omit_one_operand (type, arg0, arg1);
9834 /* Since negative shift count is not well-defined,
9835 don't try to compute it in the compiler. */
9836 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
9837 return NULL_TREE;
9839 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
9840 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
9841 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9842 && host_integerp (TREE_OPERAND (arg0, 1), false)
9843 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9845 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
9846 + TREE_INT_CST_LOW (arg1));
9848 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
9849 being well defined. */
9850 if (low >= TYPE_PRECISION (type))
9852 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
9853 low = low % TYPE_PRECISION (type);
9854 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
9855 return build_int_cst (type, 0);
9856 else
9857 low = TYPE_PRECISION (type) - 1;
9860 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9861 build_int_cst (type, low));
9864 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
9865 into x & ((unsigned)-1 >> c) for unsigned types. */
9866 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
9867 || (TYPE_UNSIGNED (type)
9868 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
9869 && host_integerp (arg1, false)
9870 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9871 && host_integerp (TREE_OPERAND (arg0, 1), false)
9872 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9874 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9875 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
9876 tree lshift;
9877 tree arg00;
9879 if (low0 == low1)
9881 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9883 lshift = build_int_cst (type, -1);
9884 lshift = int_const_binop (code, lshift, arg1, 0);
9886 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
9890 /* Rewrite an LROTATE_EXPR by a constant into an
9891 RROTATE_EXPR by a new constant. */
9892 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
9894 tree tem = build_int_cst (NULL_TREE,
9895 GET_MODE_BITSIZE (TYPE_MODE (type)));
9896 tem = fold_convert (TREE_TYPE (arg1), tem);
9897 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
9898 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
9901 /* If we have a rotate of a bit operation with the rotate count and
9902 the second operand of the bit operation both constant,
9903 permute the two operations. */
9904 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9905 && (TREE_CODE (arg0) == BIT_AND_EXPR
9906 || TREE_CODE (arg0) == BIT_IOR_EXPR
9907 || TREE_CODE (arg0) == BIT_XOR_EXPR)
9908 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9909 return fold_build2 (TREE_CODE (arg0), type,
9910 fold_build2 (code, type,
9911 TREE_OPERAND (arg0, 0), arg1),
9912 fold_build2 (code, type,
9913 TREE_OPERAND (arg0, 1), arg1));
9915 /* Two consecutive rotates adding up to the width of the mode can
9916 be ignored. */
9917 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9918 && TREE_CODE (arg0) == RROTATE_EXPR
9919 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9920 && TREE_INT_CST_HIGH (arg1) == 0
9921 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
9922 && ((TREE_INT_CST_LOW (arg1)
9923 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
9924 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
9925 return TREE_OPERAND (arg0, 0);
9927 return NULL_TREE;
9929 case MIN_EXPR:
9930 if (operand_equal_p (arg0, arg1, 0))
9931 return omit_one_operand (type, arg0, arg1);
9932 if (INTEGRAL_TYPE_P (type)
9933 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9934 return omit_one_operand (type, arg1, arg0);
9935 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
9936 if (tem)
9937 return tem;
9938 goto associate;
9940 case MAX_EXPR:
9941 if (operand_equal_p (arg0, arg1, 0))
9942 return omit_one_operand (type, arg0, arg1);
9943 if (INTEGRAL_TYPE_P (type)
9944 && TYPE_MAX_VALUE (type)
9945 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9946 return omit_one_operand (type, arg1, arg0);
9947 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
9948 if (tem)
9949 return tem;
9950 goto associate;
9952 case TRUTH_ANDIF_EXPR:
9953 /* Note that the operands of this must be ints
9954 and their values must be 0 or 1.
9955 ("true" is a fixed value perhaps depending on the language.) */
9956 /* If first arg is constant zero, return it. */
9957 if (integer_zerop (arg0))
9958 return fold_convert (type, arg0);
9959 case TRUTH_AND_EXPR:
9960 /* If either arg is constant true, drop it. */
9961 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
9962 return non_lvalue (fold_convert (type, arg1));
9963 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
9964 /* Preserve sequence points. */
9965 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
9966 return non_lvalue (fold_convert (type, arg0));
9967 /* If second arg is constant zero, result is zero, but first arg
9968 must be evaluated. */
9969 if (integer_zerop (arg1))
9970 return omit_one_operand (type, arg1, arg0);
9971 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
9972 case will be handled here. */
9973 if (integer_zerop (arg0))
9974 return omit_one_operand (type, arg0, arg1);
9976 /* !X && X is always false. */
9977 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9978 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9979 return omit_one_operand (type, integer_zero_node, arg1);
9980 /* X && !X is always false. */
9981 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
9982 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9983 return omit_one_operand (type, integer_zero_node, arg0);
9985 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
9986 means A >= Y && A != MAX, but in this case we know that
9987 A < X <= MAX. */
9989 if (!TREE_SIDE_EFFECTS (arg0)
9990 && !TREE_SIDE_EFFECTS (arg1))
9992 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
9993 if (tem && !operand_equal_p (tem, arg0, 0))
9994 return fold_build2 (code, type, tem, arg1);
9996 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
9997 if (tem && !operand_equal_p (tem, arg1, 0))
9998 return fold_build2 (code, type, arg0, tem);
10001 truth_andor:
10002 /* We only do these simplifications if we are optimizing. */
10003 if (!optimize)
10004 return NULL_TREE;
10006 /* Check for things like (A || B) && (A || C). We can convert this
10007 to A || (B && C). Note that either operator can be any of the four
10008 truth and/or operations and the transformation will still be
10009 valid. Also note that we only care about order for the
10010 ANDIF and ORIF operators. If B contains side effects, this
10011 might change the truth-value of A. */
10012 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10013 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10014 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10015 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10016 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10017 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10019 tree a00 = TREE_OPERAND (arg0, 0);
10020 tree a01 = TREE_OPERAND (arg0, 1);
10021 tree a10 = TREE_OPERAND (arg1, 0);
10022 tree a11 = TREE_OPERAND (arg1, 1);
10023 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10024 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10025 && (code == TRUTH_AND_EXPR
10026 || code == TRUTH_OR_EXPR));
10028 if (operand_equal_p (a00, a10, 0))
10029 return fold_build2 (TREE_CODE (arg0), type, a00,
10030 fold_build2 (code, type, a01, a11));
10031 else if (commutative && operand_equal_p (a00, a11, 0))
10032 return fold_build2 (TREE_CODE (arg0), type, a00,
10033 fold_build2 (code, type, a01, a10));
10034 else if (commutative && operand_equal_p (a01, a10, 0))
10035 return fold_build2 (TREE_CODE (arg0), type, a01,
10036 fold_build2 (code, type, a00, a11));
10038 /* This case if tricky because we must either have commutative
10039 operators or else A10 must not have side-effects. */
10041 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10042 && operand_equal_p (a01, a11, 0))
10043 return fold_build2 (TREE_CODE (arg0), type,
10044 fold_build2 (code, type, a00, a10),
10045 a01);
10048 /* See if we can build a range comparison. */
10049 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10050 return tem;
10052 /* Check for the possibility of merging component references. If our
10053 lhs is another similar operation, try to merge its rhs with our
10054 rhs. Then try to merge our lhs and rhs. */
10055 if (TREE_CODE (arg0) == code
10056 && 0 != (tem = fold_truthop (code, type,
10057 TREE_OPERAND (arg0, 1), arg1)))
10058 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10060 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10061 return tem;
10063 return NULL_TREE;
10065 case TRUTH_ORIF_EXPR:
10066 /* Note that the operands of this must be ints
10067 and their values must be 0 or true.
10068 ("true" is a fixed value perhaps depending on the language.) */
10069 /* If first arg is constant true, return it. */
10070 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10071 return fold_convert (type, arg0);
10072 case TRUTH_OR_EXPR:
10073 /* If either arg is constant zero, drop it. */
10074 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10075 return non_lvalue (fold_convert (type, arg1));
10076 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10077 /* Preserve sequence points. */
10078 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10079 return non_lvalue (fold_convert (type, arg0));
10080 /* If second arg is constant true, result is true, but we must
10081 evaluate first arg. */
10082 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10083 return omit_one_operand (type, arg1, arg0);
10084 /* Likewise for first arg, but note this only occurs here for
10085 TRUTH_OR_EXPR. */
10086 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10087 return omit_one_operand (type, arg0, arg1);
10089 /* !X || X is always true. */
10090 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10091 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10092 return omit_one_operand (type, integer_one_node, arg1);
10093 /* X || !X is always true. */
10094 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10095 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10096 return omit_one_operand (type, integer_one_node, arg0);
10098 goto truth_andor;
10100 case TRUTH_XOR_EXPR:
10101 /* If the second arg is constant zero, drop it. */
10102 if (integer_zerop (arg1))
10103 return non_lvalue (fold_convert (type, arg0));
10104 /* If the second arg is constant true, this is a logical inversion. */
10105 if (integer_onep (arg1))
10107 /* Only call invert_truthvalue if operand is a truth value. */
10108 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10109 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10110 else
10111 tem = invert_truthvalue (arg0);
10112 return non_lvalue (fold_convert (type, tem));
10114 /* Identical arguments cancel to zero. */
10115 if (operand_equal_p (arg0, arg1, 0))
10116 return omit_one_operand (type, integer_zero_node, arg0);
10118 /* !X ^ X is always true. */
10119 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10120 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10121 return omit_one_operand (type, integer_one_node, arg1);
10123 /* X ^ !X is always true. */
10124 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10125 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10126 return omit_one_operand (type, integer_one_node, arg0);
10128 return NULL_TREE;
10130 case EQ_EXPR:
10131 case NE_EXPR:
10132 tem = fold_comparison (code, type, op0, op1);
10133 if (tem != NULL_TREE)
10134 return tem;
10136 /* bool_var != 0 becomes bool_var. */
10137 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10138 && code == NE_EXPR)
10139 return non_lvalue (fold_convert (type, arg0));
10141 /* bool_var == 1 becomes bool_var. */
10142 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10143 && code == EQ_EXPR)
10144 return non_lvalue (fold_convert (type, arg0));
10146 /* bool_var != 1 becomes !bool_var. */
10147 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10148 && code == NE_EXPR)
10149 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10151 /* bool_var == 0 becomes !bool_var. */
10152 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10153 && code == EQ_EXPR)
10154 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10156 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
10157 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10158 && TREE_CODE (arg1) == INTEGER_CST)
10159 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10160 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10161 arg1));
10163 /* If this is an equality comparison of the address of a non-weak
10164 object against zero, then we know the result. */
10165 if (TREE_CODE (arg0) == ADDR_EXPR
10166 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10167 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10168 && integer_zerop (arg1))
10169 return constant_boolean_node (code != EQ_EXPR, type);
10171 /* If this is an equality comparison of the address of two non-weak,
10172 unaliased symbols neither of which are extern (since we do not
10173 have access to attributes for externs), then we know the result. */
10174 if (TREE_CODE (arg0) == ADDR_EXPR
10175 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10176 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10177 && ! lookup_attribute ("alias",
10178 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10179 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10180 && TREE_CODE (arg1) == ADDR_EXPR
10181 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10182 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10183 && ! lookup_attribute ("alias",
10184 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10185 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10187 /* We know that we're looking at the address of two
10188 non-weak, unaliased, static _DECL nodes.
10190 It is both wasteful and incorrect to call operand_equal_p
10191 to compare the two ADDR_EXPR nodes. It is wasteful in that
10192 all we need to do is test pointer equality for the arguments
10193 to the two ADDR_EXPR nodes. It is incorrect to use
10194 operand_equal_p as that function is NOT equivalent to a
10195 C equality test. It can in fact return false for two
10196 objects which would test as equal using the C equality
10197 operator. */
10198 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10199 return constant_boolean_node (equal
10200 ? code == EQ_EXPR : code != EQ_EXPR,
10201 type);
10204 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10205 a MINUS_EXPR of a constant, we can convert it into a comparison with
10206 a revised constant as long as no overflow occurs. */
10207 if (TREE_CODE (arg1) == INTEGER_CST
10208 && (TREE_CODE (arg0) == PLUS_EXPR
10209 || TREE_CODE (arg0) == MINUS_EXPR)
10210 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10211 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10212 ? MINUS_EXPR : PLUS_EXPR,
10213 arg1, TREE_OPERAND (arg0, 1), 0))
10214 && ! TREE_CONSTANT_OVERFLOW (tem))
10215 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10217 /* Similarly for a NEGATE_EXPR. */
10218 if (TREE_CODE (arg0) == NEGATE_EXPR
10219 && TREE_CODE (arg1) == INTEGER_CST
10220 && 0 != (tem = negate_expr (arg1))
10221 && TREE_CODE (tem) == INTEGER_CST
10222 && ! TREE_CONSTANT_OVERFLOW (tem))
10223 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10225 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10226 for !=. Don't do this for ordered comparisons due to overflow. */
10227 if (TREE_CODE (arg0) == MINUS_EXPR
10228 && integer_zerop (arg1))
10229 return fold_build2 (code, type,
10230 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10232 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10233 if (TREE_CODE (arg0) == ABS_EXPR
10234 && (integer_zerop (arg1) || real_zerop (arg1)))
10235 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10237 /* If this is an EQ or NE comparison with zero and ARG0 is
10238 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10239 two operations, but the latter can be done in one less insn
10240 on machines that have only two-operand insns or on which a
10241 constant cannot be the first operand. */
10242 if (TREE_CODE (arg0) == BIT_AND_EXPR
10243 && integer_zerop (arg1))
10245 tree arg00 = TREE_OPERAND (arg0, 0);
10246 tree arg01 = TREE_OPERAND (arg0, 1);
10247 if (TREE_CODE (arg00) == LSHIFT_EXPR
10248 && integer_onep (TREE_OPERAND (arg00, 0)))
10249 return
10250 fold_build2 (code, type,
10251 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10252 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10253 arg01, TREE_OPERAND (arg00, 1)),
10254 fold_convert (TREE_TYPE (arg0),
10255 integer_one_node)),
10256 arg1);
10257 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10258 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10259 return
10260 fold_build2 (code, type,
10261 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10262 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10263 arg00, TREE_OPERAND (arg01, 1)),
10264 fold_convert (TREE_TYPE (arg0),
10265 integer_one_node)),
10266 arg1);
10269 /* If this is an NE or EQ comparison of zero against the result of a
10270 signed MOD operation whose second operand is a power of 2, make
10271 the MOD operation unsigned since it is simpler and equivalent. */
10272 if (integer_zerop (arg1)
10273 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10274 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10275 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10276 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10277 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10278 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10280 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10281 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10282 fold_convert (newtype,
10283 TREE_OPERAND (arg0, 0)),
10284 fold_convert (newtype,
10285 TREE_OPERAND (arg0, 1)));
10287 return fold_build2 (code, type, newmod,
10288 fold_convert (newtype, arg1));
10291 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10292 C1 is a valid shift constant, and C2 is a power of two, i.e.
10293 a single bit. */
10294 if (TREE_CODE (arg0) == BIT_AND_EXPR
10295 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10296 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10297 == INTEGER_CST
10298 && integer_pow2p (TREE_OPERAND (arg0, 1))
10299 && integer_zerop (arg1))
10301 tree itype = TREE_TYPE (arg0);
10302 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10303 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10305 /* Check for a valid shift count. */
10306 if (TREE_INT_CST_HIGH (arg001) == 0
10307 && TREE_INT_CST_LOW (arg001) < prec)
10309 tree arg01 = TREE_OPERAND (arg0, 1);
10310 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10311 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10312 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10313 can be rewritten as (X & (C2 << C1)) != 0. */
10314 if ((log2 + TREE_INT_CST_LOW (arg01)) < prec)
10316 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10317 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10318 return fold_build2 (code, type, tem, arg1);
10320 /* Otherwise, for signed (arithmetic) shifts,
10321 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10322 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10323 else if (!TYPE_UNSIGNED (itype))
10324 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10325 arg000, build_int_cst (itype, 0));
10326 /* Otherwise, of unsigned (logical) shifts,
10327 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10328 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10329 else
10330 return omit_one_operand (type,
10331 code == EQ_EXPR ? integer_one_node
10332 : integer_zero_node,
10333 arg000);
10337 /* If this is an NE comparison of zero with an AND of one, remove the
10338 comparison since the AND will give the correct value. */
10339 if (code == NE_EXPR
10340 && integer_zerop (arg1)
10341 && TREE_CODE (arg0) == BIT_AND_EXPR
10342 && integer_onep (TREE_OPERAND (arg0, 1)))
10343 return fold_convert (type, arg0);
10345 /* If we have (A & C) == C where C is a power of 2, convert this into
10346 (A & C) != 0. Similarly for NE_EXPR. */
10347 if (TREE_CODE (arg0) == BIT_AND_EXPR
10348 && integer_pow2p (TREE_OPERAND (arg0, 1))
10349 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10350 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10351 arg0, fold_convert (TREE_TYPE (arg0),
10352 integer_zero_node));
10354 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10355 bit, then fold the expression into A < 0 or A >= 0. */
10356 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10357 if (tem)
10358 return tem;
10360 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10361 Similarly for NE_EXPR. */
10362 if (TREE_CODE (arg0) == BIT_AND_EXPR
10363 && TREE_CODE (arg1) == INTEGER_CST
10364 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10366 tree notc = fold_build1 (BIT_NOT_EXPR,
10367 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10368 TREE_OPERAND (arg0, 1));
10369 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10370 arg1, notc);
10371 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10372 if (integer_nonzerop (dandnotc))
10373 return omit_one_operand (type, rslt, arg0);
10376 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10377 Similarly for NE_EXPR. */
10378 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10379 && TREE_CODE (arg1) == INTEGER_CST
10380 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10382 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10383 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10384 TREE_OPERAND (arg0, 1), notd);
10385 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10386 if (integer_nonzerop (candnotd))
10387 return omit_one_operand (type, rslt, arg0);
10390 /* If this is a comparison of a field, we may be able to simplify it. */
10391 if (((TREE_CODE (arg0) == COMPONENT_REF
10392 && lang_hooks.can_use_bit_fields_p ())
10393 || TREE_CODE (arg0) == BIT_FIELD_REF)
10394 /* Handle the constant case even without -O
10395 to make sure the warnings are given. */
10396 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10398 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10399 if (t1)
10400 return t1;
10403 /* Optimize comparisons of strlen vs zero to a compare of the
10404 first character of the string vs zero. To wit,
10405 strlen(ptr) == 0 => *ptr == 0
10406 strlen(ptr) != 0 => *ptr != 0
10407 Other cases should reduce to one of these two (or a constant)
10408 due to the return value of strlen being unsigned. */
10409 if (TREE_CODE (arg0) == CALL_EXPR
10410 && integer_zerop (arg1))
10412 tree fndecl = get_callee_fndecl (arg0);
10413 tree arglist;
10415 if (fndecl
10416 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10417 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10418 && (arglist = TREE_OPERAND (arg0, 1))
10419 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10420 && ! TREE_CHAIN (arglist))
10422 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10423 return fold_build2 (code, type, iref,
10424 build_int_cst (TREE_TYPE (iref), 0));
10428 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10429 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10430 if (TREE_CODE (arg0) == RSHIFT_EXPR
10431 && integer_zerop (arg1)
10432 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10434 tree arg00 = TREE_OPERAND (arg0, 0);
10435 tree arg01 = TREE_OPERAND (arg0, 1);
10436 tree itype = TREE_TYPE (arg00);
10437 if (TREE_INT_CST_HIGH (arg01) == 0
10438 && TREE_INT_CST_LOW (arg01)
10439 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10441 if (TYPE_UNSIGNED (itype))
10443 itype = lang_hooks.types.signed_type (itype);
10444 arg00 = fold_convert (itype, arg00);
10446 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10447 type, arg00, build_int_cst (itype, 0));
10451 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10452 if (integer_zerop (arg1)
10453 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10454 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10455 TREE_OPERAND (arg0, 1));
10457 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10458 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10459 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10460 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10461 build_int_cst (TREE_TYPE (arg1), 0));
10462 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10463 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10464 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10465 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10466 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10467 build_int_cst (TREE_TYPE (arg1), 0));
10469 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10470 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10471 && TREE_CODE (arg1) == INTEGER_CST
10472 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10473 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10474 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10475 TREE_OPERAND (arg0, 1), arg1));
10477 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10478 (X & C) == 0 when C is a single bit. */
10479 if (TREE_CODE (arg0) == BIT_AND_EXPR
10480 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10481 && integer_zerop (arg1)
10482 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10484 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10485 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10486 TREE_OPERAND (arg0, 1));
10487 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10488 type, tem, arg1);
10491 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10492 constant C is a power of two, i.e. a single bit. */
10493 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10494 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10495 && integer_zerop (arg1)
10496 && integer_pow2p (TREE_OPERAND (arg0, 1))
10497 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10498 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10500 tree arg00 = TREE_OPERAND (arg0, 0);
10501 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10502 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10505 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10506 when is C is a power of two, i.e. a single bit. */
10507 if (TREE_CODE (arg0) == BIT_AND_EXPR
10508 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10509 && integer_zerop (arg1)
10510 && integer_pow2p (TREE_OPERAND (arg0, 1))
10511 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10512 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10514 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10515 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10516 arg000, TREE_OPERAND (arg0, 1));
10517 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10518 tem, build_int_cst (TREE_TYPE (tem), 0));
10521 /* If this is a comparison of two exprs that look like an
10522 ARRAY_REF of the same object, then we can fold this to a
10523 comparison of the two offsets. This is only safe for
10524 EQ_EXPR and NE_EXPR because of overflow issues. */
10526 tree base0, offset0, base1, offset1;
10528 if (extract_array_ref (arg0, &base0, &offset0)
10529 && extract_array_ref (arg1, &base1, &offset1)
10530 && operand_equal_p (base0, base1, 0))
10532 /* Handle no offsets on both sides specially. */
10533 if (offset0 == NULL_TREE && offset1 == NULL_TREE)
10534 return fold_build2 (code, type, integer_zero_node,
10535 integer_zero_node);
10537 if (!offset0 || !offset1
10538 || TREE_TYPE (offset0) == TREE_TYPE (offset1))
10540 if (offset0 == NULL_TREE)
10541 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
10542 if (offset1 == NULL_TREE)
10543 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
10544 return fold_build2 (code, type, offset0, offset1);
10549 if (integer_zerop (arg1)
10550 && tree_expr_nonzero_p (arg0))
10552 tree res = constant_boolean_node (code==NE_EXPR, type);
10553 return omit_one_operand (type, res, arg0);
10555 return NULL_TREE;
10557 case LT_EXPR:
10558 case GT_EXPR:
10559 case LE_EXPR:
10560 case GE_EXPR:
10561 tem = fold_comparison (code, type, op0, op1);
10562 if (tem != NULL_TREE)
10563 return tem;
10565 /* Transform comparisons of the form X +- C CMP X. */
10566 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10567 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10568 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10569 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10570 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10571 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
10572 && !(flag_wrapv || flag_trapv))))
10574 tree arg01 = TREE_OPERAND (arg0, 1);
10575 enum tree_code code0 = TREE_CODE (arg0);
10576 int is_positive;
10578 if (TREE_CODE (arg01) == REAL_CST)
10579 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10580 else
10581 is_positive = tree_int_cst_sgn (arg01);
10583 /* (X - c) > X becomes false. */
10584 if (code == GT_EXPR
10585 && ((code0 == MINUS_EXPR && is_positive >= 0)
10586 || (code0 == PLUS_EXPR && is_positive <= 0)))
10587 return constant_boolean_node (0, type);
10589 /* Likewise (X + c) < X becomes false. */
10590 if (code == LT_EXPR
10591 && ((code0 == PLUS_EXPR && is_positive >= 0)
10592 || (code0 == MINUS_EXPR && is_positive <= 0)))
10593 return constant_boolean_node (0, type);
10595 /* Convert (X - c) <= X to true. */
10596 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10597 && code == LE_EXPR
10598 && ((code0 == MINUS_EXPR && is_positive >= 0)
10599 || (code0 == PLUS_EXPR && is_positive <= 0)))
10600 return constant_boolean_node (1, type);
10602 /* Convert (X + c) >= X to true. */
10603 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10604 && code == GE_EXPR
10605 && ((code0 == PLUS_EXPR && is_positive >= 0)
10606 || (code0 == MINUS_EXPR && is_positive <= 0)))
10607 return constant_boolean_node (1, type);
10609 if (TREE_CODE (arg01) == INTEGER_CST)
10611 /* Convert X + c > X and X - c < X to true for integers. */
10612 if (code == GT_EXPR
10613 && ((code0 == PLUS_EXPR && is_positive > 0)
10614 || (code0 == MINUS_EXPR && is_positive < 0)))
10615 return constant_boolean_node (1, type);
10617 if (code == LT_EXPR
10618 && ((code0 == MINUS_EXPR && is_positive > 0)
10619 || (code0 == PLUS_EXPR && is_positive < 0)))
10620 return constant_boolean_node (1, type);
10622 /* Convert X + c <= X and X - c >= X to false for integers. */
10623 if (code == LE_EXPR
10624 && ((code0 == PLUS_EXPR && is_positive > 0)
10625 || (code0 == MINUS_EXPR && is_positive < 0)))
10626 return constant_boolean_node (0, type);
10628 if (code == GE_EXPR
10629 && ((code0 == MINUS_EXPR && is_positive > 0)
10630 || (code0 == PLUS_EXPR && is_positive < 0)))
10631 return constant_boolean_node (0, type);
10635 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10636 This transformation affects the cases which are handled in later
10637 optimizations involving comparisons with non-negative constants. */
10638 if (TREE_CODE (arg1) == INTEGER_CST
10639 && TREE_CODE (arg0) != INTEGER_CST
10640 && tree_int_cst_sgn (arg1) > 0)
10642 if (code == GE_EXPR)
10644 arg1 = const_binop (MINUS_EXPR, arg1,
10645 build_int_cst (TREE_TYPE (arg1), 1), 0);
10646 return fold_build2 (GT_EXPR, type, arg0,
10647 fold_convert (TREE_TYPE (arg0), arg1));
10649 if (code == LT_EXPR)
10651 arg1 = const_binop (MINUS_EXPR, arg1,
10652 build_int_cst (TREE_TYPE (arg1), 1), 0);
10653 return fold_build2 (LE_EXPR, type, arg0,
10654 fold_convert (TREE_TYPE (arg0), arg1));
10658 /* Comparisons with the highest or lowest possible integer of
10659 the specified size will have known values. */
10661 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
10663 if (TREE_CODE (arg1) == INTEGER_CST
10664 && ! TREE_CONSTANT_OVERFLOW (arg1)
10665 && width <= 2 * HOST_BITS_PER_WIDE_INT
10666 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10667 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10669 HOST_WIDE_INT signed_max_hi;
10670 unsigned HOST_WIDE_INT signed_max_lo;
10671 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
10673 if (width <= HOST_BITS_PER_WIDE_INT)
10675 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10676 - 1;
10677 signed_max_hi = 0;
10678 max_hi = 0;
10680 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10682 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10683 min_lo = 0;
10684 min_hi = 0;
10686 else
10688 max_lo = signed_max_lo;
10689 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10690 min_hi = -1;
10693 else
10695 width -= HOST_BITS_PER_WIDE_INT;
10696 signed_max_lo = -1;
10697 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10698 - 1;
10699 max_lo = -1;
10700 min_lo = 0;
10702 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10704 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10705 min_hi = 0;
10707 else
10709 max_hi = signed_max_hi;
10710 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10714 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
10715 && TREE_INT_CST_LOW (arg1) == max_lo)
10716 switch (code)
10718 case GT_EXPR:
10719 return omit_one_operand (type, integer_zero_node, arg0);
10721 case GE_EXPR:
10722 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10724 case LE_EXPR:
10725 return omit_one_operand (type, integer_one_node, arg0);
10727 case LT_EXPR:
10728 return fold_build2 (NE_EXPR, type, arg0, arg1);
10730 /* The GE_EXPR and LT_EXPR cases above are not normally
10731 reached because of previous transformations. */
10733 default:
10734 break;
10736 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10737 == max_hi
10738 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
10739 switch (code)
10741 case GT_EXPR:
10742 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10743 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10744 case LE_EXPR:
10745 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10746 return fold_build2 (NE_EXPR, type, arg0, arg1);
10747 default:
10748 break;
10750 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10751 == min_hi
10752 && TREE_INT_CST_LOW (arg1) == min_lo)
10753 switch (code)
10755 case LT_EXPR:
10756 return omit_one_operand (type, integer_zero_node, arg0);
10758 case LE_EXPR:
10759 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10761 case GE_EXPR:
10762 return omit_one_operand (type, integer_one_node, arg0);
10764 case GT_EXPR:
10765 return fold_build2 (NE_EXPR, type, op0, op1);
10767 default:
10768 break;
10770 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10771 == min_hi
10772 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
10773 switch (code)
10775 case GE_EXPR:
10776 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10777 return fold_build2 (NE_EXPR, type, arg0, arg1);
10778 case LT_EXPR:
10779 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10780 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10781 default:
10782 break;
10785 else if (!in_gimple_form
10786 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
10787 && TREE_INT_CST_LOW (arg1) == signed_max_lo
10788 && TYPE_UNSIGNED (TREE_TYPE (arg1))
10789 /* signed_type does not work on pointer types. */
10790 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
10792 /* The following case also applies to X < signed_max+1
10793 and X >= signed_max+1 because previous transformations. */
10794 if (code == LE_EXPR || code == GT_EXPR)
10796 tree st0, st1;
10797 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
10798 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
10799 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10800 type, fold_convert (st0, arg0),
10801 build_int_cst (st1, 0));
10807 /* If we are comparing an ABS_EXPR with a constant, we can
10808 convert all the cases into explicit comparisons, but they may
10809 well not be faster than doing the ABS and one comparison.
10810 But ABS (X) <= C is a range comparison, which becomes a subtraction
10811 and a comparison, and is probably faster. */
10812 if (code == LE_EXPR
10813 && TREE_CODE (arg1) == INTEGER_CST
10814 && TREE_CODE (arg0) == ABS_EXPR
10815 && ! TREE_SIDE_EFFECTS (arg0)
10816 && (0 != (tem = negate_expr (arg1)))
10817 && TREE_CODE (tem) == INTEGER_CST
10818 && ! TREE_CONSTANT_OVERFLOW (tem))
10819 return fold_build2 (TRUTH_ANDIF_EXPR, type,
10820 build2 (GE_EXPR, type,
10821 TREE_OPERAND (arg0, 0), tem),
10822 build2 (LE_EXPR, type,
10823 TREE_OPERAND (arg0, 0), arg1));
10825 /* Convert ABS_EXPR<x> >= 0 to true. */
10826 if (code == GE_EXPR
10827 && tree_expr_nonnegative_p (arg0)
10828 && (integer_zerop (arg1)
10829 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10830 && real_zerop (arg1))))
10831 return omit_one_operand (type, integer_one_node, arg0);
10833 /* Convert ABS_EXPR<x> < 0 to false. */
10834 if (code == LT_EXPR
10835 && tree_expr_nonnegative_p (arg0)
10836 && (integer_zerop (arg1) || real_zerop (arg1)))
10837 return omit_one_operand (type, integer_zero_node, arg0);
10839 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
10840 and similarly for >= into !=. */
10841 if ((code == LT_EXPR || code == GE_EXPR)
10842 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10843 && TREE_CODE (arg1) == LSHIFT_EXPR
10844 && integer_onep (TREE_OPERAND (arg1, 0)))
10845 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10846 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10847 TREE_OPERAND (arg1, 1)),
10848 build_int_cst (TREE_TYPE (arg0), 0));
10850 if ((code == LT_EXPR || code == GE_EXPR)
10851 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10852 && (TREE_CODE (arg1) == NOP_EXPR
10853 || TREE_CODE (arg1) == CONVERT_EXPR)
10854 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
10855 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
10856 return
10857 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10858 fold_convert (TREE_TYPE (arg0),
10859 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10860 TREE_OPERAND (TREE_OPERAND (arg1, 0),
10861 1))),
10862 build_int_cst (TREE_TYPE (arg0), 0));
10864 return NULL_TREE;
10866 case UNORDERED_EXPR:
10867 case ORDERED_EXPR:
10868 case UNLT_EXPR:
10869 case UNLE_EXPR:
10870 case UNGT_EXPR:
10871 case UNGE_EXPR:
10872 case UNEQ_EXPR:
10873 case LTGT_EXPR:
10874 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10876 t1 = fold_relational_const (code, type, arg0, arg1);
10877 if (t1 != NULL_TREE)
10878 return t1;
10881 /* If the first operand is NaN, the result is constant. */
10882 if (TREE_CODE (arg0) == REAL_CST
10883 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
10884 && (code != LTGT_EXPR || ! flag_trapping_math))
10886 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10887 ? integer_zero_node
10888 : integer_one_node;
10889 return omit_one_operand (type, t1, arg1);
10892 /* If the second operand is NaN, the result is constant. */
10893 if (TREE_CODE (arg1) == REAL_CST
10894 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
10895 && (code != LTGT_EXPR || ! flag_trapping_math))
10897 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10898 ? integer_zero_node
10899 : integer_one_node;
10900 return omit_one_operand (type, t1, arg0);
10903 /* Simplify unordered comparison of something with itself. */
10904 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
10905 && operand_equal_p (arg0, arg1, 0))
10906 return constant_boolean_node (1, type);
10908 if (code == LTGT_EXPR
10909 && !flag_trapping_math
10910 && operand_equal_p (arg0, arg1, 0))
10911 return constant_boolean_node (0, type);
10913 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
10915 tree targ0 = strip_float_extensions (arg0);
10916 tree targ1 = strip_float_extensions (arg1);
10917 tree newtype = TREE_TYPE (targ0);
10919 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
10920 newtype = TREE_TYPE (targ1);
10922 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
10923 return fold_build2 (code, type, fold_convert (newtype, targ0),
10924 fold_convert (newtype, targ1));
10927 return NULL_TREE;
10929 case COMPOUND_EXPR:
10930 /* When pedantic, a compound expression can be neither an lvalue
10931 nor an integer constant expression. */
10932 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
10933 return NULL_TREE;
10934 /* Don't let (0, 0) be null pointer constant. */
10935 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
10936 : fold_convert (type, arg1);
10937 return pedantic_non_lvalue (tem);
10939 case COMPLEX_EXPR:
10940 if ((TREE_CODE (arg0) == REAL_CST
10941 && TREE_CODE (arg1) == REAL_CST)
10942 || (TREE_CODE (arg0) == INTEGER_CST
10943 && TREE_CODE (arg1) == INTEGER_CST))
10944 return build_complex (type, arg0, arg1);
10945 return NULL_TREE;
10947 case ASSERT_EXPR:
10948 /* An ASSERT_EXPR should never be passed to fold_binary. */
10949 gcc_unreachable ();
10951 default:
10952 return NULL_TREE;
10953 } /* switch (code) */
10956 /* Callback for walk_tree, looking for LABEL_EXPR.
10957 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
10958 Do not check the sub-tree of GOTO_EXPR. */
10960 static tree
10961 contains_label_1 (tree *tp,
10962 int *walk_subtrees,
10963 void *data ATTRIBUTE_UNUSED)
10965 switch (TREE_CODE (*tp))
10967 case LABEL_EXPR:
10968 return *tp;
10969 case GOTO_EXPR:
10970 *walk_subtrees = 0;
10971 /* no break */
10972 default:
10973 return NULL_TREE;
10977 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
10978 accessible from outside the sub-tree. Returns NULL_TREE if no
10979 addressable label is found. */
10981 static bool
10982 contains_label_p (tree st)
10984 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
10987 /* Fold a ternary expression of code CODE and type TYPE with operands
10988 OP0, OP1, and OP2. Return the folded expression if folding is
10989 successful. Otherwise, return NULL_TREE. */
10991 tree
10992 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10994 tree tem;
10995 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
10996 enum tree_code_class kind = TREE_CODE_CLASS (code);
10998 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10999 && TREE_CODE_LENGTH (code) == 3);
11001 /* Strip any conversions that don't change the mode. This is safe
11002 for every expression, except for a comparison expression because
11003 its signedness is derived from its operands. So, in the latter
11004 case, only strip conversions that don't change the signedness.
11006 Note that this is done as an internal manipulation within the
11007 constant folder, in order to find the simplest representation of
11008 the arguments so that their form can be studied. In any cases,
11009 the appropriate type conversions should be put back in the tree
11010 that will get out of the constant folder. */
11011 if (op0)
11013 arg0 = op0;
11014 STRIP_NOPS (arg0);
11017 if (op1)
11019 arg1 = op1;
11020 STRIP_NOPS (arg1);
11023 switch (code)
11025 case COMPONENT_REF:
11026 if (TREE_CODE (arg0) == CONSTRUCTOR
11027 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11029 unsigned HOST_WIDE_INT idx;
11030 tree field, value;
11031 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11032 if (field == arg1)
11033 return value;
11035 return NULL_TREE;
11037 case COND_EXPR:
11038 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11039 so all simple results must be passed through pedantic_non_lvalue. */
11040 if (TREE_CODE (arg0) == INTEGER_CST)
11042 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11043 tem = integer_zerop (arg0) ? op2 : op1;
11044 /* Only optimize constant conditions when the selected branch
11045 has the same type as the COND_EXPR. This avoids optimizing
11046 away "c ? x : throw", where the throw has a void type.
11047 Avoid throwing away that operand which contains label. */
11048 if ((!TREE_SIDE_EFFECTS (unused_op)
11049 || !contains_label_p (unused_op))
11050 && (! VOID_TYPE_P (TREE_TYPE (tem))
11051 || VOID_TYPE_P (type)))
11052 return pedantic_non_lvalue (tem);
11053 return NULL_TREE;
11055 if (operand_equal_p (arg1, op2, 0))
11056 return pedantic_omit_one_operand (type, arg1, arg0);
11058 /* If we have A op B ? A : C, we may be able to convert this to a
11059 simpler expression, depending on the operation and the values
11060 of B and C. Signed zeros prevent all of these transformations,
11061 for reasons given above each one.
11063 Also try swapping the arguments and inverting the conditional. */
11064 if (COMPARISON_CLASS_P (arg0)
11065 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11066 arg1, TREE_OPERAND (arg0, 1))
11067 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11069 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11070 if (tem)
11071 return tem;
11074 if (COMPARISON_CLASS_P (arg0)
11075 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11076 op2,
11077 TREE_OPERAND (arg0, 1))
11078 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11080 tem = invert_truthvalue (arg0);
11081 if (COMPARISON_CLASS_P (tem))
11083 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11084 if (tem)
11085 return tem;
11089 /* If the second operand is simpler than the third, swap them
11090 since that produces better jump optimization results. */
11091 if (truth_value_p (TREE_CODE (arg0))
11092 && tree_swap_operands_p (op1, op2, false))
11094 /* See if this can be inverted. If it can't, possibly because
11095 it was a floating-point inequality comparison, don't do
11096 anything. */
11097 tem = invert_truthvalue (arg0);
11099 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
11100 return fold_build3 (code, type, tem, op2, op1);
11103 /* Convert A ? 1 : 0 to simply A. */
11104 if (integer_onep (op1)
11105 && integer_zerop (op2)
11106 /* If we try to convert OP0 to our type, the
11107 call to fold will try to move the conversion inside
11108 a COND, which will recurse. In that case, the COND_EXPR
11109 is probably the best choice, so leave it alone. */
11110 && type == TREE_TYPE (arg0))
11111 return pedantic_non_lvalue (arg0);
11113 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11114 over COND_EXPR in cases such as floating point comparisons. */
11115 if (integer_zerop (op1)
11116 && integer_onep (op2)
11117 && truth_value_p (TREE_CODE (arg0)))
11118 return pedantic_non_lvalue (fold_convert (type,
11119 invert_truthvalue (arg0)));
11121 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11122 if (TREE_CODE (arg0) == LT_EXPR
11123 && integer_zerop (TREE_OPERAND (arg0, 1))
11124 && integer_zerop (op2)
11125 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11126 return fold_convert (type,
11127 fold_build2 (BIT_AND_EXPR,
11128 TREE_TYPE (tem), tem,
11129 fold_convert (TREE_TYPE (tem), arg1)));
11131 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11132 already handled above. */
11133 if (TREE_CODE (arg0) == BIT_AND_EXPR
11134 && integer_onep (TREE_OPERAND (arg0, 1))
11135 && integer_zerop (op2)
11136 && integer_pow2p (arg1))
11138 tree tem = TREE_OPERAND (arg0, 0);
11139 STRIP_NOPS (tem);
11140 if (TREE_CODE (tem) == RSHIFT_EXPR
11141 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11142 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11143 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11144 return fold_build2 (BIT_AND_EXPR, type,
11145 TREE_OPERAND (tem, 0), arg1);
11148 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11149 is probably obsolete because the first operand should be a
11150 truth value (that's why we have the two cases above), but let's
11151 leave it in until we can confirm this for all front-ends. */
11152 if (integer_zerop (op2)
11153 && TREE_CODE (arg0) == NE_EXPR
11154 && integer_zerop (TREE_OPERAND (arg0, 1))
11155 && integer_pow2p (arg1)
11156 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11157 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11158 arg1, OEP_ONLY_CONST))
11159 return pedantic_non_lvalue (fold_convert (type,
11160 TREE_OPERAND (arg0, 0)));
11162 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11163 if (integer_zerop (op2)
11164 && truth_value_p (TREE_CODE (arg0))
11165 && truth_value_p (TREE_CODE (arg1)))
11166 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11167 fold_convert (type, arg0),
11168 arg1);
11170 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11171 if (integer_onep (op2)
11172 && truth_value_p (TREE_CODE (arg0))
11173 && truth_value_p (TREE_CODE (arg1)))
11175 /* Only perform transformation if ARG0 is easily inverted. */
11176 tem = invert_truthvalue (arg0);
11177 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
11178 return fold_build2 (TRUTH_ORIF_EXPR, type,
11179 fold_convert (type, tem),
11180 arg1);
11183 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11184 if (integer_zerop (arg1)
11185 && truth_value_p (TREE_CODE (arg0))
11186 && truth_value_p (TREE_CODE (op2)))
11188 /* Only perform transformation if ARG0 is easily inverted. */
11189 tem = invert_truthvalue (arg0);
11190 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
11191 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11192 fold_convert (type, tem),
11193 op2);
11196 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11197 if (integer_onep (arg1)
11198 && truth_value_p (TREE_CODE (arg0))
11199 && truth_value_p (TREE_CODE (op2)))
11200 return fold_build2 (TRUTH_ORIF_EXPR, type,
11201 fold_convert (type, arg0),
11202 op2);
11204 return NULL_TREE;
11206 case CALL_EXPR:
11207 /* Check for a built-in function. */
11208 if (TREE_CODE (op0) == ADDR_EXPR
11209 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11210 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11211 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11212 return NULL_TREE;
11214 case BIT_FIELD_REF:
11215 if (TREE_CODE (arg0) == VECTOR_CST
11216 && type == TREE_TYPE (TREE_TYPE (arg0))
11217 && host_integerp (arg1, 1)
11218 && host_integerp (op2, 1))
11220 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11221 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11223 if (width != 0
11224 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11225 && (idx % width) == 0
11226 && (idx = idx / width)
11227 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11229 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11230 while (idx-- > 0 && elements)
11231 elements = TREE_CHAIN (elements);
11232 if (elements)
11233 return TREE_VALUE (elements);
11234 else
11235 return fold_convert (type, integer_zero_node);
11238 return NULL_TREE;
11240 default:
11241 return NULL_TREE;
11242 } /* switch (code) */
11245 /* Perform constant folding and related simplification of EXPR.
11246 The related simplifications include x*1 => x, x*0 => 0, etc.,
11247 and application of the associative law.
11248 NOP_EXPR conversions may be removed freely (as long as we
11249 are careful not to change the type of the overall expression).
11250 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11251 but we can constant-fold them if they have constant operands. */
11253 #ifdef ENABLE_FOLD_CHECKING
11254 # define fold(x) fold_1 (x)
11255 static tree fold_1 (tree);
11256 static
11257 #endif
11258 tree
11259 fold (tree expr)
11261 const tree t = expr;
11262 enum tree_code code = TREE_CODE (t);
11263 enum tree_code_class kind = TREE_CODE_CLASS (code);
11264 tree tem;
11266 /* Return right away if a constant. */
11267 if (kind == tcc_constant)
11268 return t;
11270 if (IS_EXPR_CODE_CLASS (kind))
11272 tree type = TREE_TYPE (t);
11273 tree op0, op1, op2;
11275 switch (TREE_CODE_LENGTH (code))
11277 case 1:
11278 op0 = TREE_OPERAND (t, 0);
11279 tem = fold_unary (code, type, op0);
11280 return tem ? tem : expr;
11281 case 2:
11282 op0 = TREE_OPERAND (t, 0);
11283 op1 = TREE_OPERAND (t, 1);
11284 tem = fold_binary (code, type, op0, op1);
11285 return tem ? tem : expr;
11286 case 3:
11287 op0 = TREE_OPERAND (t, 0);
11288 op1 = TREE_OPERAND (t, 1);
11289 op2 = TREE_OPERAND (t, 2);
11290 tem = fold_ternary (code, type, op0, op1, op2);
11291 return tem ? tem : expr;
11292 default:
11293 break;
11297 switch (code)
11299 case CONST_DECL:
11300 return fold (DECL_INITIAL (t));
11302 default:
11303 return t;
11304 } /* switch (code) */
11307 #ifdef ENABLE_FOLD_CHECKING
11308 #undef fold
11310 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11311 static void fold_check_failed (tree, tree);
11312 void print_fold_checksum (tree);
11314 /* When --enable-checking=fold, compute a digest of expr before
11315 and after actual fold call to see if fold did not accidentally
11316 change original expr. */
11318 tree
11319 fold (tree expr)
11321 tree ret;
11322 struct md5_ctx ctx;
11323 unsigned char checksum_before[16], checksum_after[16];
11324 htab_t ht;
11326 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11327 md5_init_ctx (&ctx);
11328 fold_checksum_tree (expr, &ctx, ht);
11329 md5_finish_ctx (&ctx, checksum_before);
11330 htab_empty (ht);
11332 ret = fold_1 (expr);
11334 md5_init_ctx (&ctx);
11335 fold_checksum_tree (expr, &ctx, ht);
11336 md5_finish_ctx (&ctx, checksum_after);
11337 htab_delete (ht);
11339 if (memcmp (checksum_before, checksum_after, 16))
11340 fold_check_failed (expr, ret);
11342 return ret;
11345 void
11346 print_fold_checksum (tree expr)
11348 struct md5_ctx ctx;
11349 unsigned char checksum[16], cnt;
11350 htab_t ht;
11352 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11353 md5_init_ctx (&ctx);
11354 fold_checksum_tree (expr, &ctx, ht);
11355 md5_finish_ctx (&ctx, checksum);
11356 htab_delete (ht);
11357 for (cnt = 0; cnt < 16; ++cnt)
11358 fprintf (stderr, "%02x", checksum[cnt]);
11359 putc ('\n', stderr);
11362 static void
11363 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11365 internal_error ("fold check: original tree changed by fold");
11368 static void
11369 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11371 void **slot;
11372 enum tree_code code;
11373 struct tree_function_decl buf;
11374 int i, len;
11376 recursive_label:
11378 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11379 <= sizeof (struct tree_function_decl))
11380 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11381 if (expr == NULL)
11382 return;
11383 slot = htab_find_slot (ht, expr, INSERT);
11384 if (*slot != NULL)
11385 return;
11386 *slot = expr;
11387 code = TREE_CODE (expr);
11388 if (TREE_CODE_CLASS (code) == tcc_declaration
11389 && DECL_ASSEMBLER_NAME_SET_P (expr))
11391 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11392 memcpy ((char *) &buf, expr, tree_size (expr));
11393 expr = (tree) &buf;
11394 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11396 else if (TREE_CODE_CLASS (code) == tcc_type
11397 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11398 || TYPE_CACHED_VALUES_P (expr)
11399 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11401 /* Allow these fields to be modified. */
11402 memcpy ((char *) &buf, expr, tree_size (expr));
11403 expr = (tree) &buf;
11404 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11405 TYPE_POINTER_TO (expr) = NULL;
11406 TYPE_REFERENCE_TO (expr) = NULL;
11407 if (TYPE_CACHED_VALUES_P (expr))
11409 TYPE_CACHED_VALUES_P (expr) = 0;
11410 TYPE_CACHED_VALUES (expr) = NULL;
11413 md5_process_bytes (expr, tree_size (expr), ctx);
11414 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11415 if (TREE_CODE_CLASS (code) != tcc_type
11416 && TREE_CODE_CLASS (code) != tcc_declaration
11417 && code != TREE_LIST)
11418 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11419 switch (TREE_CODE_CLASS (code))
11421 case tcc_constant:
11422 switch (code)
11424 case STRING_CST:
11425 md5_process_bytes (TREE_STRING_POINTER (expr),
11426 TREE_STRING_LENGTH (expr), ctx);
11427 break;
11428 case COMPLEX_CST:
11429 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11430 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11431 break;
11432 case VECTOR_CST:
11433 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11434 break;
11435 default:
11436 break;
11438 break;
11439 case tcc_exceptional:
11440 switch (code)
11442 case TREE_LIST:
11443 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11444 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11445 expr = TREE_CHAIN (expr);
11446 goto recursive_label;
11447 break;
11448 case TREE_VEC:
11449 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11450 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11451 break;
11452 default:
11453 break;
11455 break;
11456 case tcc_expression:
11457 case tcc_reference:
11458 case tcc_comparison:
11459 case tcc_unary:
11460 case tcc_binary:
11461 case tcc_statement:
11462 len = TREE_CODE_LENGTH (code);
11463 for (i = 0; i < len; ++i)
11464 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11465 break;
11466 case tcc_declaration:
11467 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11468 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11469 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11471 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11472 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11473 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11474 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11475 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11477 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11478 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11480 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11482 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11483 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11484 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
11486 break;
11487 case tcc_type:
11488 if (TREE_CODE (expr) == ENUMERAL_TYPE)
11489 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11490 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
11491 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
11492 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
11493 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
11494 if (INTEGRAL_TYPE_P (expr)
11495 || SCALAR_FLOAT_TYPE_P (expr))
11497 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
11498 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
11500 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
11501 if (TREE_CODE (expr) == RECORD_TYPE
11502 || TREE_CODE (expr) == UNION_TYPE
11503 || TREE_CODE (expr) == QUAL_UNION_TYPE)
11504 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
11505 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
11506 break;
11507 default:
11508 break;
11512 #endif
11514 /* Fold a unary tree expression with code CODE of type TYPE with an
11515 operand OP0. Return a folded expression if successful. Otherwise,
11516 return a tree expression with code CODE of type TYPE with an
11517 operand OP0. */
11519 tree
11520 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
11522 tree tem;
11523 #ifdef ENABLE_FOLD_CHECKING
11524 unsigned char checksum_before[16], checksum_after[16];
11525 struct md5_ctx ctx;
11526 htab_t ht;
11528 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11529 md5_init_ctx (&ctx);
11530 fold_checksum_tree (op0, &ctx, ht);
11531 md5_finish_ctx (&ctx, checksum_before);
11532 htab_empty (ht);
11533 #endif
11535 tem = fold_unary (code, type, op0);
11536 if (!tem)
11537 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
11539 #ifdef ENABLE_FOLD_CHECKING
11540 md5_init_ctx (&ctx);
11541 fold_checksum_tree (op0, &ctx, ht);
11542 md5_finish_ctx (&ctx, checksum_after);
11543 htab_delete (ht);
11545 if (memcmp (checksum_before, checksum_after, 16))
11546 fold_check_failed (op0, tem);
11547 #endif
11548 return tem;
11551 /* Fold a binary tree expression with code CODE of type TYPE with
11552 operands OP0 and OP1. Return a folded expression if successful.
11553 Otherwise, return a tree expression with code CODE of type TYPE
11554 with operands OP0 and OP1. */
11556 tree
11557 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
11558 MEM_STAT_DECL)
11560 tree tem;
11561 #ifdef ENABLE_FOLD_CHECKING
11562 unsigned char checksum_before_op0[16],
11563 checksum_before_op1[16],
11564 checksum_after_op0[16],
11565 checksum_after_op1[16];
11566 struct md5_ctx ctx;
11567 htab_t ht;
11569 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11570 md5_init_ctx (&ctx);
11571 fold_checksum_tree (op0, &ctx, ht);
11572 md5_finish_ctx (&ctx, checksum_before_op0);
11573 htab_empty (ht);
11575 md5_init_ctx (&ctx);
11576 fold_checksum_tree (op1, &ctx, ht);
11577 md5_finish_ctx (&ctx, checksum_before_op1);
11578 htab_empty (ht);
11579 #endif
11581 tem = fold_binary (code, type, op0, op1);
11582 if (!tem)
11583 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
11585 #ifdef ENABLE_FOLD_CHECKING
11586 md5_init_ctx (&ctx);
11587 fold_checksum_tree (op0, &ctx, ht);
11588 md5_finish_ctx (&ctx, checksum_after_op0);
11589 htab_empty (ht);
11591 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11592 fold_check_failed (op0, tem);
11594 md5_init_ctx (&ctx);
11595 fold_checksum_tree (op1, &ctx, ht);
11596 md5_finish_ctx (&ctx, checksum_after_op1);
11597 htab_delete (ht);
11599 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11600 fold_check_failed (op1, tem);
11601 #endif
11602 return tem;
11605 /* Fold a ternary tree expression with code CODE of type TYPE with
11606 operands OP0, OP1, and OP2. Return a folded expression if
11607 successful. Otherwise, return a tree expression with code CODE of
11608 type TYPE with operands OP0, OP1, and OP2. */
11610 tree
11611 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
11612 MEM_STAT_DECL)
11614 tree tem;
11615 #ifdef ENABLE_FOLD_CHECKING
11616 unsigned char checksum_before_op0[16],
11617 checksum_before_op1[16],
11618 checksum_before_op2[16],
11619 checksum_after_op0[16],
11620 checksum_after_op1[16],
11621 checksum_after_op2[16];
11622 struct md5_ctx ctx;
11623 htab_t ht;
11625 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11626 md5_init_ctx (&ctx);
11627 fold_checksum_tree (op0, &ctx, ht);
11628 md5_finish_ctx (&ctx, checksum_before_op0);
11629 htab_empty (ht);
11631 md5_init_ctx (&ctx);
11632 fold_checksum_tree (op1, &ctx, ht);
11633 md5_finish_ctx (&ctx, checksum_before_op1);
11634 htab_empty (ht);
11636 md5_init_ctx (&ctx);
11637 fold_checksum_tree (op2, &ctx, ht);
11638 md5_finish_ctx (&ctx, checksum_before_op2);
11639 htab_empty (ht);
11640 #endif
11642 tem = fold_ternary (code, type, op0, op1, op2);
11643 if (!tem)
11644 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
11646 #ifdef ENABLE_FOLD_CHECKING
11647 md5_init_ctx (&ctx);
11648 fold_checksum_tree (op0, &ctx, ht);
11649 md5_finish_ctx (&ctx, checksum_after_op0);
11650 htab_empty (ht);
11652 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11653 fold_check_failed (op0, tem);
11655 md5_init_ctx (&ctx);
11656 fold_checksum_tree (op1, &ctx, ht);
11657 md5_finish_ctx (&ctx, checksum_after_op1);
11658 htab_empty (ht);
11660 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11661 fold_check_failed (op1, tem);
11663 md5_init_ctx (&ctx);
11664 fold_checksum_tree (op2, &ctx, ht);
11665 md5_finish_ctx (&ctx, checksum_after_op2);
11666 htab_delete (ht);
11668 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
11669 fold_check_failed (op2, tem);
11670 #endif
11671 return tem;
11674 /* Perform constant folding and related simplification of initializer
11675 expression EXPR. These behave identically to "fold_buildN" but ignore
11676 potential run-time traps and exceptions that fold must preserve. */
/* Temporarily disable the trap/rounding flags so initializer folding is
   not inhibited by run-time trap preservation; END_FOLD_INIT restores
   them.  Both macros expand to declarations + statements and must be
   used as matched pairs at the top/bottom of a block.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv
11694 tree
11695 fold_build1_initializer (enum tree_code code, tree type, tree op)
11697 tree result;
11698 START_FOLD_INIT;
11700 result = fold_build1 (code, type, op);
11702 END_FOLD_INIT;
11703 return result;
11706 tree
11707 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
11709 tree result;
11710 START_FOLD_INIT;
11712 result = fold_build2 (code, type, op0, op1);
11714 END_FOLD_INIT;
11715 return result;
11718 tree
11719 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
11720 tree op2)
11722 tree result;
11723 START_FOLD_INIT;
11725 result = fold_build3 (code, type, op0, op1, op2);
11727 END_FOLD_INIT;
11728 return result;
11731 #undef START_FOLD_INIT
11732 #undef END_FOLD_INIT
11734 /* Determine if first argument is a multiple of second argument. Return 0 if
11735 it is not, or we cannot easily determined it to be.
11737 An example of the sort of thing we care about (at this point; this routine
11738 could surely be made more general, and expanded to do what the *_DIV_EXPR's
11739 fold cases do now) is discovering that
11741 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
11743 is a multiple of
11745 SAVE_EXPR (J * 8)
11747 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
11749 This code also handles discovering that
11751 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
11753 is a multiple of 8 so we don't have to worry about dealing with a
11754 possible remainder.
11756 Note that we *look* inside a SAVE_EXPR only to determine how it was
11757 calculated; it is not safe for fold to do much of anything else with the
11758 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
11759 at run time. For example, the latter example above *cannot* be implemented
11760 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
11761 evaluation time of the original SAVE_EXPR is not necessarily the same at
11762 the time the new expression is evaluated. The only optimization of this
11763 sort that would be valid is changing
11765 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
11767 divided by 8 to
11769 SAVE_EXPR (I) * SAVE_EXPR (J)
11771 (where the same SAVE_EXPR (J) is used in the original and the
11772 transformed version). */
11774 static int
11775 multiple_of_p (tree type, tree top, tree bottom)
11777 if (operand_equal_p (top, bottom, 0))
11778 return 1;
11780 if (TREE_CODE (type) != INTEGER_TYPE)
11781 return 0;
11783 switch (TREE_CODE (top))
11785 case BIT_AND_EXPR:
11786 /* Bitwise and provides a power of two multiple. If the mask is
11787 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
11788 if (!integer_pow2p (bottom))
11789 return 0;
11790 /* FALLTHRU */
11792 case MULT_EXPR:
11793 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
11794 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
11796 case PLUS_EXPR:
11797 case MINUS_EXPR:
11798 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
11799 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
11801 case LSHIFT_EXPR:
11802 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
11804 tree op1, t1;
11806 op1 = TREE_OPERAND (top, 1);
11807 /* const_binop may not detect overflow correctly,
11808 so check for it explicitly here. */
11809 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
11810 > TREE_INT_CST_LOW (op1)
11811 && TREE_INT_CST_HIGH (op1) == 0
11812 && 0 != (t1 = fold_convert (type,
11813 const_binop (LSHIFT_EXPR,
11814 size_one_node,
11815 op1, 0)))
11816 && ! TREE_OVERFLOW (t1))
11817 return multiple_of_p (type, t1, bottom);
11819 return 0;
11821 case NOP_EXPR:
11822 /* Can't handle conversions from non-integral or wider integral type. */
11823 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
11824 || (TYPE_PRECISION (type)
11825 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
11826 return 0;
11828 /* .. fall through ... */
11830 case SAVE_EXPR:
11831 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
11833 case INTEGER_CST:
11834 if (TREE_CODE (bottom) != INTEGER_CST
11835 || (TYPE_UNSIGNED (type)
11836 && (tree_int_cst_sgn (top) < 0
11837 || tree_int_cst_sgn (bottom) < 0)))
11838 return 0;
11839 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
11840 top, bottom, 0));
11842 default:
11843 return 0;
11847 /* Return true if `t' is known to be non-negative. */
11850 tree_expr_nonnegative_p (tree t)
11852 if (t == error_mark_node)
11853 return 0;
11855 if (TYPE_UNSIGNED (TREE_TYPE (t)))
11856 return 1;
11858 switch (TREE_CODE (t))
11860 case SSA_NAME:
11861 /* Query VRP to see if it has recorded any information about
11862 the range of this object. */
11863 return ssa_name_nonnegative_p (t);
11865 case ABS_EXPR:
11866 /* We can't return 1 if flag_wrapv is set because
11867 ABS_EXPR<INT_MIN> = INT_MIN. */
11868 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
11869 return 1;
11870 break;
11872 case INTEGER_CST:
11873 return tree_int_cst_sgn (t) >= 0;
11875 case REAL_CST:
11876 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
11878 case PLUS_EXPR:
11879 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11880 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11881 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11883 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
11884 both unsigned and at least 2 bits shorter than the result. */
11885 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11886 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11887 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11889 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11890 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11891 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11892 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11894 unsigned int prec = MAX (TYPE_PRECISION (inner1),
11895 TYPE_PRECISION (inner2)) + 1;
11896 return prec < TYPE_PRECISION (TREE_TYPE (t));
11899 break;
11901 case MULT_EXPR:
11902 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11904 /* x * x for floating point x is always non-negative. */
11905 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
11906 return 1;
11907 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11908 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11911 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
11912 both unsigned and their total bits is shorter than the result. */
11913 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11914 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11915 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11917 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11918 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11919 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11920 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11921 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
11922 < TYPE_PRECISION (TREE_TYPE (t));
11924 return 0;
11926 case BIT_AND_EXPR:
11927 case MAX_EXPR:
11928 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11929 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11931 case BIT_IOR_EXPR:
11932 case BIT_XOR_EXPR:
11933 case MIN_EXPR:
11934 case RDIV_EXPR:
11935 case TRUNC_DIV_EXPR:
11936 case CEIL_DIV_EXPR:
11937 case FLOOR_DIV_EXPR:
11938 case ROUND_DIV_EXPR:
11939 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11940 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11942 case TRUNC_MOD_EXPR:
11943 case CEIL_MOD_EXPR:
11944 case FLOOR_MOD_EXPR:
11945 case ROUND_MOD_EXPR:
11946 case SAVE_EXPR:
11947 case NON_LVALUE_EXPR:
11948 case FLOAT_EXPR:
11949 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11951 case COMPOUND_EXPR:
11952 case MODIFY_EXPR:
11953 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11955 case BIND_EXPR:
11956 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
11958 case COND_EXPR:
11959 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
11960 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
11962 case NOP_EXPR:
11964 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11965 tree outer_type = TREE_TYPE (t);
11967 if (TREE_CODE (outer_type) == REAL_TYPE)
11969 if (TREE_CODE (inner_type) == REAL_TYPE)
11970 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11971 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11973 if (TYPE_UNSIGNED (inner_type))
11974 return 1;
11975 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11978 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
11980 if (TREE_CODE (inner_type) == REAL_TYPE)
11981 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
11982 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11983 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
11984 && TYPE_UNSIGNED (inner_type);
11987 break;
11989 case TARGET_EXPR:
11991 tree temp = TARGET_EXPR_SLOT (t);
11992 t = TARGET_EXPR_INITIAL (t);
11994 /* If the initializer is non-void, then it's a normal expression
11995 that will be assigned to the slot. */
11996 if (!VOID_TYPE_P (t))
11997 return tree_expr_nonnegative_p (t);
11999 /* Otherwise, the initializer sets the slot in some way. One common
12000 way is an assignment statement at the end of the initializer. */
12001 while (1)
12003 if (TREE_CODE (t) == BIND_EXPR)
12004 t = expr_last (BIND_EXPR_BODY (t));
12005 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12006 || TREE_CODE (t) == TRY_CATCH_EXPR)
12007 t = expr_last (TREE_OPERAND (t, 0));
12008 else if (TREE_CODE (t) == STATEMENT_LIST)
12009 t = expr_last (t);
12010 else
12011 break;
12013 if (TREE_CODE (t) == MODIFY_EXPR
12014 && TREE_OPERAND (t, 0) == temp)
12015 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12017 return 0;
12020 case CALL_EXPR:
12022 tree fndecl = get_callee_fndecl (t);
12023 tree arglist = TREE_OPERAND (t, 1);
12024 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12025 switch (DECL_FUNCTION_CODE (fndecl))
12027 CASE_FLT_FN (BUILT_IN_ACOS):
12028 CASE_FLT_FN (BUILT_IN_ACOSH):
12029 CASE_FLT_FN (BUILT_IN_CABS):
12030 CASE_FLT_FN (BUILT_IN_COSH):
12031 CASE_FLT_FN (BUILT_IN_ERFC):
12032 CASE_FLT_FN (BUILT_IN_EXP):
12033 CASE_FLT_FN (BUILT_IN_EXP10):
12034 CASE_FLT_FN (BUILT_IN_EXP2):
12035 CASE_FLT_FN (BUILT_IN_FABS):
12036 CASE_FLT_FN (BUILT_IN_FDIM):
12037 CASE_FLT_FN (BUILT_IN_HYPOT):
12038 CASE_FLT_FN (BUILT_IN_POW10):
12039 CASE_INT_FN (BUILT_IN_FFS):
12040 CASE_INT_FN (BUILT_IN_PARITY):
12041 CASE_INT_FN (BUILT_IN_POPCOUNT):
12042 /* Always true. */
12043 return 1;
12045 CASE_FLT_FN (BUILT_IN_SQRT):
12046 /* sqrt(-0.0) is -0.0. */
12047 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12048 return 1;
12049 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12051 CASE_FLT_FN (BUILT_IN_ASINH):
12052 CASE_FLT_FN (BUILT_IN_ATAN):
12053 CASE_FLT_FN (BUILT_IN_ATANH):
12054 CASE_FLT_FN (BUILT_IN_CBRT):
12055 CASE_FLT_FN (BUILT_IN_CEIL):
12056 CASE_FLT_FN (BUILT_IN_ERF):
12057 CASE_FLT_FN (BUILT_IN_EXPM1):
12058 CASE_FLT_FN (BUILT_IN_FLOOR):
12059 CASE_FLT_FN (BUILT_IN_FMOD):
12060 CASE_FLT_FN (BUILT_IN_FREXP):
12061 CASE_FLT_FN (BUILT_IN_LCEIL):
12062 CASE_FLT_FN (BUILT_IN_LDEXP):
12063 CASE_FLT_FN (BUILT_IN_LFLOOR):
12064 CASE_FLT_FN (BUILT_IN_LLCEIL):
12065 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12066 CASE_FLT_FN (BUILT_IN_LLRINT):
12067 CASE_FLT_FN (BUILT_IN_LLROUND):
12068 CASE_FLT_FN (BUILT_IN_LRINT):
12069 CASE_FLT_FN (BUILT_IN_LROUND):
12070 CASE_FLT_FN (BUILT_IN_MODF):
12071 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12072 CASE_FLT_FN (BUILT_IN_POW):
12073 CASE_FLT_FN (BUILT_IN_RINT):
12074 CASE_FLT_FN (BUILT_IN_ROUND):
12075 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12076 CASE_FLT_FN (BUILT_IN_SINH):
12077 CASE_FLT_FN (BUILT_IN_TANH):
12078 CASE_FLT_FN (BUILT_IN_TRUNC):
12079 /* True if the 1st argument is nonnegative. */
12080 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12082 CASE_FLT_FN (BUILT_IN_FMAX):
12083 /* True if the 1st OR 2nd arguments are nonnegative. */
12084 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12085 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12087 CASE_FLT_FN (BUILT_IN_FMIN):
12088 /* True if the 1st AND 2nd arguments are nonnegative. */
12089 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12090 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12092 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12093 /* True if the 2nd argument is nonnegative. */
12094 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12096 default:
12097 break;
12101 /* ... fall through ... */
12103 default:
12104 if (truth_value_p (TREE_CODE (t)))
12105 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12106 return 1;
12109 /* We don't know sign of `t', so be conservative and return false. */
12110 return 0;
12113 /* Return true when T is an address and is known to be nonzero.
12114 For floating point we further ensure that T is not denormal.
12115 Similar logic is present in nonzero_address in rtlanal.h. */
12117 bool
12118 tree_expr_nonzero_p (tree t)
12120 tree type = TREE_TYPE (t);
12122 /* Doing something useful for floating point would need more work. */
12123 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12124 return false;
12126 switch (TREE_CODE (t))
12128 case SSA_NAME:
12129 /* Query VRP to see if it has recorded any information about
12130 the range of this object. */
12131 return ssa_name_nonzero_p (t);
12133 case ABS_EXPR:
12134 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12136 case INTEGER_CST:
12137 /* We used to test for !integer_zerop here. This does not work correctly
12138 if TREE_CONSTANT_OVERFLOW (t). */
12139 return (TREE_INT_CST_LOW (t) != 0
12140 || TREE_INT_CST_HIGH (t) != 0);
12142 case PLUS_EXPR:
12143 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12145 /* With the presence of negative values it is hard
12146 to say something. */
12147 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12148 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12149 return false;
12150 /* One of operands must be positive and the other non-negative. */
12151 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12152 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12154 break;
12156 case MULT_EXPR:
12157 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12159 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12160 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12162 break;
12164 case NOP_EXPR:
12166 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12167 tree outer_type = TREE_TYPE (t);
12169 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12170 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
12172 break;
12174 case ADDR_EXPR:
12176 tree base = get_base_address (TREE_OPERAND (t, 0));
12178 if (!base)
12179 return false;
12181 /* Weak declarations may link to NULL. */
12182 if (VAR_OR_FUNCTION_DECL_P (base))
12183 return !DECL_WEAK (base);
12185 /* Constants are never weak. */
12186 if (CONSTANT_CLASS_P (base))
12187 return true;
12189 return false;
12192 case COND_EXPR:
12193 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12194 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
12196 case MIN_EXPR:
12197 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12198 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12200 case MAX_EXPR:
12201 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12203 /* When both operands are nonzero, then MAX must be too. */
12204 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12205 return true;
12207 /* MAX where operand 0 is positive is positive. */
12208 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12210 /* MAX where operand 1 is positive is positive. */
12211 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12212 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12213 return true;
12214 break;
12216 case COMPOUND_EXPR:
12217 case MODIFY_EXPR:
12218 case BIND_EXPR:
12219 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
12221 case SAVE_EXPR:
12222 case NON_LVALUE_EXPR:
12223 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12225 case BIT_IOR_EXPR:
12226 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12227 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12229 case CALL_EXPR:
12230 return alloca_call_p (t);
12232 default:
12233 break;
12235 return false;
12238 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12239 attempt to fold the expression to a constant without modifying TYPE,
12240 OP0 or OP1.
12242 If the expression could be simplified to a constant, then return
12243 the constant. If the expression would not be simplified to a
12244 constant, then return NULL_TREE. */
12246 tree
12247 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12249 tree tem = fold_binary (code, type, op0, op1);
12250 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12253 /* Given the components of a unary expression CODE, TYPE and OP0,
12254 attempt to fold the expression to a constant without modifying
12255 TYPE or OP0.
12257 If the expression could be simplified to a constant, then return
12258 the constant. If the expression would not be simplified to a
12259 constant, then return NULL_TREE. */
12261 tree
12262 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12264 tree tem = fold_unary (code, type, op0);
12265 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12268 /* If EXP represents referencing an element in a constant string
12269 (either via pointer arithmetic or array indexing), return the
12270 tree representing the value accessed, otherwise return NULL. */
12272 tree
12273 fold_read_from_constant_string (tree exp)
12275 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
12277 tree exp1 = TREE_OPERAND (exp, 0);
12278 tree index;
12279 tree string;
12281 if (TREE_CODE (exp) == INDIRECT_REF)
12282 string = string_constant (exp1, &index);
12283 else
12285 tree low_bound = array_ref_low_bound (exp);
12286 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12288 /* Optimize the special-case of a zero lower bound.
12290 We convert the low_bound to sizetype to avoid some problems
12291 with constant folding. (E.g. suppose the lower bound is 1,
12292 and its mode is QI. Without the conversion,l (ARRAY
12293 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12294 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
12295 if (! integer_zerop (low_bound))
12296 index = size_diffop (index, fold_convert (sizetype, low_bound));
12298 string = exp1;
12301 if (string
12302 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12303 && TREE_CODE (string) == STRING_CST
12304 && TREE_CODE (index) == INTEGER_CST
12305 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12306 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12307 == MODE_INT)
12308 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12309 return fold_convert (TREE_TYPE (exp),
12310 build_int_cst (NULL_TREE,
12311 (TREE_STRING_POINTER (string)
12312 [TREE_INT_CST_LOW (index)])));
12314 return NULL;
12317 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12318 an integer constant or real constant.
12320 TYPE is the type of the result. */
12322 static tree
12323 fold_negate_const (tree arg0, tree type)
12325 tree t = NULL_TREE;
12327 switch (TREE_CODE (arg0))
12329 case INTEGER_CST:
12331 unsigned HOST_WIDE_INT low;
12332 HOST_WIDE_INT high;
12333 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12334 TREE_INT_CST_HIGH (arg0),
12335 &low, &high);
12336 t = build_int_cst_wide (type, low, high);
12337 t = force_fit_type (t, 1,
12338 (overflow | TREE_OVERFLOW (arg0))
12339 && !TYPE_UNSIGNED (type),
12340 TREE_CONSTANT_OVERFLOW (arg0));
12341 break;
12344 case REAL_CST:
12345 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12346 break;
12348 default:
12349 gcc_unreachable ();
12352 return t;
12355 /* Return the tree for abs (ARG0) when ARG0 is known to be either
12356 an integer constant or real constant.
12358 TYPE is the type of the result. */
12360 tree
12361 fold_abs_const (tree arg0, tree type)
12363 tree t = NULL_TREE;
12365 switch (TREE_CODE (arg0))
12367 case INTEGER_CST:
12368 /* If the value is unsigned, then the absolute value is
12369 the same as the ordinary value. */
12370 if (TYPE_UNSIGNED (type))
12371 t = arg0;
12372 /* Similarly, if the value is non-negative. */
12373 else if (INT_CST_LT (integer_minus_one_node, arg0))
12374 t = arg0;
12375 /* If the value is negative, then the absolute value is
12376 its negation. */
12377 else
12379 unsigned HOST_WIDE_INT low;
12380 HOST_WIDE_INT high;
12381 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12382 TREE_INT_CST_HIGH (arg0),
12383 &low, &high);
12384 t = build_int_cst_wide (type, low, high);
12385 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
12386 TREE_CONSTANT_OVERFLOW (arg0));
12388 break;
12390 case REAL_CST:
12391 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
12392 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12393 else
12394 t = arg0;
12395 break;
12397 default:
12398 gcc_unreachable ();
12401 return t;
12404 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
12405 constant. TYPE is the type of the result. */
12407 static tree
12408 fold_not_const (tree arg0, tree type)
12410 tree t = NULL_TREE;
12412 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
12414 t = build_int_cst_wide (type,
12415 ~ TREE_INT_CST_LOW (arg0),
12416 ~ TREE_INT_CST_HIGH (arg0));
12417 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
12418 TREE_CONSTANT_OVERFLOW (arg0));
12420 return t;
12423 /* Given CODE, a relational operator, the target type, TYPE and two
12424 constant operands OP0 and OP1, return the result of the
12425 relational operation. If the result is not a compile time
12426 constant, then return NULL_TREE. */
12428 static tree
12429 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
12431 int result, invert;
12433 /* From here on, the only cases we handle are when the result is
12434 known to be a constant. */
12436 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
12438 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
12439 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
12441 /* Handle the cases where either operand is a NaN. */
12442 if (real_isnan (c0) || real_isnan (c1))
12444 switch (code)
12446 case EQ_EXPR:
12447 case ORDERED_EXPR:
12448 result = 0;
12449 break;
12451 case NE_EXPR:
12452 case UNORDERED_EXPR:
12453 case UNLT_EXPR:
12454 case UNLE_EXPR:
12455 case UNGT_EXPR:
12456 case UNGE_EXPR:
12457 case UNEQ_EXPR:
12458 result = 1;
12459 break;
12461 case LT_EXPR:
12462 case LE_EXPR:
12463 case GT_EXPR:
12464 case GE_EXPR:
12465 case LTGT_EXPR:
12466 if (flag_trapping_math)
12467 return NULL_TREE;
12468 result = 0;
12469 break;
12471 default:
12472 gcc_unreachable ();
12475 return constant_boolean_node (result, type);
12478 return constant_boolean_node (real_compare (code, c0, c1), type);
12481 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
12483 To compute GT, swap the arguments and do LT.
12484 To compute GE, do LT and invert the result.
12485 To compute LE, swap the arguments, do LT and invert the result.
12486 To compute NE, do EQ and invert the result.
12488 Therefore, the code below must handle only EQ and LT. */
12490 if (code == LE_EXPR || code == GT_EXPR)
12492 tree tem = op0;
12493 op0 = op1;
12494 op1 = tem;
12495 code = swap_tree_comparison (code);
12498 /* Note that it is safe to invert for real values here because we
12499 have already handled the one case that it matters. */
12501 invert = 0;
12502 if (code == NE_EXPR || code == GE_EXPR)
12504 invert = 1;
12505 code = invert_tree_comparison (code, false);
12508 /* Compute a result for LT or EQ if args permit;
12509 Otherwise return T. */
12510 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
12512 if (code == EQ_EXPR)
12513 result = tree_int_cst_equal (op0, op1);
12514 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
12515 result = INT_CST_LT_UNSIGNED (op0, op1);
12516 else
12517 result = INT_CST_LT (op0, op1);
12519 else
12520 return NULL_TREE;
12522 if (invert)
12523 result ^= 1;
12524 return constant_boolean_node (result, type);
12527 /* Build an expression for the a clean point containing EXPR with type TYPE.
12528 Don't build a cleanup point expression for EXPR which don't have side
12529 effects. */
12531 tree
12532 fold_build_cleanup_point_expr (tree type, tree expr)
12534 /* If the expression does not have side effects then we don't have to wrap
12535 it with a cleanup point expression. */
12536 if (!TREE_SIDE_EFFECTS (expr))
12537 return expr;
12539 /* If the expression is a return, check to see if the expression inside the
12540 return has no side effects or the right hand side of the modify expression
12541 inside the return. If either don't have side effects set we don't need to
12542 wrap the expression in a cleanup point expression. Note we don't check the
12543 left hand side of the modify because it should always be a return decl. */
12544 if (TREE_CODE (expr) == RETURN_EXPR)
12546 tree op = TREE_OPERAND (expr, 0);
12547 if (!op || !TREE_SIDE_EFFECTS (op))
12548 return expr;
12549 op = TREE_OPERAND (op, 1);
12550 if (!TREE_SIDE_EFFECTS (op))
12551 return expr;
12554 return build1 (CLEANUP_POINT_EXPR, type, expr);
12557 /* Build an expression for the address of T. Folds away INDIRECT_REF to
12558 avoid confusing the gimplify process. */
12560 tree
12561 build_fold_addr_expr_with_type (tree t, tree ptrtype)
12563 /* The size of the object is not relevant when talking about its address. */
12564 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12565 t = TREE_OPERAND (t, 0);
12567 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
12568 if (TREE_CODE (t) == INDIRECT_REF
12569 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
12571 t = TREE_OPERAND (t, 0);
12572 if (TREE_TYPE (t) != ptrtype)
12573 t = build1 (NOP_EXPR, ptrtype, t);
12575 else
12577 tree base = t;
12579 while (handled_component_p (base))
12580 base = TREE_OPERAND (base, 0);
12581 if (DECL_P (base))
12582 TREE_ADDRESSABLE (base) = 1;
12584 t = build1 (ADDR_EXPR, ptrtype, t);
12587 return t;
12590 tree
12591 build_fold_addr_expr (tree t)
12593 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
12596 /* Given a pointer value OP0 and a type TYPE, return a simplified version
12597 of an indirection through OP0, or NULL_TREE if no simplification is
12598 possible. */
12600 tree
12601 fold_indirect_ref_1 (tree type, tree op0)
12603 tree sub = op0;
12604 tree subtype;
12606 STRIP_NOPS (sub);
12607 subtype = TREE_TYPE (sub);
12608 if (!POINTER_TYPE_P (subtype))
12609 return NULL_TREE;
12611 if (TREE_CODE (sub) == ADDR_EXPR)
12613 tree op = TREE_OPERAND (sub, 0);
12614 tree optype = TREE_TYPE (op);
12615 /* *&p => p; make sure to handle *&"str"[cst] here. */
12616 if (type == optype)
12618 tree fop = fold_read_from_constant_string (op);
12619 if (fop)
12620 return fop;
12621 else
12622 return op;
12624 /* *(foo *)&fooarray => fooarray[0] */
12625 else if (TREE_CODE (optype) == ARRAY_TYPE
12626 && type == TREE_TYPE (optype))
12628 tree type_domain = TYPE_DOMAIN (optype);
12629 tree min_val = size_zero_node;
12630 if (type_domain && TYPE_MIN_VALUE (type_domain))
12631 min_val = TYPE_MIN_VALUE (type_domain);
12632 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
12634 /* *(foo *)&complexfoo => __real__ complexfoo */
12635 else if (TREE_CODE (optype) == COMPLEX_TYPE
12636 && type == TREE_TYPE (optype))
12637 return fold_build1 (REALPART_EXPR, type, op);
12640 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
12641 if (TREE_CODE (sub) == PLUS_EXPR
12642 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
12644 tree op00 = TREE_OPERAND (sub, 0);
12645 tree op01 = TREE_OPERAND (sub, 1);
12646 tree op00type;
12648 STRIP_NOPS (op00);
12649 op00type = TREE_TYPE (op00);
12650 if (TREE_CODE (op00) == ADDR_EXPR
12651 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
12652 && type == TREE_TYPE (TREE_TYPE (op00type)))
12654 tree size = TYPE_SIZE_UNIT (type);
12655 if (tree_int_cst_equal (size, op01))
12656 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
12660 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
12661 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
12662 && type == TREE_TYPE (TREE_TYPE (subtype)))
12664 tree type_domain;
12665 tree min_val = size_zero_node;
12666 sub = build_fold_indirect_ref (sub);
12667 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
12668 if (type_domain && TYPE_MIN_VALUE (type_domain))
12669 min_val = TYPE_MIN_VALUE (type_domain);
12670 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
12673 return NULL_TREE;
12676 /* Builds an expression for an indirection through T, simplifying some
12677 cases. */
12679 tree
12680 build_fold_indirect_ref (tree t)
12682 tree type = TREE_TYPE (TREE_TYPE (t));
12683 tree sub = fold_indirect_ref_1 (type, t);
12685 if (sub)
12686 return sub;
12687 else
12688 return build1 (INDIRECT_REF, type, t);
12691 /* Given an INDIRECT_REF T, return either T or a simplified version. */
12693 tree
12694 fold_indirect_ref (tree t)
12696 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
12698 if (sub)
12699 return sub;
12700 else
12701 return t;
12704 /* Strip non-trapping, non-side-effecting tree nodes from an expression
12705 whose result is ignored. The type of the returned tree need not be
12706 the same as the original expression. */
12708 tree
12709 fold_ignored_result (tree t)
12711 if (!TREE_SIDE_EFFECTS (t))
12712 return integer_zero_node;
12714 for (;;)
12715 switch (TREE_CODE_CLASS (TREE_CODE (t)))
12717 case tcc_unary:
12718 t = TREE_OPERAND (t, 0);
12719 break;
12721 case tcc_binary:
12722 case tcc_comparison:
12723 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
12724 t = TREE_OPERAND (t, 0);
12725 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
12726 t = TREE_OPERAND (t, 1);
12727 else
12728 return t;
12729 break;
12731 case tcc_expression:
12732 switch (TREE_CODE (t))
12734 case COMPOUND_EXPR:
12735 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
12736 return t;
12737 t = TREE_OPERAND (t, 0);
12738 break;
12740 case COND_EXPR:
12741 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
12742 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
12743 return t;
12744 t = TREE_OPERAND (t, 0);
12745 break;
12747 default:
12748 return t;
12750 break;
12752 default:
12753 return t;
12757 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
12758 This can only be applied to objects of a sizetype. */
12760 tree
12761 round_up (tree value, int divisor)
12763 tree div = NULL_TREE;
12765 gcc_assert (divisor > 0);
12766 if (divisor == 1)
12767 return value;
12769 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
12770 have to do anything. Only do this when we are not given a const,
12771 because in that case, this check is more expensive than just
12772 doing it. */
12773 if (TREE_CODE (value) != INTEGER_CST)
12775 div = build_int_cst (TREE_TYPE (value), divisor);
12777 if (multiple_of_p (TREE_TYPE (value), value, div))
12778 return value;
12781 /* If divisor is a power of two, simplify this to bit manipulation. */
12782 if (divisor == (divisor & -divisor))
12784 tree t;
12786 t = build_int_cst (TREE_TYPE (value), divisor - 1);
12787 value = size_binop (PLUS_EXPR, value, t);
12788 t = build_int_cst (TREE_TYPE (value), -divisor);
12789 value = size_binop (BIT_AND_EXPR, value, t);
12791 else
12793 if (!div)
12794 div = build_int_cst (TREE_TYPE (value), divisor);
12795 value = size_binop (CEIL_DIV_EXPR, value, div);
12796 value = size_binop (MULT_EXPR, value, div);
12799 return value;
12802 /* Likewise, but round down. */
12804 tree
12805 round_down (tree value, int divisor)
12807 tree div = NULL_TREE;
12809 gcc_assert (divisor > 0);
12810 if (divisor == 1)
12811 return value;
12813 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
12814 have to do anything. Only do this when we are not given a const,
12815 because in that case, this check is more expensive than just
12816 doing it. */
12817 if (TREE_CODE (value) != INTEGER_CST)
12819 div = build_int_cst (TREE_TYPE (value), divisor);
12821 if (multiple_of_p (TREE_TYPE (value), value, div))
12822 return value;
12825 /* If divisor is a power of two, simplify this to bit manipulation. */
12826 if (divisor == (divisor & -divisor))
12828 tree t;
12830 t = build_int_cst (TREE_TYPE (value), -divisor);
12831 value = size_binop (BIT_AND_EXPR, value, t);
12833 else
12835 if (!div)
12836 div = build_int_cst (TREE_TYPE (value), divisor);
12837 value = size_binop (FLOOR_DIV_EXPR, value, div);
12838 value = size_binop (MULT_EXPR, value, div);
12841 return value;
12844 /* Returns the pointer to the base of the object addressed by EXP and
12845 extracts the information about the offset of the access, storing it
12846 to PBITPOS and POFFSET. */
12848 static tree
12849 split_address_to_core_and_offset (tree exp,
12850 HOST_WIDE_INT *pbitpos, tree *poffset)
12852 tree core;
12853 enum machine_mode mode;
12854 int unsignedp, volatilep;
12855 HOST_WIDE_INT bitsize;
12857 if (TREE_CODE (exp) == ADDR_EXPR)
12859 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
12860 poffset, &mode, &unsignedp, &volatilep,
12861 false);
12862 core = build_fold_addr_expr (core);
12864 else
12866 core = exp;
12867 *pbitpos = 0;
12868 *poffset = NULL_TREE;
12871 return core;
12874 /* Returns true if addresses of E1 and E2 differ by a constant, false
12875 otherwise. If they do, E1 - E2 is stored in *DIFF. */
12877 bool
12878 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
12880 tree core1, core2;
12881 HOST_WIDE_INT bitpos1, bitpos2;
12882 tree toffset1, toffset2, tdiff, type;
12884 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
12885 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
12887 if (bitpos1 % BITS_PER_UNIT != 0
12888 || bitpos2 % BITS_PER_UNIT != 0
12889 || !operand_equal_p (core1, core2, 0))
12890 return false;
12892 if (toffset1 && toffset2)
12894 type = TREE_TYPE (toffset1);
12895 if (type != TREE_TYPE (toffset2))
12896 toffset2 = fold_convert (type, toffset2);
12898 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
12899 if (!cst_and_fits_in_hwi (tdiff))
12900 return false;
12902 *diff = int_cst_value (tdiff);
12904 else if (toffset1 || toffset2)
12906 /* If only one of the offsets is non-constant, the difference cannot
12907 be a constant. */
12908 return false;
12910 else
12911 *diff = 0;
12913 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
12914 return true;
12917 /* Simplify the floating point expression EXP when the sign of the
12918 result is not significant. Return NULL_TREE if no simplification
12919 is possible. */
12921 tree
12922 fold_strip_sign_ops (tree exp)
12924 tree arg0, arg1;
12926 switch (TREE_CODE (exp))
12928 case ABS_EXPR:
12929 case NEGATE_EXPR:
12930 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
12931 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
12933 case MULT_EXPR:
12934 case RDIV_EXPR:
12935 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
12936 return NULL_TREE;
12937 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
12938 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
12939 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
12940 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
12941 arg0 ? arg0 : TREE_OPERAND (exp, 0),
12942 arg1 ? arg1 : TREE_OPERAND (exp, 1));
12943 break;
12945 default:
12946 break;
12948 return NULL_TREE;