/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
46 #include "config.h"
47 #include "system.h"
48 #include "coretypes.h"
49 #include "tm.h"
50 #include "flags.h"
51 #include "tree.h"
52 #include "real.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "toplev.h"
57 #include "ggc.h"
58 #include "hashtab.h"
59 #include "langhooks.h"
60 #include "md5.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static tree build_zero_vector (tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_relational_hi_lo (enum tree_code *, const tree,
                                   tree *, tree *);
static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
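
/* Worked example (using 8-bit values for brevity): for a = 0x7f and
   b = 0x01, sum = 0x80.  a and b have the same sign, so ~(a ^ b) has
   the sign bit set; a and sum differ in sign, so (a ^ sum) also has
   the sign bit set.  Their conjunction is negative, so the macro
   yields nonzero, correctly flagging signed overflow.  For a = 0x7f
   and b = 0xff (i.e. -1), sum = 0x7e; the operands differ in sign, no
   overflow is possible, and the macro yields zero.  */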
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
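
/* For example, assuming HOST_BITS_PER_WIDE_INT == 32: BASE is 0x10000,
   LOWPART (0x12345678) is 0x5678 and HIGHPART (0x12345678) is 0x1234,
   and indeed 0x5678 + 0x1234 * 0x10000 recovers 0x12345678.  */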
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
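
/* Continuing the example above (HOST_BITS_PER_WIDE_INT == 32),
   encode (words, 0x12345678, 0) stores {0x5678, 0x1234, 0, 0} in
   WORDS, and decode on that array reconstructs low = 0x12345678 and
   hi = 0; the two routines are exact inverses.  */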
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  OVERFLOWED_CONST
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
        OVERFLOWED_CONST is nonzero
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half?  */
      if (high & ((unsigned HOST_WIDE_INT) 1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half?  */
      if (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
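
/* For instance, fitting the value 0x1ff into an 8-bit unsigned type
   clears the bits above bit 7, yielding 0xff; since the value changed,
   a fresh INTEGER_CST is built, and the overflow flags are set or left
   clear according to OVERFLOWABLE and the OVERFLOWED/OVERFLOWED_CONST
   arguments as described above.  */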
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
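
/* The carry out of the low word is detected by the unsigned wraparound
   test (l < l1): e.g. l1 = ~(unsigned HOST_WIDE_INT) 0 and l2 = 1 give
   l = 0 < l1, so 1 is carried into the high word.  The return value
   reports signed overflow of the high-word addition, per
   OVERFLOW_SUM_SIGN above.  */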
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);        /* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
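
/* The final test implements signed-overflow detection: if the
   product's low doubleword is non-negative, the (sign-corrected) top
   doubleword must be all zero bits; if it is negative, the top must be
   all one bits (its sign extension).  Any other top half means the
   true product does not fit in two words.  */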
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero the extra element for scaling */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order divisor digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
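
/* Rounding examples, for signed 7 / 2 (quotient 3, remainder 1) and
   -7 / 2 (quotient -3, remainder -1):
     TRUNC_DIV_EXPR:  7 / 2 -> 3,  -7 / 2 -> -3  (toward zero)
     FLOOR_DIV_EXPR:  7 / 2 -> 3,  -7 / 2 -> -4  (toward -infinity)
     CEIL_DIV_EXPR:   7 / 2 -> 4,  -7 / 2 -> -3  (toward +infinity)
     ROUND_DIV_EXPR:  7 / 2 -> 4,  -7 / 2 -> -4  (to nearest; ties round
       away from zero, since the test above is 2 * |rem| >= |den|)
   The *_MOD_EXPR codes adjust the quotient the same way and then
   recompute the remainder so that num == quo * den + rem holds.  */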
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
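
/* The only signed value whose negation overflows is the most negative
   one, whose bit pattern is exactly 1 << (prec - 1).  E.g. for a
   32-bit int, negating INT_MIN (0x80000000) has no representable
   result, so the function returns false for that value and true for
   every other.  */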
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 0)));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 1)));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                           TREE_OPERAND (t, 1),
                                           TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               TREE_OPERAND (t, 0),
                                               negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               negate_expr (tem),
                                               TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
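
/* For example, splitting IN = x - 5 with CODE == PLUS_EXPR and
   NEGATE_P == 0: op1 is the literal 5 and was subtracted, so
   *minus_litp is set to 5, *litp and *conp stay null, and the variable
   part x is returned.  The caller can then rebuild the value as
   x - 5 via associate_trees below.  */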
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold (build2 (code, type, fold_convert (type, t1),
                       fold_convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
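
/* Example: folding the INTEGER_CSTs 7 and 3 under PLUS_EXPR yields an
   INTEGER_CST 10 of the same type.  In an 8-bit unsigned type, 200 +
   100 wraps to 44 once force_fit_type truncates to the type's
   precision; with NOTRUNC nonzero the value is left untruncated and
   only the overflow flags are propagated.  */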
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must have the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build2 (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                                  TREE_INT_CST_HIGH (arg1));

          t = force_fit_type (t,
                              /* Don't set the overflow when
                                 converting a pointer.  */
                              !POINTER_TYPE_P (TREE_TYPE (arg1)),
                              (TREE_INT_CST_HIGH (arg1) < 0
                               && (TYPE_UNSIGNED (type)
                                   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                              | TREE_OVERFLOW (arg1),
                              TREE_CONSTANT_OVERFLOW (arg1));
          return t;
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* The following code implements the floating point to integer
             conversion rules required by the Java Language Specification,
             that IEEE NaNs are mapped to zero and values that overflow
             the target precision saturate, i.e. values greater than
             INT_MAX are mapped to INT_MAX, and values less than INT_MIN
             are mapped to INT_MIN.  These semantics are allowed by the
             C and C++ standards that simply state that the behavior of
             FP-to-integer conversion is unspecified upon overflow.  */

          HOST_WIDE_INT high, low;
          REAL_VALUE_TYPE r;
          REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

          switch (code)
            {
            case FIX_TRUNC_EXPR:
              real_trunc (&r, VOIDmode, &x);
              break;

            case FIX_CEIL_EXPR:
              real_ceil (&r, VOIDmode, &x);
              break;

            case FIX_FLOOR_EXPR:
              real_floor (&r, VOIDmode, &x);
              break;

            case FIX_ROUND_EXPR:
              real_round (&r, VOIDmode, &x);
              break;

            default:
              gcc_unreachable ();
            }

          /* If R is NaN, return zero and show we have an overflow.  */
          if (REAL_VALUE_ISNAN (r))
            {
              overflow = 1;
              high = 0;
              low = 0;
            }

          /* See if R is less than the lower bound or greater than the
             upper bound.  */

          if (! overflow)
            {
              tree lt = TYPE_MIN_VALUE (type);
              REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
              if (REAL_VALUES_LESS (r, l))
                {
                  overflow = 1;
                  high = TREE_INT_CST_HIGH (lt);
                  low = TREE_INT_CST_LOW (lt);
                }
            }

          if (! overflow)
            {
              tree ut = TYPE_MAX_VALUE (type);
              if (ut)
                {
                  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
                  if (REAL_VALUES_LESS (u, r))
                    {
                      overflow = 1;
                      high = TREE_INT_CST_HIGH (ut);
                      low = TREE_INT_CST_LOW (ut);
                    }
                }
            }

          if (! overflow)
            REAL_VALUE_TO_INT (&low, &high, r);

          t = build_int_cst_wide (type, low, high);

          t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                              TREE_CONSTANT_OVERFLOW (arg1));
          return t;
        }
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  return NULL_TREE;
}
1857 /* Convert expression ARG to type TYPE. Used by the middle-end for
1858 simple conversions in preference to calling the front-end's convert. */
1860 tree
1861 fold_convert (tree type, tree arg)
1863 tree orig = TREE_TYPE (arg);
1864 tree tem;
1866 if (type == orig)
1867 return arg;
1869 if (TREE_CODE (arg) == ERROR_MARK
1870 || TREE_CODE (type) == ERROR_MARK
1871 || TREE_CODE (orig) == ERROR_MARK)
1872 return error_mark_node;
1874 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1875 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1876 TYPE_MAIN_VARIANT (orig)))
1877 return fold (build1 (NOP_EXPR, type, arg));
1879 switch (TREE_CODE (type))
1881 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1882 case POINTER_TYPE: case REFERENCE_TYPE:
1883 case OFFSET_TYPE:
1884 if (TREE_CODE (arg) == INTEGER_CST)
1886 tem = fold_convert_const (NOP_EXPR, type, arg);
1887 if (tem != NULL_TREE)
1888 return tem;
1890 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1891 || TREE_CODE (orig) == OFFSET_TYPE)
1892 return fold (build1 (NOP_EXPR, type, arg));
1893 if (TREE_CODE (orig) == COMPLEX_TYPE)
1895 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1896 return fold_convert (type, tem);
1898 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1899 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1900 return fold (build1 (NOP_EXPR, type, arg));
1902 case REAL_TYPE:
1903 if (TREE_CODE (arg) == INTEGER_CST)
1905 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1906 if (tem != NULL_TREE)
1907 return tem;
1909 else if (TREE_CODE (arg) == REAL_CST)
1911 tem = fold_convert_const (NOP_EXPR, type, arg);
1912 if (tem != NULL_TREE)
1913 return tem;
1916 switch (TREE_CODE (orig))
1918 case INTEGER_TYPE: case CHAR_TYPE:
1919 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1920 case POINTER_TYPE: case REFERENCE_TYPE:
1921 return fold (build1 (FLOAT_EXPR, type, arg));
1923 case REAL_TYPE:
1924 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1925 type, arg));
1927 case COMPLEX_TYPE:
1928 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1929 return fold_convert (type, tem);
1931 default:
1932 gcc_unreachable ();
1935 case COMPLEX_TYPE:
1936 switch (TREE_CODE (orig))
1938 case INTEGER_TYPE: case CHAR_TYPE:
1939 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1940 case POINTER_TYPE: case REFERENCE_TYPE:
1941 case REAL_TYPE:
1942 return build2 (COMPLEX_EXPR, type,
1943 fold_convert (TREE_TYPE (type), arg),
1944 fold_convert (TREE_TYPE (type), integer_zero_node));
1945 case COMPLEX_TYPE:
1947 tree rpart, ipart;
1949 if (TREE_CODE (arg) == COMPLEX_EXPR)
1951 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1952 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1953 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1956 arg = save_expr (arg);
1957 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1958 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1959 rpart = fold_convert (TREE_TYPE (type), rpart);
1960 ipart = fold_convert (TREE_TYPE (type), ipart);
1961 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1964 default:
1965 gcc_unreachable ();
1968 case VECTOR_TYPE:
1969 if (integer_zerop (arg))
1970 return build_zero_vector (type);
1971 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1972 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1973 || TREE_CODE (orig) == VECTOR_TYPE);
1974 return fold (build1 (NOP_EXPR, type, arg));
1976 case VOID_TYPE:
1977 return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
1979 default:
1980 gcc_unreachable ();
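/* Added usage sketch, not part of the original source; the constant and
   the type nodes are the standard globals, chosen only for illustration.

       tree i = build_int_cst (integer_type_node, 7);
       tree d = fold_convert (double_type_node, i);

   Here D is a REAL_CST holding 7.0: the REAL_TYPE arm above hands the
   INTEGER_CST to fold_convert_const (FLOAT_EXPR, ...), which folds the
   conversion at compile time.  Converting I to a complex double type
   instead goes through the COMPLEX_TYPE arm and builds
   COMPLEX_EXPR <7.0, 0.0>.  */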
1984 /* Return an expr equal to X but certainly not valid as an lvalue. */
1986 tree
1987 non_lvalue (tree x)
1989 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
1990 us. */
1991 if (in_gimple_form)
1992 return x;
1994 /* We only need to wrap lvalue tree codes. */
1995 switch (TREE_CODE (x))
1997 case VAR_DECL:
1998 case PARM_DECL:
1999 case RESULT_DECL:
2000 case LABEL_DECL:
2001 case FUNCTION_DECL:
2002 case SSA_NAME:
2004 case COMPONENT_REF:
2005 case INDIRECT_REF:
2006 case ALIGN_INDIRECT_REF:
2007 case MISALIGNED_INDIRECT_REF:
2008 case ARRAY_REF:
2009 case ARRAY_RANGE_REF:
2010 case BIT_FIELD_REF:
2011 case OBJ_TYPE_REF:
2013 case REALPART_EXPR:
2014 case IMAGPART_EXPR:
2015 case PREINCREMENT_EXPR:
2016 case PREDECREMENT_EXPR:
2017 case SAVE_EXPR:
2018 case TRY_CATCH_EXPR:
2019 case WITH_CLEANUP_EXPR:
2020 case COMPOUND_EXPR:
2021 case MODIFY_EXPR:
2022 case TARGET_EXPR:
2023 case COND_EXPR:
2024 case BIND_EXPR:
2025 case MIN_EXPR:
2026 case MAX_EXPR:
2027 break;
2029 default:
2030 /* Assume the worst for front-end tree codes. */
2031 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2032 break;
2033 return x;
2035 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2038 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2039 Zero means allow extended lvalues. */
2041 int pedantic_lvalues;
2043 /* When pedantic, return an expr equal to X but certainly not valid as a
2044 pedantic lvalue. Otherwise, return X. */
2046 static tree
2047 pedantic_non_lvalue (tree x)
2049 if (pedantic_lvalues)
2050 return non_lvalue (x);
2051 else
2052 return x;
2055 /* Given a tree comparison code, return the code that is the logical inverse
2056 of the given code. It is not safe to do this for floating-point
2057 comparisons, except for NE_EXPR and EQ_EXPR, so we receive an
2058 honor_nans flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2060 static enum tree_code
2061 invert_tree_comparison (enum tree_code code, bool honor_nans)
2063 if (honor_nans && flag_trapping_math)
2064 return ERROR_MARK;
2066 switch (code)
2068 case EQ_EXPR:
2069 return NE_EXPR;
2070 case NE_EXPR:
2071 return EQ_EXPR;
2072 case GT_EXPR:
2073 return honor_nans ? UNLE_EXPR : LE_EXPR;
2074 case GE_EXPR:
2075 return honor_nans ? UNLT_EXPR : LT_EXPR;
2076 case LT_EXPR:
2077 return honor_nans ? UNGE_EXPR : GE_EXPR;
2078 case LE_EXPR:
2079 return honor_nans ? UNGT_EXPR : GT_EXPR;
2080 case LTGT_EXPR:
2081 return UNEQ_EXPR;
2082 case UNEQ_EXPR:
2083 return LTGT_EXPR;
2084 case UNGT_EXPR:
2085 return LE_EXPR;
2086 case UNGE_EXPR:
2087 return LT_EXPR;
2088 case UNLT_EXPR:
2089 return GE_EXPR;
2090 case UNLE_EXPR:
2091 return GT_EXPR;
2092 case ORDERED_EXPR:
2093 return UNORDERED_EXPR;
2094 case UNORDERED_EXPR:
2095 return ORDERED_EXPR;
2096 default:
2097 gcc_unreachable ();
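/* Added worked example, not part of the original source.  Assuming
   flag_trapping_math is clear:

       invert_tree_comparison (LT_EXPR, false) == GE_EXPR
       invert_tree_comparison (LT_EXPR, true)  == UNGE_EXPR

   The UNGE form is required when NaNs are honored because !(x < y) is
   true for unordered operands, which x >= y would miss.  With both
   honor_nans and flag_trapping_math set, the function returns
   ERROR_MARK instead, since the inverted comparison could raise a
   different set of exceptions.  */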
2101 /* Similar, but return the comparison that results if the operands are
2102 swapped. This is safe for floating-point. */
2104 enum tree_code
2105 swap_tree_comparison (enum tree_code code)
2107 switch (code)
2109 case EQ_EXPR:
2110 case NE_EXPR:
2111 return code;
2112 case GT_EXPR:
2113 return LT_EXPR;
2114 case GE_EXPR:
2115 return LE_EXPR;
2116 case LT_EXPR:
2117 return GT_EXPR;
2118 case LE_EXPR:
2119 return GE_EXPR;
2120 default:
2121 gcc_unreachable ();
2126 /* Convert a comparison tree code from an enum tree_code representation
2127 into a compcode bit-based encoding. This function is the inverse of
2128 compcode_to_comparison. */
2130 static enum comparison_code
2131 comparison_to_compcode (enum tree_code code)
2133 switch (code)
2135 case LT_EXPR:
2136 return COMPCODE_LT;
2137 case EQ_EXPR:
2138 return COMPCODE_EQ;
2139 case LE_EXPR:
2140 return COMPCODE_LE;
2141 case GT_EXPR:
2142 return COMPCODE_GT;
2143 case NE_EXPR:
2144 return COMPCODE_NE;
2145 case GE_EXPR:
2146 return COMPCODE_GE;
2147 case ORDERED_EXPR:
2148 return COMPCODE_ORD;
2149 case UNORDERED_EXPR:
2150 return COMPCODE_UNORD;
2151 case UNLT_EXPR:
2152 return COMPCODE_UNLT;
2153 case UNEQ_EXPR:
2154 return COMPCODE_UNEQ;
2155 case UNLE_EXPR:
2156 return COMPCODE_UNLE;
2157 case UNGT_EXPR:
2158 return COMPCODE_UNGT;
2159 case LTGT_EXPR:
2160 return COMPCODE_LTGT;
2161 case UNGE_EXPR:
2162 return COMPCODE_UNGE;
2163 default:
2164 gcc_unreachable ();
2168 /* Convert a compcode bit-based encoding of a comparison operator back
2169 to GCC's enum tree_code representation. This function is the
2170 inverse of comparison_to_compcode. */
2172 static enum tree_code
2173 compcode_to_comparison (enum comparison_code code)
2175 switch (code)
2177 case COMPCODE_LT:
2178 return LT_EXPR;
2179 case COMPCODE_EQ:
2180 return EQ_EXPR;
2181 case COMPCODE_LE:
2182 return LE_EXPR;
2183 case COMPCODE_GT:
2184 return GT_EXPR;
2185 case COMPCODE_NE:
2186 return NE_EXPR;
2187 case COMPCODE_GE:
2188 return GE_EXPR;
2189 case COMPCODE_ORD:
2190 return ORDERED_EXPR;
2191 case COMPCODE_UNORD:
2192 return UNORDERED_EXPR;
2193 case COMPCODE_UNLT:
2194 return UNLT_EXPR;
2195 case COMPCODE_UNEQ:
2196 return UNEQ_EXPR;
2197 case COMPCODE_UNLE:
2198 return UNLE_EXPR;
2199 case COMPCODE_UNGT:
2200 return UNGT_EXPR;
2201 case COMPCODE_LTGT:
2202 return LTGT_EXPR;
2203 case COMPCODE_UNGE:
2204 return UNGE_EXPR;
2205 default:
2206 gcc_unreachable ();
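/* Added note, not part of the original source.  The compcode encoding
   gives each elementary outcome its own bit -- LT = 1, EQ = 2, GT = 4,
   UNORD = 8 -- and every comparison is the OR of the outcomes for
   which it holds, e.g.

       COMPCODE_LE   == COMPCODE_LT | COMPCODE_EQ                   (3)
       COMPCODE_NE   == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD  (13)
       COMPCODE_UNGE == COMPCODE_EQ | COMPCODE_GT | COMPCODE_UNORD  (14)

   This is what lets combine_comparisons below implement conjunction
   and disjunction as plain bitwise AND and OR.  */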
2210 /* Return a tree for the comparison which is the combination of
2211 doing the AND or OR (depending on CODE) of the two operations LCODE
2212 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2213 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2214 if this makes the transformation invalid. */
2216 tree
2217 combine_comparisons (enum tree_code code, enum tree_code lcode,
2218 enum tree_code rcode, tree truth_type,
2219 tree ll_arg, tree lr_arg)
2221 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2222 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2223 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2224 enum comparison_code compcode;
2226 switch (code)
2228 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2229 compcode = lcompcode & rcompcode;
2230 break;
2232 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2233 compcode = lcompcode | rcompcode;
2234 break;
2236 default:
2237 return NULL_TREE;
2240 if (!honor_nans)
2242 /* Eliminate unordered comparisons, as well as LTGT and ORD
2243 which are not used unless the mode has NaNs. */
2244 compcode &= ~COMPCODE_UNORD;
2245 if (compcode == COMPCODE_LTGT)
2246 compcode = COMPCODE_NE;
2247 else if (compcode == COMPCODE_ORD)
2248 compcode = COMPCODE_TRUE;
2250 else if (flag_trapping_math)
2252 /* Check that the original operation and the optimized ones will trap
2253 under the same condition. */
2254 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2255 && (lcompcode != COMPCODE_EQ)
2256 && (lcompcode != COMPCODE_ORD);
2257 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2258 && (rcompcode != COMPCODE_EQ)
2259 && (rcompcode != COMPCODE_ORD);
2260 bool trap = (compcode & COMPCODE_UNORD) == 0
2261 && (compcode != COMPCODE_EQ)
2262 && (compcode != COMPCODE_ORD);
2264 /* In a short-circuited boolean expression the LHS might be
2265 such that the RHS, if evaluated, will never trap. For
2266 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2267 if neither x nor y is NaN. (This is a mixed blessing: for
2268 example, the expression above will never trap, hence
2269 optimizing it to x < y would be invalid). */
2270 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2271 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2272 rtrap = false;
2274 /* If the comparison was short-circuited, and only the RHS
2275 trapped, we may now generate a spurious trap. */
2276 if (rtrap && !ltrap
2277 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2278 return NULL_TREE;
2280 /* If we changed the conditions that cause a trap, we lose. */
2281 if ((ltrap || rtrap) != trap)
2282 return NULL_TREE;
2285 if (compcode == COMPCODE_TRUE)
2286 return constant_boolean_node (true, truth_type);
2287 else if (compcode == COMPCODE_FALSE)
2288 return constant_boolean_node (false, truth_type);
2289 else
2290 return fold (build2 (compcode_to_comparison (compcode),
2291 truth_type, ll_arg, lr_arg));
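/* Added worked example, not part of the original source.  For integral
   X and Y, combining (x < y) || (x == y) gives lcompcode = COMPCODE_LT
   (1) and rcompcode = COMPCODE_EQ (2), so compcode = 1 | 2 =
   COMPCODE_LE and the call folds to x <= y.  Combining (x < y) &&
   (x > y) yields 1 & 4 = COMPCODE_FALSE, so
   constant_boolean_node (false, truth_type) is returned.  */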
2294 /* Return nonzero if CODE is a tree code that represents a truth value. */
2296 static int
2297 truth_value_p (enum tree_code code)
2299 return (TREE_CODE_CLASS (code) == tcc_comparison
2300 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2301 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2302 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2305 /* Return nonzero if two operands (typically of the same tree node)
2306 are necessarily equal. If either argument has side-effects this
2307 function returns zero. FLAGS modifies behavior as follows:
2309 If OEP_ONLY_CONST is set, only return nonzero for constants.
2310 This function tests whether the operands are indistinguishable;
2311 it does not test whether they are equal using C's == operation.
2312 The distinction is important for IEEE floating point, because
2313 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2314 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2316 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2317 even though it may hold multiple values during a function.
2318 This is because a GCC tree node guarantees that nothing else is
2319 executed between the evaluation of its "operands" (which may often
2320 be evaluated in arbitrary order). Hence if the operands themselves
2321 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2322 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2323 unset means assuming isochronic (or instantaneous) tree equivalence.
2324 Unless comparing arbitrary expression trees, such as from different
2325 statements, this flag can usually be left unset.
2327 If OEP_PURE_SAME is set, then pure functions with identical arguments
2328 are considered the same. It is used when the caller has other ways
2329 to ensure that global memory is unchanged in between. */
2331 int
2332 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2334 /* If either is ERROR_MARK, they aren't equal. */
2335 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2336 return 0;
2338 /* If both types don't have the same signedness, then we can't consider
2339 them equal. We must check this before the STRIP_NOPS calls
2340 because they may change the signedness of the arguments. */
2341 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2342 return 0;
2344 STRIP_NOPS (arg0);
2345 STRIP_NOPS (arg1);
2347 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2348 /* This is needed for conversions and for COMPONENT_REF.
2349 Might as well play it safe and always test this. */
2350 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2351 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2352 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2353 return 0;
2355 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2356 We don't care about side effects in that case because the SAVE_EXPR
2357 takes care of that for us. In all other cases, two expressions are
2358 equal if they have no side effects. If we have two identical
2359 expressions with side effects that should be treated the same due
2360 to the only side effects being identical SAVE_EXPR's, that will
2361 be detected in the recursive calls below. */
2362 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2363 && (TREE_CODE (arg0) == SAVE_EXPR
2364 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2365 return 1;
2367 /* Next handle constant cases, those for which we can return 1 even
2368 if ONLY_CONST is set. */
2369 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2370 switch (TREE_CODE (arg0))
2372 case INTEGER_CST:
2373 return (! TREE_CONSTANT_OVERFLOW (arg0)
2374 && ! TREE_CONSTANT_OVERFLOW (arg1)
2375 && tree_int_cst_equal (arg0, arg1));
2377 case REAL_CST:
2378 return (! TREE_CONSTANT_OVERFLOW (arg0)
2379 && ! TREE_CONSTANT_OVERFLOW (arg1)
2380 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2381 TREE_REAL_CST (arg1)));
2383 case VECTOR_CST:
2385 tree v1, v2;
2387 if (TREE_CONSTANT_OVERFLOW (arg0)
2388 || TREE_CONSTANT_OVERFLOW (arg1))
2389 return 0;
2391 v1 = TREE_VECTOR_CST_ELTS (arg0);
2392 v2 = TREE_VECTOR_CST_ELTS (arg1);
2393 while (v1 && v2)
2395 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2396 flags))
2397 return 0;
2398 v1 = TREE_CHAIN (v1);
2399 v2 = TREE_CHAIN (v2);
2402 return 1;
2405 case COMPLEX_CST:
2406 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2407 flags)
2408 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2409 flags));
2411 case STRING_CST:
2412 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2413 && ! memcmp (TREE_STRING_POINTER (arg0),
2414 TREE_STRING_POINTER (arg1),
2415 TREE_STRING_LENGTH (arg0)));
2417 case ADDR_EXPR:
2418 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2419 0);
2420 default:
2421 break;
2424 if (flags & OEP_ONLY_CONST)
2425 return 0;
2427 /* Define macros to test an operand from arg0 and arg1 for equality and a
2428 variant that allows null and views null as being different from any
2429 non-null value. In the latter case, if either is null, they both
2430 must be; otherwise, do the normal comparison. */
2431 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2432 TREE_OPERAND (arg1, N), flags)
2434 #define OP_SAME_WITH_NULL(N) \
2435 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2436 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2438 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2440 case tcc_unary:
2441 /* Two conversions are equal only if signedness and modes match. */
2442 switch (TREE_CODE (arg0))
2444 case NOP_EXPR:
2445 case CONVERT_EXPR:
2446 case FIX_CEIL_EXPR:
2447 case FIX_TRUNC_EXPR:
2448 case FIX_FLOOR_EXPR:
2449 case FIX_ROUND_EXPR:
2450 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2451 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2452 return 0;
2453 break;
2454 default:
2455 break;
2458 return OP_SAME (0);
2461 case tcc_comparison:
2462 case tcc_binary:
2463 if (OP_SAME (0) && OP_SAME (1))
2464 return 1;
2466 /* For commutative ops, allow the other order. */
2467 return (commutative_tree_code (TREE_CODE (arg0))
2468 && operand_equal_p (TREE_OPERAND (arg0, 0),
2469 TREE_OPERAND (arg1, 1), flags)
2470 && operand_equal_p (TREE_OPERAND (arg0, 1),
2471 TREE_OPERAND (arg1, 0), flags));
2473 case tcc_reference:
2474 /* If either of the pointer (or reference) expressions we are
2475 dereferencing contain a side effect, these cannot be equal. */
2476 if (TREE_SIDE_EFFECTS (arg0)
2477 || TREE_SIDE_EFFECTS (arg1))
2478 return 0;
2480 switch (TREE_CODE (arg0))
2482 case INDIRECT_REF:
2483 case ALIGN_INDIRECT_REF:
2484 case MISALIGNED_INDIRECT_REF:
2485 case REALPART_EXPR:
2486 case IMAGPART_EXPR:
2487 return OP_SAME (0);
2489 case ARRAY_REF:
2490 case ARRAY_RANGE_REF:
2491 /* Operands 2 and 3 may be null. */
2492 return (OP_SAME (0)
2493 && OP_SAME (1)
2494 && OP_SAME_WITH_NULL (2)
2495 && OP_SAME_WITH_NULL (3));
2497 case COMPONENT_REF:
2498 /* Handle operand 2 the same as for ARRAY_REF. */
2499 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2501 case BIT_FIELD_REF:
2502 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2504 default:
2505 return 0;
2508 case tcc_expression:
2509 switch (TREE_CODE (arg0))
2511 case ADDR_EXPR:
2512 case TRUTH_NOT_EXPR:
2513 return OP_SAME (0);
2515 case TRUTH_ANDIF_EXPR:
2516 case TRUTH_ORIF_EXPR:
2517 return OP_SAME (0) && OP_SAME (1);
2519 case TRUTH_AND_EXPR:
2520 case TRUTH_OR_EXPR:
2521 case TRUTH_XOR_EXPR:
2522 if (OP_SAME (0) && OP_SAME (1))
2523 return 1;
2525 /* Otherwise take into account this is a commutative operation. */
2526 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2527 TREE_OPERAND (arg1, 1), flags)
2528 && operand_equal_p (TREE_OPERAND (arg0, 1),
2529 TREE_OPERAND (arg1, 0), flags));
2531 case CALL_EXPR:
2532 /* If the CALL_EXPRs call different functions, then they
2533 clearly can not be equal. */
2534 if (!OP_SAME (0))
2535 return 0;
2538 unsigned int cef = call_expr_flags (arg0);
2539 if (flags & OEP_PURE_SAME)
2540 cef &= ECF_CONST | ECF_PURE;
2541 else
2542 cef &= ECF_CONST;
2543 if (!cef)
2544 return 0;
2547 /* Now see if all the arguments are the same. operand_equal_p
2548 does not handle TREE_LIST, so we walk the operands here
2549 feeding them to operand_equal_p. */
2550 arg0 = TREE_OPERAND (arg0, 1);
2551 arg1 = TREE_OPERAND (arg1, 1);
2552 while (arg0 && arg1)
2554 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2555 flags))
2556 return 0;
2558 arg0 = TREE_CHAIN (arg0);
2559 arg1 = TREE_CHAIN (arg1);
2562 /* If we get here and both argument lists are exhausted
2563 then the CALL_EXPRs are equal. */
2564 return ! (arg0 || arg1);
2566 default:
2567 return 0;
2570 case tcc_declaration:
2571 /* Consider __builtin_sqrt equal to sqrt. */
2572 return (TREE_CODE (arg0) == FUNCTION_DECL
2573 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2574 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2575 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2577 default:
2578 return 0;
2581 #undef OP_SAME
2582 #undef OP_SAME_WITH_NULL
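/* Added illustration, not part of the original source; A and B stand
   for arbitrary side-effect-free operands of matching type.  Because
   PLUS_EXPR is commutative, the tcc_binary case above accepts either
   operand order:

       operand_equal_p (build2 (PLUS_EXPR, type, a, b),
                        build2 (PLUS_EXPR, type, b, a), 0)  == 1

   With OEP_ONLY_CONST set, the same query returns 0 once A or B is a
   VAR_DECL, since under that flag only the constant cases may
   answer 1.  */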
2585 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2586 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2588 When in doubt, return 0. */
2590 static int
2591 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2593 int unsignedp1, unsignedpo;
2594 tree primarg0, primarg1, primother;
2595 unsigned int correct_width;
2597 if (operand_equal_p (arg0, arg1, 0))
2598 return 1;
2600 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2601 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2602 return 0;
2604 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2605 and see if the inner values are the same. This removes any
2606 signedness comparison, which doesn't matter here. */
2607 primarg0 = arg0, primarg1 = arg1;
2608 STRIP_NOPS (primarg0);
2609 STRIP_NOPS (primarg1);
2610 if (operand_equal_p (primarg0, primarg1, 0))
2611 return 1;
2613 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2614 actual comparison operand, ARG0.
2616 First throw away any conversions to wider types
2617 already present in the operands. */
2619 primarg1 = get_narrower (arg1, &unsignedp1);
2620 primother = get_narrower (other, &unsignedpo);
2622 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2623 if (unsignedp1 == unsignedpo
2624 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2625 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2627 tree type = TREE_TYPE (arg0);
2629 /* Make sure shorter operand is extended the right way
2630 to match the longer operand. */
2631 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2632 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2634 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2635 return 1;
2638 return 0;
2641 /* See if ARG is an expression that is either a comparison or is performing
2642 arithmetic on comparisons. The comparisons must only be comparing
2643 two different values, which will be stored in *CVAL1 and *CVAL2; if
2644 they are nonzero it means that some operands have already been found.
2645 No variables may be used anywhere else in the expression except in the
2646 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2647 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2649 If this is true, return 1. Otherwise, return zero. */
2651 static int
2652 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2654 enum tree_code code = TREE_CODE (arg);
2655 enum tree_code_class class = TREE_CODE_CLASS (code);
2657 /* We can handle some of the tcc_expression cases here. */
2658 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2659 class = tcc_unary;
2660 else if (class == tcc_expression
2661 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2662 || code == COMPOUND_EXPR))
2663 class = tcc_binary;
2665 else if (class == tcc_expression && code == SAVE_EXPR
2666 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2668 /* If we've already found a CVAL1 or CVAL2, this expression is
2669 too complex to handle. */
2670 if (*cval1 || *cval2)
2671 return 0;
2673 class = tcc_unary;
2674 *save_p = 1;
2677 switch (class)
2679 case tcc_unary:
2680 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2682 case tcc_binary:
2683 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2684 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2685 cval1, cval2, save_p));
2687 case tcc_constant:
2688 return 1;
2690 case tcc_expression:
2691 if (code == COND_EXPR)
2692 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2693 cval1, cval2, save_p)
2694 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2695 cval1, cval2, save_p)
2696 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2697 cval1, cval2, save_p));
2698 return 0;
2700 case tcc_comparison:
2701 /* First see if we can handle the first operand, then the second. For
2702 the second operand, we know *CVAL1 can't be zero. It must be that
2703 one side of the comparison is each of the values; test for the
2704 case where this isn't true by failing if the two operands
2705 are the same. */
2707 if (operand_equal_p (TREE_OPERAND (arg, 0),
2708 TREE_OPERAND (arg, 1), 0))
2709 return 0;
2711 if (*cval1 == 0)
2712 *cval1 = TREE_OPERAND (arg, 0);
2713 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2714 ;
2715 else if (*cval2 == 0)
2716 *cval2 = TREE_OPERAND (arg, 0);
2717 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2718 ;
2719 else
2720 return 0;
2722 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2723 ;
2724 else if (*cval2 == 0)
2725 *cval2 = TREE_OPERAND (arg, 1);
2726 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2727 ;
2728 else
2729 return 0;
2731 return 1;
2733 default:
2734 return 0;
2738 /* ARG is a tree that is known to contain just arithmetic operations and
2739 comparisons. Evaluate the operations in the tree substituting NEW0 for
2740 any occurrence of OLD0 as an operand of a comparison and likewise for
2741 NEW1 and OLD1. */
2743 static tree
2744 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2746 tree type = TREE_TYPE (arg);
2747 enum tree_code code = TREE_CODE (arg);
2748 enum tree_code_class class = TREE_CODE_CLASS (code);
2750 /* We can handle some of the tcc_expression cases here. */
2751 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2752 class = tcc_unary;
2753 else if (class == tcc_expression
2754 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2755 class = tcc_binary;
2757 switch (class)
2759 case tcc_unary:
2760 return fold (build1 (code, type,
2761 eval_subst (TREE_OPERAND (arg, 0),
2762 old0, new0, old1, new1)));
2764 case tcc_binary:
2765 return fold (build2 (code, type,
2766 eval_subst (TREE_OPERAND (arg, 0),
2767 old0, new0, old1, new1),
2768 eval_subst (TREE_OPERAND (arg, 1),
2769 old0, new0, old1, new1)));
2771 case tcc_expression:
2772 switch (code)
2774 case SAVE_EXPR:
2775 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2777 case COMPOUND_EXPR:
2778 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2780 case COND_EXPR:
2781 return fold (build3 (code, type,
2782 eval_subst (TREE_OPERAND (arg, 0),
2783 old0, new0, old1, new1),
2784 eval_subst (TREE_OPERAND (arg, 1),
2785 old0, new0, old1, new1),
2786 eval_subst (TREE_OPERAND (arg, 2),
2787 old0, new0, old1, new1)));
2788 default:
2789 break;
2791 /* Fall through - ??? */
2793 case tcc_comparison:
2795 tree arg0 = TREE_OPERAND (arg, 0);
2796 tree arg1 = TREE_OPERAND (arg, 1);
2798 /* We need to check both for exact equality and tree equality. The
2799 former will be true if the operand has a side-effect. In that
2800 case, we know the operand occurred exactly once. */
2802 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2803 arg0 = new0;
2804 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2805 arg0 = new1;
2807 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2808 arg1 = new0;
2809 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2810 arg1 = new1;
2812 return fold (build2 (code, type, arg0, arg1));
2815 default:
2816 return arg;
2820 /* Return a tree for the case when the result of an expression is RESULT
2821 converted to TYPE and OMITTED was previously an operand of the expression
2822 but is now not needed (e.g., we folded OMITTED * 0).
2824 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2825 the conversion of RESULT to TYPE. */
2827 tree
2828 omit_one_operand (tree type, tree result, tree omitted)
2830 tree t = fold_convert (type, result);
2832 if (TREE_SIDE_EFFECTS (omitted))
2833 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2835 return non_lvalue (t);
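/* Added usage sketch, not part of the original source.  When folding
   f () * 0 the product is known, but the call must still be evaluated,
   so a caller would write something like

       omit_one_operand (type, integer_zero_node, call_to_f);

   producing COMPOUND_EXPR <call_to_f, 0>.  If OMITTED has no side
   effects it is dropped entirely and the result is only wrapped in
   NON_LVALUE_EXPR.  */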
2838 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2840 static tree
2841 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2843 tree t = fold_convert (type, result);
2845 if (TREE_SIDE_EFFECTS (omitted))
2846 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2848 return pedantic_non_lvalue (t);
2851 /* Return a tree for the case when the result of an expression is RESULT
2852 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2853 of the expression but are now not needed.
2855 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2856 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2857 evaluated before OMITTED2. Otherwise, if neither has side effects,
2858 just do the conversion of RESULT to TYPE. */
2860 tree
2861 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2863 tree t = fold_convert (type, result);
2865 if (TREE_SIDE_EFFECTS (omitted2))
2866 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2867 if (TREE_SIDE_EFFECTS (omitted1))
2868 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2870 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2874 /* Return a simplified tree node for the truth-negation of ARG. This
2875 never alters ARG itself. We assume that ARG is an operation that
2876 returns a truth value (0 or 1).
2878 FIXME: one would think we would fold the result, but it causes
2879 problems with the dominator optimizer. */
2880 tree
2881 invert_truthvalue (tree arg)
2883 tree type = TREE_TYPE (arg);
2884 enum tree_code code = TREE_CODE (arg);
2886 if (code == ERROR_MARK)
2887 return arg;
2889 /* If this is a comparison, we can simply invert it, except for
2890 floating-point non-equality comparisons, in which case we just
2891 enclose a TRUTH_NOT_EXPR around what we have. */
2893 if (TREE_CODE_CLASS (code) == tcc_comparison)
2895 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2896 if (FLOAT_TYPE_P (op_type)
2897 && flag_trapping_math
2898 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2899 && code != NE_EXPR && code != EQ_EXPR)
2900 return build1 (TRUTH_NOT_EXPR, type, arg);
2901 else
2903 code = invert_tree_comparison (code,
2904 HONOR_NANS (TYPE_MODE (op_type)));
2905 if (code == ERROR_MARK)
2906 return build1 (TRUTH_NOT_EXPR, type, arg);
2907 else
2908 return build2 (code, type,
2909 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2913 switch (code)
2915 case INTEGER_CST:
2916 return fold_convert (type,
2917 build_int_cst (NULL_TREE, integer_zerop (arg)));
2919 case TRUTH_AND_EXPR:
2920 return build2 (TRUTH_OR_EXPR, type,
2921 invert_truthvalue (TREE_OPERAND (arg, 0)),
2922 invert_truthvalue (TREE_OPERAND (arg, 1)));
2924 case TRUTH_OR_EXPR:
2925 return build2 (TRUTH_AND_EXPR, type,
2926 invert_truthvalue (TREE_OPERAND (arg, 0)),
2927 invert_truthvalue (TREE_OPERAND (arg, 1)));
2929 case TRUTH_XOR_EXPR:
2930 /* Here we can invert either operand. We invert the first operand
2931 unless the second operand is a TRUTH_NOT_EXPR in which case our
2932 result is the XOR of the first operand with the inside of the
2933 negation of the second operand. */
2935 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2936 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2937 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2938 else
2939 return build2 (TRUTH_XOR_EXPR, type,
2940 invert_truthvalue (TREE_OPERAND (arg, 0)),
2941 TREE_OPERAND (arg, 1));
2943 case TRUTH_ANDIF_EXPR:
2944 return build2 (TRUTH_ORIF_EXPR, type,
2945 invert_truthvalue (TREE_OPERAND (arg, 0)),
2946 invert_truthvalue (TREE_OPERAND (arg, 1)));
2948 case TRUTH_ORIF_EXPR:
2949 return build2 (TRUTH_ANDIF_EXPR, type,
2950 invert_truthvalue (TREE_OPERAND (arg, 0)),
2951 invert_truthvalue (TREE_OPERAND (arg, 1)));
2953 case TRUTH_NOT_EXPR:
2954 return TREE_OPERAND (arg, 0);
2956 case COND_EXPR:
2957 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2958 invert_truthvalue (TREE_OPERAND (arg, 1)),
2959 invert_truthvalue (TREE_OPERAND (arg, 2)));
2961 case COMPOUND_EXPR:
2962 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2963 invert_truthvalue (TREE_OPERAND (arg, 1)));
2965 case NON_LVALUE_EXPR:
2966 return invert_truthvalue (TREE_OPERAND (arg, 0));
2968 case NOP_EXPR:
2969 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2970 break;
2972 case CONVERT_EXPR:
2973 case FLOAT_EXPR:
2974 return build1 (TREE_CODE (arg), type,
2975 invert_truthvalue (TREE_OPERAND (arg, 0)));
2977 case BIT_AND_EXPR:
2978 if (!integer_onep (TREE_OPERAND (arg, 1)))
2979 break;
2980 return build2 (EQ_EXPR, type, arg,
2981 fold_convert (type, integer_zero_node));
2983 case SAVE_EXPR:
2984 return build1 (TRUTH_NOT_EXPR, type, arg);
2986 case CLEANUP_POINT_EXPR:
2987 return build1 (CLEANUP_POINT_EXPR, type,
2988 invert_truthvalue (TREE_OPERAND (arg, 0)));
2990 default:
2991 break;
2993 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
2994 return build1 (TRUTH_NOT_EXPR, type, arg);
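/* Added worked example, not part of the original source.  The
   recursion applies De Morgan's laws, so for integral operands

       invert_truthvalue (a < b && c != 0)  ==>  a >= b || c == 0

   whereas a floating-point a < b compiled with -ftrapping-math is left
   as TRUTH_NOT_EXPR <a < b> by the comparison case above, because the
   inverted comparison could trap differently.  */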
2997 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2998 operands are another bit-wise operation with a common input. If so,
2999 distribute the bit operations to save an operation and possibly two if
3000 constants are involved. For example, convert
3001 (A | B) & (A | C) into A | (B & C)
3002 Further simplification will occur if B and C are constants.
3004 If this optimization cannot be done, 0 will be returned. */
3006 static tree
3007 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3009 tree common;
3010 tree left, right;
3012 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3013 || TREE_CODE (arg0) == code
3014 || (TREE_CODE (arg0) != BIT_AND_EXPR
3015 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3016 return 0;
3018 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3020 common = TREE_OPERAND (arg0, 0);
3021 left = TREE_OPERAND (arg0, 1);
3022 right = TREE_OPERAND (arg1, 1);
3024 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3026 common = TREE_OPERAND (arg0, 0);
3027 left = TREE_OPERAND (arg0, 1);
3028 right = TREE_OPERAND (arg1, 0);
3030 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3032 common = TREE_OPERAND (arg0, 1);
3033 left = TREE_OPERAND (arg0, 0);
3034 right = TREE_OPERAND (arg1, 1);
3036 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3038 common = TREE_OPERAND (arg0, 1);
3039 left = TREE_OPERAND (arg0, 0);
3040 right = TREE_OPERAND (arg1, 0);
3042 else
3043 return 0;
3045 return fold (build2 (TREE_CODE (arg0), type, common,
3046 fold (build2 (code, type, left, right))));
3049 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3050 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3052 static tree
3053 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3054 int unsignedp)
3056 tree result = build3 (BIT_FIELD_REF, type, inner,
3057 size_int (bitsize), bitsize_int (bitpos));
3059 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3061 return result;
3064 /* Optimize a bit-field compare.
3066 There are two cases: First is a compare against a constant and the
3067 second is a comparison of two items where the fields are at the same
3068 bit position relative to the start of a chunk (byte, halfword, word)
3069 large enough to contain it. In these cases we can avoid the shift
3070 implicit in bitfield extractions.
3072 For constants, we emit a compare of the shifted constant with the
3073 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3074 compared. For two fields at the same position, we do the ANDs with the
3075 similar mask and compare the result of the ANDs.
3077 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3078 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3079 are the left and right operands of the comparison, respectively.
3081 If the optimization described above can be done, we return the resulting
3082 tree. Otherwise we return zero. */
3084 static tree
3085 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3086 tree lhs, tree rhs)
3088 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3089 tree type = TREE_TYPE (lhs);
3090 tree signed_type, unsigned_type;
3091 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3092 enum machine_mode lmode, rmode, nmode;
3093 int lunsignedp, runsignedp;
3094 int lvolatilep = 0, rvolatilep = 0;
3095 tree linner, rinner = NULL_TREE;
3096 tree mask;
3097 tree offset;
3099 /* Get all the information about the extractions being done. If the bit size
3100 is the same as the size of the underlying object, we aren't doing an
3101 extraction at all and so can do nothing. We also don't want to
3102 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3103 then will no longer be able to replace it. */
3104 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3105 &lunsignedp, &lvolatilep, false);
3106 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3107 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3108 return 0;
3110 if (!const_p)
3112 /* If this is not a constant, we can only do something if bit positions,
3113 sizes, and signedness are the same. */
3114 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3115 &runsignedp, &rvolatilep, false);
3117 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3118 || lunsignedp != runsignedp || offset != 0
3119 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3120 return 0;
3123 /* See if we can find a mode to refer to this field. We should be able to,
3124 but fail if we can't. */
3125 nmode = get_best_mode (lbitsize, lbitpos,
3126 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3127 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3128 TYPE_ALIGN (TREE_TYPE (rinner))),
3129 word_mode, lvolatilep || rvolatilep);
3130 if (nmode == VOIDmode)
3131 return 0;
3133 /* Set signed and unsigned types of the precision of this mode for the
3134 shifts below. */
3135 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3136 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3138 /* Compute the bit position and size for the new reference and our offset
3139 within it. If the new reference is the same size as the original, we
3140 won't optimize anything, so return zero. */
3141 nbitsize = GET_MODE_BITSIZE (nmode);
3142 nbitpos = lbitpos & ~ (nbitsize - 1);
3143 lbitpos -= nbitpos;
3144 if (nbitsize == lbitsize)
3145 return 0;
3147 if (BYTES_BIG_ENDIAN)
3148 lbitpos = nbitsize - lbitsize - lbitpos;
3150 /* Make the mask to be used against the extracted field. */
3151 mask = build_int_cst (unsigned_type, -1);
3152 mask = force_fit_type (mask, 0, false, false);
3153 mask = fold_convert (unsigned_type, mask);
3154 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3155 mask = const_binop (RSHIFT_EXPR, mask,
3156 size_int (nbitsize - lbitsize - lbitpos), 0);
3158 if (! const_p)
3159 /* If not comparing with constant, just rework the comparison
3160 and return. */
3161 return build2 (code, compare_type,
3162 build2 (BIT_AND_EXPR, unsigned_type,
3163 make_bit_field_ref (linner, unsigned_type,
3164 nbitsize, nbitpos, 1),
3165 mask),
3166 build2 (BIT_AND_EXPR, unsigned_type,
3167 make_bit_field_ref (rinner, unsigned_type,
3168 nbitsize, nbitpos, 1),
3169 mask));
3171 /* Otherwise, we are handling the constant case. See if the constant is too
3172 big for the field. Warn and return a tree for 0 (false) if so. We do
3173 this not only for its own sake, but to avoid having to test for this
3174 error case below. If we didn't, we might generate wrong code.
3176 For unsigned fields, the constant shifted right by the field length should
3177 be all zero. For signed fields, the high-order bits should agree with
3178 the sign bit. */
3180 if (lunsignedp)
3182 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3183 fold_convert (unsigned_type, rhs),
3184 size_int (lbitsize), 0)))
3186 warning ("comparison is always %d due to width of bit-field",
3187 code == NE_EXPR);
3188 return constant_boolean_node (code == NE_EXPR, compare_type);
3191 else
3193 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3194 size_int (lbitsize - 1), 0);
3195 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3197 warning ("comparison is always %d due to width of bit-field",
3198 code == NE_EXPR);
3199 return constant_boolean_node (code == NE_EXPR, compare_type);
3203 /* Single-bit compares should always be against zero. */
3204 if (lbitsize == 1 && ! integer_zerop (rhs))
3206 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3207 rhs = fold_convert (type, integer_zero_node);
3210 /* Make a new bitfield reference, shift the constant over the
3211 appropriate number of bits and mask it with the computed mask
3212 (in case this was a signed field). If we changed it, make a new one. */
3213 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3214 if (lvolatilep)
3216 TREE_SIDE_EFFECTS (lhs) = 1;
3217 TREE_THIS_VOLATILE (lhs) = 1;
3220 rhs = fold (const_binop (BIT_AND_EXPR,
3221 const_binop (LSHIFT_EXPR,
3222 fold_convert (unsigned_type, rhs),
3223 size_int (lbitpos), 0),
3224 mask, 0));
3226 return build2 (code, compare_type,
3227 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3228 rhs);
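/* Added illustration, not part of the original source; the layout
   below is hypothetical and target dependent.  For
   struct { unsigned f : 3; } s, a test s.f == 5 is rewritten along the
   lines of

       (WORD (s) & MASK) == (5 << LBITPOS)

   where WORD (s) is a mode-sized load, MASK covers the three field
   bits, and LBITPOS is the field's offset within that word: the shift
   needed to extract s.f is traded for a one-time shift of the
   constant.  */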
3231 /* Subroutine for fold_truthop: decode a field reference.
3233 If EXP is a comparison reference, we return the innermost reference.
3235 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3236 set to the starting bit number.
3238 If the innermost field can be completely contained in a mode-sized
3239 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3241 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3242 otherwise it is not changed.
3244 *PUNSIGNEDP is set to the signedness of the field.
3246 *PMASK is set to the mask used. This is either contained in a
3247 BIT_AND_EXPR or derived from the width of the field.
3249 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3251 Return 0 if this is not a component reference or is one that we can't
3252 do anything with. */
3254 static tree
3255 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3256 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3257 int *punsignedp, int *pvolatilep,
3258 tree *pmask, tree *pand_mask)
3260 tree outer_type = 0;
3261 tree and_mask = 0;
3262 tree mask, inner, offset;
3263 tree unsigned_type;
3264 unsigned int precision;
3266 /* All the optimizations using this function assume integer fields.
3267 There are problems with FP fields since the type_for_size call
3268 below can fail for, e.g., XFmode. */
3269 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3270 return 0;
3272 /* We are interested in the bare arrangement of bits, so strip everything
3273 that doesn't affect the machine mode. However, record the type of the
3274 outermost expression if it may matter below. */
3275 if (TREE_CODE (exp) == NOP_EXPR
3276 || TREE_CODE (exp) == CONVERT_EXPR
3277 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3278 outer_type = TREE_TYPE (exp);
3279 STRIP_NOPS (exp);
3281 if (TREE_CODE (exp) == BIT_AND_EXPR)
3283 and_mask = TREE_OPERAND (exp, 1);
3284 exp = TREE_OPERAND (exp, 0);
3285 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3286 if (TREE_CODE (and_mask) != INTEGER_CST)
3287 return 0;
3290 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3291 punsignedp, pvolatilep, false);
3292 if ((inner == exp && and_mask == 0)
3293 || *pbitsize < 0 || offset != 0
3294 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3295 return 0;
3297 /* If the number of bits in the reference is the same as the bitsize of
3298 the outer type, then the outer type gives the signedness. Otherwise
3299 (in case of a small bitfield) the signedness is unchanged. */
3300 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3301 *punsignedp = TYPE_UNSIGNED (outer_type);
3303 /* Compute the mask to access the bitfield. */
3304 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3305 precision = TYPE_PRECISION (unsigned_type);
3307 mask = build_int_cst (unsigned_type, -1);
3308 mask = force_fit_type (mask, 0, false, false);
3310 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3311 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3313 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3314 if (and_mask != 0)
3315 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3316 fold_convert (unsigned_type, and_mask), mask));
3318 *pmask = mask;
3319 *pand_mask = and_mask;
3320 return inner;
3323 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3324 bit positions. */
3326 static int
3327 all_ones_mask_p (tree mask, int size)
3329 tree type = TREE_TYPE (mask);
3330 unsigned int precision = TYPE_PRECISION (type);
3331 tree tmask;
3333 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3334 tmask = force_fit_type (tmask, 0, false, false);
3336 return
3337 tree_int_cst_equal (mask,
3338 const_binop (RSHIFT_EXPR,
3339 const_binop (LSHIFT_EXPR, tmask,
3340 size_int (precision - size),
3341 0),
3342 size_int (precision - size), 0));
3345 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3346 represents the sign bit of EXP's type. If EXP represents a sign
3347 or zero extension, also test VAL against the unextended type.
3348 The return value is the (sub)expression whose sign bit is VAL,
3349 or NULL_TREE otherwise. */
3351 static tree
3352 sign_bit_p (tree exp, tree val)
3354 unsigned HOST_WIDE_INT mask_lo, lo;
3355 HOST_WIDE_INT mask_hi, hi;
3356 int width;
3357 tree t;
3359 /* Tree EXP must have an integral type. */
3360 t = TREE_TYPE (exp);
3361 if (! INTEGRAL_TYPE_P (t))
3362 return NULL_TREE;
3364 /* Tree VAL must be an integer constant. */
3365 if (TREE_CODE (val) != INTEGER_CST
3366 || TREE_CONSTANT_OVERFLOW (val))
3367 return NULL_TREE;
3369 width = TYPE_PRECISION (t);
3370 if (width > HOST_BITS_PER_WIDE_INT)
3372 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3373 lo = 0;
3375 mask_hi = ((unsigned HOST_WIDE_INT) -1
3376 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3377 mask_lo = -1;
3379 else
3381 hi = 0;
3382 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3384 mask_hi = 0;
3385 mask_lo = ((unsigned HOST_WIDE_INT) -1
3386 >> (HOST_BITS_PER_WIDE_INT - width));
3389 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3390 treat VAL as if it were unsigned. */
3391 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3392 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3393 return exp;
3395 /* Handle extension from a narrower type. */
3396 if (TREE_CODE (exp) == NOP_EXPR
3397 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3398 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3400 return NULL_TREE;
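/* Added worked example, not part of the original source.  For a 16-bit
   type, width = 16 fits in HOST_BITS_PER_WIDE_INT, so the function
   compares against lo = (unsigned HOST_WIDE_INT) 1 << 15 = 0x8000
   under mask_lo = 0xffff; sign_bit_p (exp, val) therefore returns EXP
   exactly when VAL's low 16 bits are 0x8000, i.e. when VAL is the sign
   bit of the type.  */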
3403 /* Subroutine for fold_truthop: determine if an operand is simple enough
3404 to be evaluated unconditionally. */
3406 static int
3407 simple_operand_p (tree exp)
3409 /* Strip any conversions that don't change the machine mode. */
3410 STRIP_NOPS (exp);
3412 return (CONSTANT_CLASS_P (exp)
3413 || TREE_CODE (exp) == SSA_NAME
3414 || (DECL_P (exp)
3415 && ! TREE_ADDRESSABLE (exp)
3416 && ! TREE_THIS_VOLATILE (exp)
3417 && ! DECL_NONLOCAL (exp)
3418 /* Don't regard global variables as simple. They may be
3419 allocated in ways unknown to the compiler (shared memory,
3420 #pragma weak, etc). */
3421 && ! TREE_PUBLIC (exp)
3422 && ! DECL_EXTERNAL (exp)
3423 /* Loading a static variable is unduly expensive, but global
3424 registers aren't expensive. */
3425 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3428 /* The following functions are subroutines to fold_range_test and allow it to
3429 try to change a logical combination of comparisons into a range test.
3431 For example, both
3432 X == 2 || X == 3 || X == 4 || X == 5
3434 X >= 2 && X <= 5
3435 are converted to
3436 (unsigned) (X - 2) <= 3
3438 We describe each set of comparisons as being either inside or outside
3439 a range, using a variable named like IN_P, and then describe the
3440 range with a lower and upper bound. If one of the bounds is omitted,
3441 it represents either the highest or lowest value of the type.
3443 In the comments below, we represent a range by two numbers in brackets
3444 preceded by a "+" to designate being inside that range, or a "-" to
3445 designate being outside that range, so the condition can be inverted by
3446 flipping the prefix. An omitted bound is represented by a "-". For
3447 example, "- [-, 10]" means being outside the range starting at the lowest
3448 possible value and ending at 10, in other words, being greater than 10.
3449 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3450 always false.
3452 We set up things so that the missing bounds are handled in a consistent
3453 manner so neither a missing bound nor "true" and "false" need to be
3454 handled using a special case. */
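/* Added note, not part of the original source.  The
   unsigned-subtraction form quoted above works because the cast wraps
   values below the low bound around to very large numbers; e.g. with
   32-bit unsigned arithmetic,

       x == 2 || x == 3 || x == 4 || x == 5
         <==>  (unsigned) (x - 2) <= 3

   since x == 1 gives (unsigned) -1 == 0xffffffff > 3, and any x > 5
   gives a difference greater than 3.  */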
3456 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3457 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3458 and UPPER1_P are nonzero if the respective argument is an upper bound
3459 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3460 must be specified for a comparison. ARG1 will be converted to ARG0's
3461 type if both are specified. */
3463 static tree
3464 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3465 tree arg1, int upper1_p)
3467 tree tem;
3468 int result;
3469 int sgn0, sgn1;
3471 /* If neither arg represents infinity, do the normal operation.
3472 Else, if not a comparison, return infinity. Else handle the special
3473 comparison rules. Note that most of the cases below won't occur, but
3474 are handled for consistency. */
3476 if (arg0 != 0 && arg1 != 0)
3478 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3479 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3480 STRIP_NOPS (tem);
3481 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3484 if (TREE_CODE_CLASS (code) != tcc_comparison)
3485 return 0;
3487 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3488 for neither. In real maths, we cannot assume open ended ranges are
3489 the same. But, this is computer arithmetic, where numbers are finite.
3490 We can therefore model any missing bound as a value Z lying beyond
3491 every representable number. This permits
3492 us to treat unbounded ranges as equal. */
3493 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3494 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3495 switch (code)
3497 case EQ_EXPR:
3498 result = sgn0 == sgn1;
3499 break;
3500 case NE_EXPR:
3501 result = sgn0 != sgn1;
3502 break;
3503 case LT_EXPR:
3504 result = sgn0 < sgn1;
3505 break;
3506 case LE_EXPR:
3507 result = sgn0 <= sgn1;
3508 break;
3509 case GT_EXPR:
3510 result = sgn0 > sgn1;
3511 break;
3512 case GE_EXPR:
3513 result = sgn0 >= sgn1;
3514 break;
3515 default:
3516 gcc_unreachable ();
3519 return constant_boolean_node (result, type);
3522 /* Given EXP, a logical expression, set the range it is testing into
3523 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3524 actually being tested. *PLOW and *PHIGH will be made of the same type
3525 as the returned expression. If EXP is not a comparison, we will most
3526 likely not be returning a useful value and range. */
3528 static tree
3529 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3531 enum tree_code code;
3532 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3533 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3534 int in_p, n_in_p;
3535 tree low, high, n_low, n_high;
3537 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3538 and see if we can refine the range. Some of the cases below may not
3539 happen, but it doesn't seem worth worrying about this. We "continue"
3540 the outer loop when we've changed something; otherwise we "break"
3541 the switch, which will "break" the while. */
3543 in_p = 0;
3544 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3546 while (1)
3548 code = TREE_CODE (exp);
3549 exp_type = TREE_TYPE (exp);
3551 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3553 if (TREE_CODE_LENGTH (code) > 0)
3554 arg0 = TREE_OPERAND (exp, 0);
3555 if (TREE_CODE_CLASS (code) == tcc_comparison
3556 || TREE_CODE_CLASS (code) == tcc_unary
3557 || TREE_CODE_CLASS (code) == tcc_binary)
3558 arg0_type = TREE_TYPE (arg0);
3559 if (TREE_CODE_CLASS (code) == tcc_binary
3560 || TREE_CODE_CLASS (code) == tcc_comparison
3561 || (TREE_CODE_CLASS (code) == tcc_expression
3562 && TREE_CODE_LENGTH (code) > 1))
3563 arg1 = TREE_OPERAND (exp, 1);
3566 switch (code)
3568 case TRUTH_NOT_EXPR:
3569 in_p = ! in_p, exp = arg0;
3570 continue;
3572 case EQ_EXPR: case NE_EXPR:
3573 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3574 /* We can only do something if the range is testing for zero
3575 and if the second operand is an integer constant. Note that
3576 saying something is "in" the range we make is done by
3577 complementing IN_P since it will set in the initial case of
3578 being not equal to zero; "out" is leaving it alone. */
3579 if (low == 0 || high == 0
3580 || ! integer_zerop (low) || ! integer_zerop (high)
3581 || TREE_CODE (arg1) != INTEGER_CST)
3582 break;
3584 switch (code)
3586 case NE_EXPR: /* - [c, c] */
3587 low = high = arg1;
3588 break;
3589 case EQ_EXPR: /* + [c, c] */
3590 in_p = ! in_p, low = high = arg1;
3591 break;
3592 case GT_EXPR: /* - [-, c] */
3593 low = 0, high = arg1;
3594 break;
3595 case GE_EXPR: /* + [c, -] */
3596 in_p = ! in_p, low = arg1, high = 0;
3597 break;
3598 case LT_EXPR: /* - [c, -] */
3599 low = arg1, high = 0;
3600 break;
3601 case LE_EXPR: /* + [-, c] */
3602 in_p = ! in_p, low = 0, high = arg1;
3603 break;
3604 default:
3605 gcc_unreachable ();
3608 /* If this is an unsigned comparison, we also know that EXP is
3609 greater than or equal to zero. We base the range tests we make
3610 on that fact, so we record it here so we can parse existing
3611 range tests. We test arg0_type since often the return type
3612 of, e.g. EQ_EXPR, is boolean. */
3613 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3615 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3616 in_p, low, high, 1,
3617 fold_convert (arg0_type, integer_zero_node),
3618 NULL_TREE))
3619 break;
3621 in_p = n_in_p, low = n_low, high = n_high;
3623 /* If the high bound is missing, but we have a nonzero low
3624 bound, reverse the range so it goes from zero to the low bound
3625 minus 1. */
3626 if (high == 0 && low && ! integer_zerop (low))
3628 in_p = ! in_p;
3629 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3630 integer_one_node, 0);
3631 low = fold_convert (arg0_type, integer_zero_node);
3635 exp = arg0;
3636 continue;
3638 case NEGATE_EXPR:
3639 /* (-x) IN [a,b] -> x in [-b, -a] */
3640 n_low = range_binop (MINUS_EXPR, exp_type,
3641 fold_convert (exp_type, integer_zero_node),
3642 0, high, 1);
3643 n_high = range_binop (MINUS_EXPR, exp_type,
3644 fold_convert (exp_type, integer_zero_node),
3645 0, low, 0);
3646 low = n_low, high = n_high;
3647 exp = arg0;
3648 continue;
3650 case BIT_NOT_EXPR:
3651 /* ~ X -> -X - 1 */
3652 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3653 fold_convert (exp_type, integer_one_node));
3654 continue;
3656 case PLUS_EXPR: case MINUS_EXPR:
3657 if (TREE_CODE (arg1) != INTEGER_CST)
3658 break;
3660 /* If EXP is signed, any overflow in the computation is undefined,
3661 so we don't worry about it so long as our computations on
3662 the bounds don't overflow. For unsigned, overflow is defined
3663 and this is exactly the right thing. */
3664 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3665 arg0_type, low, 0, arg1, 0);
3666 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3667 arg0_type, high, 1, arg1, 0);
3668 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3669 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3670 break;
3672 /* Check for an unsigned range which has wrapped around the maximum
3673 value thus making n_high < n_low, and normalize it. */
3674 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3676 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3677 integer_one_node, 0);
3678 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3679 integer_one_node, 0);
3681 /* If the range is of the form +/- [ x+1, x ], we won't
3682 be able to normalize it. But then, it represents the
3683 whole range or the empty set, so make it
3684 +/- [ -, - ]. */
3685 if (tree_int_cst_equal (n_low, low)
3686 && tree_int_cst_equal (n_high, high))
3687 low = high = 0;
3688 else
3689 in_p = ! in_p;
3691 else
3692 low = n_low, high = n_high;
3694 exp = arg0;
3695 continue;
3697 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3698 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3699 break;
3701 if (! INTEGRAL_TYPE_P (arg0_type)
3702 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3703 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3704 break;
3706 n_low = low, n_high = high;
3708 if (n_low != 0)
3709 n_low = fold_convert (arg0_type, n_low);
3711 if (n_high != 0)
3712 n_high = fold_convert (arg0_type, n_high);
3715 /* If we're converting arg0 from an unsigned type, to exp,
3716 a signed type, we will be doing the comparison as unsigned.
3717 The tests above have already verified that LOW and HIGH
3718 are both positive.
3720 So we have to ensure that we will handle large unsigned
3721 values the same way that the current signed bounds treat
3722 negative values. */
3724 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3726 tree high_positive;
3727 tree equiv_type = lang_hooks.types.type_for_mode
3728 (TYPE_MODE (arg0_type), 1);
3730 /* A range without an upper bound is, naturally, unbounded.
3731 Since convert would have cropped a very large value, use
3732 the max value for the destination type. */
3733 high_positive
3734 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3735 : TYPE_MAX_VALUE (arg0_type);
3737 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3738 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3739 fold_convert (arg0_type,
3740 high_positive),
3741 fold_convert (arg0_type,
3742 integer_one_node)));
3744 /* If the low bound is specified, "and" the range with the
3745 range for which the original unsigned value will be
3746 positive. */
3747 if (low != 0)
3749 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3750 1, n_low, n_high, 1,
3751 fold_convert (arg0_type,
3752 integer_zero_node),
3753 high_positive))
3754 break;
3756 in_p = (n_in_p == in_p);
3758 else
3760 /* Otherwise, "or" the range with the range of the input
3761 that will be interpreted as negative. */
3762 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3763 0, n_low, n_high, 1,
3764 fold_convert (arg0_type,
3765 integer_zero_node),
3766 high_positive))
3767 break;
3769 in_p = (in_p != n_in_p);
3773 exp = arg0;
3774 low = n_low, high = n_high;
3775 continue;
3777 default:
3778 break;
3781 break;
3784 /* If EXP is a constant, we can evaluate whether this is true or false. */
3785 if (TREE_CODE (exp) == INTEGER_CST)
3787 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3788 exp, 0, low, 0))
3789 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3790 exp, 1, high, 1)));
3791 low = high = 0;
3792 exp = 0;
3795 *pin_p = in_p, *plow = low, *phigh = high;
3796 return exp;
3799 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3800 type, TYPE, return an expression to test if EXP is in (or out of, depending
3801 on IN_P) the range. Return 0 if the test couldn't be created. */
3803 static tree
3804 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3806 tree etype = TREE_TYPE (exp);
3807 tree value;
3809 if (! in_p)
3811 value = build_range_check (type, exp, 1, low, high);
3812 if (value != 0)
3813 return invert_truthvalue (value);
3815 return 0;
3818 if (low == 0 && high == 0)
3819 return fold_convert (type, integer_one_node);
3821 if (low == 0)
3822 return fold (build2 (LE_EXPR, type, exp, high));
3824 if (high == 0)
3825 return fold (build2 (GE_EXPR, type, exp, low));
3827 if (operand_equal_p (low, high, 0))
3828 return fold (build2 (EQ_EXPR, type, exp, low));
3830 if (integer_zerop (low))
3832 if (! TYPE_UNSIGNED (etype))
3834 etype = lang_hooks.types.unsigned_type (etype);
3835 high = fold_convert (etype, high);
3836 exp = fold_convert (etype, exp);
3838 return build_range_check (type, exp, 1, 0, high);
3841 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3842 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3844 unsigned HOST_WIDE_INT lo;
3845 HOST_WIDE_INT hi;
3846 int prec;
3848 prec = TYPE_PRECISION (etype);
3849 if (prec <= HOST_BITS_PER_WIDE_INT)
3851 hi = 0;
3852 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3854 else
3856 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3857 lo = (unsigned HOST_WIDE_INT) -1;
3860 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3862 if (TYPE_UNSIGNED (etype))
3864 etype = lang_hooks.types.signed_type (etype);
3865 exp = fold_convert (etype, exp);
3867 return fold (build2 (GT_EXPR, type, exp,
3868 fold_convert (etype, integer_zero_node)));
3872 value = const_binop (MINUS_EXPR, high, low, 0);
3873 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3875 tree utype, minv, maxv;
3877 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3878 for the type in question, as we rely on this here. */
3879 switch (TREE_CODE (etype))
3881 case INTEGER_TYPE:
3882 case ENUMERAL_TYPE:
3883 case CHAR_TYPE:
3884 utype = lang_hooks.types.unsigned_type (etype);
3885 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3886 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3887 integer_one_node, 1);
3888 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3889 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3890 minv, 1, maxv, 1)))
3892 etype = utype;
3893 high = fold_convert (etype, high);
3894 low = fold_convert (etype, low);
3895 exp = fold_convert (etype, exp);
3896 value = const_binop (MINUS_EXPR, high, low, 0);
3898 break;
3899 default:
3900 break;
3904 if (value != 0 && ! TREE_OVERFLOW (value))
3905 return build_range_check (type,
3906 fold (build2 (MINUS_EXPR, etype, exp, low)),
3907 1, fold_convert (etype, integer_zero_node),
3908 value);
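 /* The net effect is the classic unsigned-subtraction range test: e.g.
 'a' <= c && c <= 'z' (c in ['a', 'z']) becomes, at the source level,
 (unsigned char) (c - 'a') <= 'z' - 'a', a single comparison
 (assuming 8-bit chars and ASCII). */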
3910 return 0;
3913 /* Given two ranges, see if we can merge them into one. Return 1 if we
3914 can, 0 if we can't. Set the output range into the specified parameters. */
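 /* For example, intersecting the "in" ranges from c >= '0' (c in
 ['0', -]) and c <= '9' (c in [-, '9']) gives c in ['0', '9'];
 excluding both [-, '0' - 1] and ['9' + 1, -] collapses, via the
 adjacency case below, to the same single "in" range ['0', '9']. */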
3916 static int
3917 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3918 tree high0, int in1_p, tree low1, tree high1)
3920 int no_overlap;
3921 int subset;
3922 int temp;
3923 tree tem;
3924 int in_p;
3925 tree low, high;
3926 int lowequal = ((low0 == 0 && low1 == 0)
3927 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3928 low0, 0, low1, 0)));
3929 int highequal = ((high0 == 0 && high1 == 0)
3930 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3931 high0, 1, high1, 1)));
3933 /* Make range 0 be the range that starts first, or ends last if they
3934 start at the same value. Swap them if it isn't. */
3935 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3936 low0, 0, low1, 0))
3937 || (lowequal
3938 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3939 high1, 1, high0, 1))))
3941 temp = in0_p, in0_p = in1_p, in1_p = temp;
3942 tem = low0, low0 = low1, low1 = tem;
3943 tem = high0, high0 = high1, high1 = tem;
3946 /* Now flag two cases, whether the ranges are disjoint or whether the
3947 second range is totally subsumed in the first. Note that the tests
3948 below are simplified by the ones above. */
3949 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3950 high0, 1, low1, 0));
3951 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3952 high1, 1, high0, 1));
3954 /* We now have four cases, depending on whether we are including or
3955 excluding the two ranges. */
3956 if (in0_p && in1_p)
3958 /* If they don't overlap, the result is false. If the second range
3959 is a subset it is the result. Otherwise, the range is from the start
3960 of the second to the end of the first. */
3961 if (no_overlap)
3962 in_p = 0, low = high = 0;
3963 else if (subset)
3964 in_p = 1, low = low1, high = high1;
3965 else
3966 in_p = 1, low = low1, high = high0;
3969 else if (in0_p && ! in1_p)
3971 /* If they don't overlap, the result is the first range. If they are
3972 equal, the result is false. If the second range is a subset of the
3973 first, and the ranges begin at the same place, we go from just after
3974 the end of the first range to the end of the second. If the second
3975 range is not a subset of the first, or if it is a subset and both
3976 ranges end at the same place, the range starts at the start of the
3977 first range and ends just before the second range.
3978 Otherwise, we can't describe this as a single range. */
3979 if (no_overlap)
3980 in_p = 1, low = low0, high = high0;
3981 else if (lowequal && highequal)
3982 in_p = 0, low = high = 0;
3983 else if (subset && lowequal)
3985 in_p = 1, high = high0;
3986 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3987 integer_one_node, 0);
3989 else if (! subset || highequal)
3991 in_p = 1, low = low0;
3992 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3993 integer_one_node, 0);
3995 else
3996 return 0;
3999 else if (! in0_p && in1_p)
4001 /* If they don't overlap, the result is the second range. If the second
4002 is a subset of the first, the result is false. Otherwise,
4003 the range starts just after the first range and ends at the
4004 end of the second. */
4005 if (no_overlap)
4006 in_p = 1, low = low1, high = high1;
4007 else if (subset || highequal)
4008 in_p = 0, low = high = 0;
4009 else
4011 in_p = 1, high = high1;
4012 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4013 integer_one_node, 0);
4017 else
4019 /* The case where we are excluding both ranges. Here the complex case
4020 is if they don't overlap. In that case, the only time we have a
4021 range is if they are adjacent. If the second is a subset of the
4022 first, the result is the first. Otherwise, the range to exclude
4023 starts at the beginning of the first range and ends at the end of the
4024 second. */
4025 if (no_overlap)
4027 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4028 range_binop (PLUS_EXPR, NULL_TREE,
4029 high0, 1,
4030 integer_one_node, 1),
4031 1, low1, 0)))
4032 in_p = 0, low = low0, high = high1;
4033 else
4035 /* Canonicalize - [min, x] into - [-, x]. */
4036 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4037 switch (TREE_CODE (TREE_TYPE (low0)))
4039 case ENUMERAL_TYPE:
4040 if (TYPE_PRECISION (TREE_TYPE (low0))
4041 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4042 break;
4043 /* FALLTHROUGH */
4044 case INTEGER_TYPE:
4045 case CHAR_TYPE:
4046 if (tree_int_cst_equal (low0,
4047 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4048 low0 = 0;
4049 break;
4050 case POINTER_TYPE:
4051 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4052 && integer_zerop (low0))
4053 low0 = 0;
4054 break;
4055 default:
4056 break;
4059 /* Canonicalize - [x, max] into - [x, -]. */
4060 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4061 switch (TREE_CODE (TREE_TYPE (high1)))
4063 case ENUMERAL_TYPE:
4064 if (TYPE_PRECISION (TREE_TYPE (high1))
4065 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4066 break;
4067 /* FALLTHROUGH */
4068 case INTEGER_TYPE:
4069 case CHAR_TYPE:
4070 if (tree_int_cst_equal (high1,
4071 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4072 high1 = 0;
4073 break;
4074 case POINTER_TYPE:
4075 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4076 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4077 high1, 1,
4078 integer_one_node, 1)))
4079 high1 = 0;
4080 break;
4081 default:
4082 break;
 4085 /* The ranges might also be adjacent between the maximum and
4086 minimum values of the given type. For
4087 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4088 return + [x + 1, y - 1]. */
4089 if (low0 == 0 && high1 == 0)
4091 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4092 integer_one_node, 1);
4093 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4094 integer_one_node, 0);
4095 if (low == 0 || high == 0)
4096 return 0;
4098 in_p = 1;
4100 else
4101 return 0;
4104 else if (subset)
4105 in_p = 0, low = low0, high = high0;
4106 else
4107 in_p = 0, low = low0, high = high1;
4110 *pin_p = in_p, *plow = low, *phigh = high;
4111 return 1;
4115 /* Subroutine of fold, looking inside expressions of the form
4116 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4117 of the COND_EXPR. This function is being used also to optimize
4118 A op B ? C : A, by reversing the comparison first.
4120 Return a folded expression whose code is not a COND_EXPR
4121 anymore, or NULL_TREE if no folding opportunity is found. */
4123 static tree
4124 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4126 enum tree_code comp_code = TREE_CODE (arg0);
4127 tree arg00 = TREE_OPERAND (arg0, 0);
4128 tree arg01 = TREE_OPERAND (arg0, 1);
4129 tree arg1_type = TREE_TYPE (arg1);
4130 tree tem;
4132 STRIP_NOPS (arg1);
4133 STRIP_NOPS (arg2);
4135 /* If we have A op 0 ? A : -A, consider applying the following
4136 transformations:
4138 A == 0? A : -A same as -A
4139 A != 0? A : -A same as A
4140 A >= 0? A : -A same as abs (A)
4141 A > 0? A : -A same as abs (A)
4142 A <= 0? A : -A same as -abs (A)
4143 A < 0? A : -A same as -abs (A)
4145 None of these transformations work for modes with signed
4146 zeros. If A is +/-0, the first two transformations will
4147 change the sign of the result (from +0 to -0, or vice
4148 versa). The last four will fix the sign of the result,
4149 even though the original expressions could be positive or
4150 negative, depending on the sign of A.
4152 Note that all these transformations are correct if A is
4153 NaN, since the two alternatives (A and -A) are also NaNs. */
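 /* For instance, with A == -0.0 the source expression A == 0.0 ? A : -A
 evaluates to -0.0 (the comparison -0.0 == 0.0 is true), while the
 replacement -A is +0.0; this is why the fold is only safe when the
 mode's signed zeros need not be honored. */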
4154 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4155 ? real_zerop (arg01)
4156 : integer_zerop (arg01))
4157 && TREE_CODE (arg2) == NEGATE_EXPR
4158 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4159 switch (comp_code)
4161 case EQ_EXPR:
4162 case UNEQ_EXPR:
4163 tem = fold_convert (arg1_type, arg1);
4164 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4165 case NE_EXPR:
4166 case LTGT_EXPR:
4167 return pedantic_non_lvalue (fold_convert (type, arg1));
4168 case UNGE_EXPR:
4169 case UNGT_EXPR:
4170 if (flag_trapping_math)
4171 break;
4172 /* Fall through. */
4173 case GE_EXPR:
4174 case GT_EXPR:
4175 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4176 arg1 = fold_convert (lang_hooks.types.signed_type
4177 (TREE_TYPE (arg1)), arg1);
4178 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4179 return pedantic_non_lvalue (fold_convert (type, tem));
4180 case UNLE_EXPR:
4181 case UNLT_EXPR:
4182 if (flag_trapping_math)
4183 break;
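 /* Fall through. */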
4184 case LE_EXPR:
4185 case LT_EXPR:
4186 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4187 arg1 = fold_convert (lang_hooks.types.signed_type
4188 (TREE_TYPE (arg1)), arg1);
4189 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4190 return negate_expr (fold_convert (type, tem));
4191 default:
4192 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4193 break;
4196 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4197 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4198 both transformations are correct when A is NaN: A != 0
4199 is then true, and A == 0 is false. */
4201 if (integer_zerop (arg01) && integer_zerop (arg2))
4203 if (comp_code == NE_EXPR)
4204 return pedantic_non_lvalue (fold_convert (type, arg1));
4205 else if (comp_code == EQ_EXPR)
4206 return fold_convert (type, integer_zero_node);
4209 /* Try some transformations of A op B ? A : B.
4211 A == B? A : B same as B
4212 A != B? A : B same as A
4213 A >= B? A : B same as max (A, B)
4214 A > B? A : B same as max (B, A)
4215 A <= B? A : B same as min (A, B)
4216 A < B? A : B same as min (B, A)
4218 As above, these transformations don't work in the presence
4219 of signed zeros. For example, if A and B are zeros of
4220 opposite sign, the first two transformations will change
4221 the sign of the result. In the last four, the original
4222 expressions give different results for (A=+0, B=-0) and
4223 (A=-0, B=+0), but the transformed expressions do not.
4225 The first two transformations are correct if either A or B
4226 is a NaN. In the first transformation, the condition will
4227 be false, and B will indeed be chosen. In the case of the
4228 second transformation, the condition A != B will be true,
4229 and A will be chosen.
4231 The conversions to max() and min() are not correct if B is
4232 a number and A is not. The conditions in the original
4233 expressions will be false, so all four give B. The min()
4234 and max() versions would give a NaN instead. */
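 /* Concretely, with A == NaN and B == 1.0, the source expression
 A < B ? A : B yields B (ordered comparisons with a NaN are false),
 whereas min (B, A) would give the NaN; hence the MIN/MAX rewrites
 below are guarded by !HONOR_NANS. */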
4235 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4237 tree comp_op0 = arg00;
4238 tree comp_op1 = arg01;
4239 tree comp_type = TREE_TYPE (comp_op0);
4241 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4242 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4244 comp_type = type;
4245 comp_op0 = arg1;
4246 comp_op1 = arg2;
4249 switch (comp_code)
4251 case EQ_EXPR:
4252 return pedantic_non_lvalue (fold_convert (type, arg2));
4253 case NE_EXPR:
4254 return pedantic_non_lvalue (fold_convert (type, arg1));
4255 case LE_EXPR:
4256 case LT_EXPR:
4257 case UNLE_EXPR:
4258 case UNLT_EXPR:
4259 /* In C++ a ?: expression can be an lvalue, so put the
4260 operand which will be used if they are equal first
4261 so that we can convert this back to the
4262 corresponding COND_EXPR. */
4263 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4265 comp_op0 = fold_convert (comp_type, comp_op0);
4266 comp_op1 = fold_convert (comp_type, comp_op1);
4267 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4268 ? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
4269 : fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
4270 return pedantic_non_lvalue (fold_convert (type, tem));
4272 break;
4273 case GE_EXPR:
4274 case GT_EXPR:
4275 case UNGE_EXPR:
4276 case UNGT_EXPR:
4277 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4279 comp_op0 = fold_convert (comp_type, comp_op0);
4280 comp_op1 = fold_convert (comp_type, comp_op1);
4281 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4282 ? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
4283 : fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
4284 return pedantic_non_lvalue (fold_convert (type, tem));
4286 break;
4287 case UNEQ_EXPR:
4288 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4289 return pedantic_non_lvalue (fold_convert (type, arg2));
4290 break;
4291 case LTGT_EXPR:
4292 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4293 return pedantic_non_lvalue (fold_convert (type, arg1));
4294 break;
4295 default:
4296 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4297 break;
4301 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4302 we might still be able to simplify this. For example,
4303 if C1 is one less or one more than C2, this might have started
4304 out as a MIN or MAX and been transformed by this function.
4305 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
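 /* A worked instance: x < 5 ? x : 4 (C1 == 5, C2 == 4, so C1 == C2 + 1)
 is min (x, 4): both give x for x <= 4 and 4 for x >= 5. The
 TYPE_MIN_VALUE / TYPE_MAX_VALUE checks below reject the degenerate
 bounds for which C2 +/- 1 would wrap around. */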
4307 if (INTEGRAL_TYPE_P (type)
4308 && TREE_CODE (arg01) == INTEGER_CST
4309 && TREE_CODE (arg2) == INTEGER_CST)
4310 switch (comp_code)
4312 case EQ_EXPR:
4313 /* We can replace A with C1 in this case. */
4314 arg1 = fold_convert (type, arg01);
4315 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4317 case LT_EXPR:
4318 /* If C1 is C2 + 1, this is min(A, C2). */
4319 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4320 OEP_ONLY_CONST)
4321 && operand_equal_p (arg01,
4322 const_binop (PLUS_EXPR, arg2,
4323 integer_one_node, 0),
4324 OEP_ONLY_CONST))
4325 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4326 type, arg1, arg2)));
4327 break;
4329 case LE_EXPR:
4330 /* If C1 is C2 - 1, this is min(A, C2). */
4331 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4332 OEP_ONLY_CONST)
4333 && operand_equal_p (arg01,
4334 const_binop (MINUS_EXPR, arg2,
4335 integer_one_node, 0),
4336 OEP_ONLY_CONST))
4337 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4338 type, arg1, arg2)));
4339 break;
4341 case GT_EXPR:
4342 /* If C1 is C2 - 1, this is max(A, C2). */
4343 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4344 OEP_ONLY_CONST)
4345 && operand_equal_p (arg01,
4346 const_binop (MINUS_EXPR, arg2,
4347 integer_one_node, 0),
4348 OEP_ONLY_CONST))
4349 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4350 type, arg1, arg2)));
4351 break;
4353 case GE_EXPR:
4354 /* If C1 is C2 + 1, this is max(A, C2). */
4355 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4356 OEP_ONLY_CONST)
4357 && operand_equal_p (arg01,
4358 const_binop (PLUS_EXPR, arg2,
4359 integer_one_node, 0),
4360 OEP_ONLY_CONST))
4361 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4362 type, arg1, arg2)));
4363 break;
4364 case NE_EXPR:
4365 break;
4366 default:
4367 gcc_unreachable ();
4370 return NULL_TREE;
4375 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4376 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4377 #endif
4379 /* EXP is some logical combination of boolean tests. See if we can
4380 merge it into some range test. Return the new tree if so. */
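 /* For instance, 0 <= i && i < 10 yields two single-bound ranges that
 merge_ranges combines into i in [0, 9], which build_range_check can
 test with one unsigned comparison; an || test is handled by inverting
 both ranges first and inverting the result again at the end. */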
4382 static tree
4383 fold_range_test (tree exp)
4385 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4386 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4387 int in0_p, in1_p, in_p;
4388 tree low0, low1, low, high0, high1, high;
4389 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4390 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4391 tree tem;
4393 /* If this is an OR operation, invert both sides; we will invert
4394 again at the end. */
4395 if (or_op)
4396 in0_p = ! in0_p, in1_p = ! in1_p;
4398 /* If both expressions are the same, if we can merge the ranges, and we
4399 can build the range test, return it or it inverted. If one of the
4400 ranges is always true or always false, consider it to be the same
4401 expression as the other. */
4402 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4403 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4404 in1_p, low1, high1)
4405 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4406 lhs != 0 ? lhs
4407 : rhs != 0 ? rhs : integer_zero_node,
4408 in_p, low, high))))
4409 return or_op ? invert_truthvalue (tem) : tem;
4411 /* On machines where the branch cost is expensive, if this is a
4412 short-circuited branch and the underlying object on both sides
4413 is the same, make a non-short-circuit operation. */
4414 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4415 && lhs != 0 && rhs != 0
4416 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4417 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4418 && operand_equal_p (lhs, rhs, 0))
4420 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4421 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4422 which cases we can't do this. */
4423 if (simple_operand_p (lhs))
4424 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4425 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4426 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4427 TREE_OPERAND (exp, 1));
4429 else if (lang_hooks.decls.global_bindings_p () == 0
4430 && ! CONTAINS_PLACEHOLDER_P (lhs))
4432 tree common = save_expr (lhs);
4434 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4435 or_op ? ! in0_p : in0_p,
4436 low0, high0))
4437 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4438 or_op ? ! in1_p : in1_p,
4439 low1, high1))))
4440 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4441 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4442 TREE_TYPE (exp), lhs, rhs);
4446 return 0;
4449 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4450 bit value. Arrange things so the extra bits will be set to zero if and
 4451 only if C is sign-extended to its full width. If MASK is nonzero,
4452 it is an INTEGER_CST that should be AND'ed with the extra bits. */
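 /* A worked instance (32-bit mode, P == 8, signed field, MASK ignored):
 for C == 0xff the 8-bit sign bit is set, TEMP becomes 0xffffff00, and
 C ^ TEMP == 0xffffffff -- C sign-extended; conversely, a C that
 arrived as 0xffffffff is XOR'ed back down to 0xff, so the extra bits
 end up zero exactly when C was already sign-extended. */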
4454 static tree
4455 unextend (tree c, int p, int unsignedp, tree mask)
4457 tree type = TREE_TYPE (c);
4458 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4459 tree temp;
4461 if (p == modesize || unsignedp)
4462 return c;
4464 /* We work by getting just the sign bit into the low-order bit, then
4465 into the high-order bit, then sign-extend. We then XOR that value
4466 with C. */
4467 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4468 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4470 /* We must use a signed type in order to get an arithmetic right shift.
4471 However, we must also avoid introducing accidental overflows, so that
4472 a subsequent call to integer_zerop will work. Hence we must
4473 do the type conversion here. At this point, the constant is either
4474 zero or one, and the conversion to a signed type can never overflow.
4475 We could get an overflow if this conversion is done anywhere else. */
4476 if (TYPE_UNSIGNED (type))
4477 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4479 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4480 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4481 if (mask != 0)
4482 temp = const_binop (BIT_AND_EXPR, temp,
4483 fold_convert (TREE_TYPE (c), mask), 0);
4484 /* If necessary, convert the type back to match the type of C. */
4485 if (TYPE_UNSIGNED (type))
4486 temp = fold_convert (type, temp);
4488 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4491 /* Find ways of folding logical expressions of LHS and RHS:
4492 Try to merge two comparisons to the same innermost item.
4493 Look for range tests like "ch >= '0' && ch <= '9'".
4494 Look for combinations of simple terms on machines with expensive branches
4495 and evaluate the RHS unconditionally.
4497 For example, if we have p->a == 2 && p->b == 4 and we can make an
4498 object large enough to span both A and B, we can do this with a comparison
4499 against the object ANDed with the a mask.
4501 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4502 operations to do this with one comparison.
 4504 We check for both normal comparisons and the BIT_AND_EXPRs made by this
 4505 function and the one above.
4507 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4508 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4510 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4511 two operands.
4513 We return the simplified tree or 0 if no optimization is possible. */
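 /* As an illustration (field layout assumed): with
 struct s { unsigned a : 4; unsigned b : 4; } *p;
 the test p->a == 2 && p->b == 4 can become a single load of the byte
 holding both fields, masked and compared once against a merged
 constant in which 2 and 4 sit at their respective (target-dependent)
 bit positions. */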
4515 static tree
4516 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4518 /* If this is the "or" of two comparisons, we can do something if
4519 the comparisons are NE_EXPR. If this is the "and", we can do something
4520 if the comparisons are EQ_EXPR. I.e.,
4521 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4523 WANTED_CODE is this operation code. For single bit fields, we can
4524 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4525 comparison for one-bit fields. */
4527 enum tree_code wanted_code;
4528 enum tree_code lcode, rcode;
4529 tree ll_arg, lr_arg, rl_arg, rr_arg;
4530 tree ll_inner, lr_inner, rl_inner, rr_inner;
4531 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4532 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4533 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4534 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4535 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4536 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4537 enum machine_mode lnmode, rnmode;
4538 tree ll_mask, lr_mask, rl_mask, rr_mask;
4539 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4540 tree l_const, r_const;
4541 tree lntype, rntype, result;
4542 int first_bit, end_bit;
4543 int volatilep;
4545 /* Start by getting the comparison codes. Fail if anything is volatile.
4546 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4547 it were surrounded with a NE_EXPR. */
4549 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4550 return 0;
4552 lcode = TREE_CODE (lhs);
4553 rcode = TREE_CODE (rhs);
4555 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4557 lhs = build2 (NE_EXPR, truth_type, lhs,
4558 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4559 lcode = NE_EXPR;
4562 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4564 rhs = build2 (NE_EXPR, truth_type, rhs,
4565 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4566 rcode = NE_EXPR;
4569 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4570 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4571 return 0;
4573 ll_arg = TREE_OPERAND (lhs, 0);
4574 lr_arg = TREE_OPERAND (lhs, 1);
4575 rl_arg = TREE_OPERAND (rhs, 0);
4576 rr_arg = TREE_OPERAND (rhs, 1);
4578 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4579 if (simple_operand_p (ll_arg)
4580 && simple_operand_p (lr_arg))
4582 tree result;
4583 if (operand_equal_p (ll_arg, rl_arg, 0)
4584 && operand_equal_p (lr_arg, rr_arg, 0))
4586 result = combine_comparisons (code, lcode, rcode,
4587 truth_type, ll_arg, lr_arg);
4588 if (result)
4589 return result;
4591 else if (operand_equal_p (ll_arg, rr_arg, 0)
4592 && operand_equal_p (lr_arg, rl_arg, 0))
4594 result = combine_comparisons (code, lcode,
4595 swap_tree_comparison (rcode),
4596 truth_type, ll_arg, lr_arg);
4597 if (result)
4598 return result;
4602 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4603 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4605 /* If the RHS can be evaluated unconditionally and its operands are
4606 simple, it wins to evaluate the RHS unconditionally on machines
4607 with expensive branches. In this case, this isn't a comparison
4608 that can be merged. Avoid doing this if the RHS is a floating-point
4609 comparison since those can trap. */
4611 if (BRANCH_COST >= 2
4612 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4613 && simple_operand_p (rl_arg)
4614 && simple_operand_p (rr_arg))
4616 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4617 if (code == TRUTH_OR_EXPR
4618 && lcode == NE_EXPR && integer_zerop (lr_arg)
4619 && rcode == NE_EXPR && integer_zerop (rr_arg)
4620 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4621 return build2 (NE_EXPR, truth_type,
4622 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4623 ll_arg, rl_arg),
4624 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4626 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4627 if (code == TRUTH_AND_EXPR
4628 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4629 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4630 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4631 return build2 (EQ_EXPR, truth_type,
4632 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4633 ll_arg, rl_arg),
4634 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4636 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4637 return build2 (code, truth_type, lhs, rhs);
4640 /* See if the comparisons can be merged. Then get all the parameters for
4641 each side. */
4643 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4644 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4645 return 0;
4647 volatilep = 0;
4648 ll_inner = decode_field_reference (ll_arg,
4649 &ll_bitsize, &ll_bitpos, &ll_mode,
4650 &ll_unsignedp, &volatilep, &ll_mask,
4651 &ll_and_mask);
4652 lr_inner = decode_field_reference (lr_arg,
4653 &lr_bitsize, &lr_bitpos, &lr_mode,
4654 &lr_unsignedp, &volatilep, &lr_mask,
4655 &lr_and_mask);
4656 rl_inner = decode_field_reference (rl_arg,
4657 &rl_bitsize, &rl_bitpos, &rl_mode,
4658 &rl_unsignedp, &volatilep, &rl_mask,
4659 &rl_and_mask);
4660 rr_inner = decode_field_reference (rr_arg,
4661 &rr_bitsize, &rr_bitpos, &rr_mode,
4662 &rr_unsignedp, &volatilep, &rr_mask,
4663 &rr_and_mask);
 4665 /* The inner operation on the lhs of each comparison must be the same
 4666 if we are to be able to do anything.
4667 Then see if we have constants. If not, the same must be true for
4668 the rhs's. */
4669 if (volatilep || ll_inner == 0 || rl_inner == 0
4670 || ! operand_equal_p (ll_inner, rl_inner, 0))
4671 return 0;
4673 if (TREE_CODE (lr_arg) == INTEGER_CST
4674 && TREE_CODE (rr_arg) == INTEGER_CST)
4675 l_const = lr_arg, r_const = rr_arg;
4676 else if (lr_inner == 0 || rr_inner == 0
4677 || ! operand_equal_p (lr_inner, rr_inner, 0))
4678 return 0;
4679 else
4680 l_const = r_const = 0;
4682 /* If either comparison code is not correct for our logical operation,
4683 fail. However, we can convert a one-bit comparison against zero into
4684 the opposite comparison against that bit being set in the field. */
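 /* Concretely: under TRUTH_AND_EXPR we want both subtests to be
 EQ_EXPR; a subtest such as (x & 8) != 0 has the wrong code, but 8 is
 a single bit, so it is equivalent to field == 8, and we flip it by
 using the mask itself as the constant. */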
4686 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4687 if (lcode != wanted_code)
4689 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4691 /* Make the left operand unsigned, since we are only interested
4692 in the value of one bit. Otherwise we are doing the wrong
4693 thing below. */
4694 ll_unsignedp = 1;
4695 l_const = ll_mask;
4697 else
4698 return 0;
4701 /* This is analogous to the code for l_const above. */
4702 if (rcode != wanted_code)
4704 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4706 rl_unsignedp = 1;
4707 r_const = rl_mask;
4709 else
4710 return 0;
4713 /* After this point all optimizations will generate bit-field
4714 references, which we might not want. */
4715 if (! lang_hooks.can_use_bit_fields_p ())
4716 return 0;
4718 /* See if we can find a mode that contains both fields being compared on
4719 the left. If we can't, fail. Otherwise, update all constants and masks
4720 to be relative to a field of that size. */
4721 first_bit = MIN (ll_bitpos, rl_bitpos);
4722 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4723 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4724 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4725 volatilep);
4726 if (lnmode == VOIDmode)
4727 return 0;
4729 lnbitsize = GET_MODE_BITSIZE (lnmode);
4730 lnbitpos = first_bit & ~ (lnbitsize - 1);
4731 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4732 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4734 if (BYTES_BIG_ENDIAN)
4736 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4737 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4740 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4741 size_int (xll_bitpos), 0);
4742 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4743 size_int (xrl_bitpos), 0);
4745 if (l_const)
4747 l_const = fold_convert (lntype, l_const);
4748 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4749 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4750 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4751 fold (build1 (BIT_NOT_EXPR,
4752 lntype, ll_mask)),
4753 0)))
4755 warning ("comparison is always %d", wanted_code == NE_EXPR);
4757 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4760 if (r_const)
4762 r_const = fold_convert (lntype, r_const);
4763 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4764 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4765 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4766 fold (build1 (BIT_NOT_EXPR,
4767 lntype, rl_mask)),
4768 0)))
4770 warning ("comparison is always %d", wanted_code == NE_EXPR);
4772 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4776 /* If the right sides are not constant, do the same for it. Also,
4777 disallow this optimization if a size or signedness mismatch occurs
4778 between the left and right sides. */
4779 if (l_const == 0)
4781 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4782 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4783 /* Make sure the two fields on the right
4784 correspond to the left without being swapped. */
4785 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4786 return 0;
4788 first_bit = MIN (lr_bitpos, rr_bitpos);
4789 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4790 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4791 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4792 volatilep);
4793 if (rnmode == VOIDmode)
4794 return 0;
4796 rnbitsize = GET_MODE_BITSIZE (rnmode);
4797 rnbitpos = first_bit & ~ (rnbitsize - 1);
4798 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4799 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4801 if (BYTES_BIG_ENDIAN)
4803 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4804 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4807 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4808 size_int (xlr_bitpos), 0);
4809 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4810 size_int (xrr_bitpos), 0);
4812 /* Make a mask that corresponds to both fields being compared.
4813 Do this for both items being compared. If the operands are the
4814 same size and the bits being compared are in the same position
4815 then we can do this by masking both and comparing the masked
4816 results. */
4817 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4818 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4819 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4821 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4822 ll_unsignedp || rl_unsignedp);
4823 if (! all_ones_mask_p (ll_mask, lnbitsize))
4824 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4826 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4827 lr_unsignedp || rr_unsignedp);
4828 if (! all_ones_mask_p (lr_mask, rnbitsize))
4829 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4831 return build2 (wanted_code, truth_type, lhs, rhs);
4834 /* There is still another way we can do something: If both pairs of
4835 fields being compared are adjacent, we may be able to make a wider
4836 field containing them both.
4838 Note that we still must mask the lhs/rhs expressions. Furthermore,
4839 the mask must be shifted to account for the shift done by
4840 make_bit_field_ref. */
4841 if ((ll_bitsize + ll_bitpos == rl_bitpos
4842 && lr_bitsize + lr_bitpos == rr_bitpos)
4843 || (ll_bitpos == rl_bitpos + rl_bitsize
4844 && lr_bitpos == rr_bitpos + rr_bitsize))
4846 tree type;
4848 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4849 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4850 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4851 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4853 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4854 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4855 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4856 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4858 /* Convert to the smaller type before masking out unwanted bits. */
4859 type = lntype;
4860 if (lntype != rntype)
4862 if (lnbitsize > rnbitsize)
4864 lhs = fold_convert (rntype, lhs);
4865 ll_mask = fold_convert (rntype, ll_mask);
4866 type = rntype;
4868 else if (lnbitsize < rnbitsize)
4870 rhs = fold_convert (lntype, rhs);
4871 lr_mask = fold_convert (lntype, lr_mask);
4872 type = lntype;
4876 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4877 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4879 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4880 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4882 return build2 (wanted_code, truth_type, lhs, rhs);
4885 return 0;
4888 /* Handle the case of comparisons with constants. If there is something in
4889 common between the masks, those bits of the constants must be the same.
4890 If not, the condition is always false. Test for this to avoid generating
4891 incorrect code below. */
4892 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4893 if (! integer_zerop (result)
4894 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4895 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4897 if (wanted_code == NE_EXPR)
4899 warning ("%<or%> of unmatched not-equal tests is always 1");
4900 return constant_boolean_node (true, truth_type);
4902 else
4904 warning ("%<and%> of mutually exclusive equal-tests is always 0");
4905 return constant_boolean_node (false, truth_type);
4909 /* Construct the expression we will return. First get the component
4910 reference we will make. Unless the mask is all ones the width of
4911 that field, perform the mask operation. Then compare with the
4912 merged constant. */
4913 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4914 ll_unsignedp || rl_unsignedp);
4916 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4917 if (! all_ones_mask_p (ll_mask, lnbitsize))
4918 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4920 return build2 (wanted_code, truth_type, result,
4921 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4924 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4925 constant. */
4927 static tree
4928 optimize_minmax_comparison (tree t)
4930 tree type = TREE_TYPE (t);
4931 tree arg0 = TREE_OPERAND (t, 0);
4932 enum tree_code op_code;
4933 tree comp_const = TREE_OPERAND (t, 1);
4934 tree minmax_const;
4935 int consts_equal, consts_lt;
4936 tree inner;
4938 STRIP_SIGN_NOPS (arg0);
4940 op_code = TREE_CODE (arg0);
4941 minmax_const = TREE_OPERAND (arg0, 1);
4942 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4943 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4944 inner = TREE_OPERAND (arg0, 0);
4946 /* If something does not permit us to optimize, return the original tree. */
4947 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4948 || TREE_CODE (comp_const) != INTEGER_CST
4949 || TREE_CONSTANT_OVERFLOW (comp_const)
4950 || TREE_CODE (minmax_const) != INTEGER_CST
4951 || TREE_CONSTANT_OVERFLOW (minmax_const))
4952 return t;
4954 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4955 and GT_EXPR, doing the rest with recursive calls using logical
4956 simplifications. */
4957 switch (TREE_CODE (t))
4959 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4960 return
4961 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4963 case GE_EXPR:
4964 return
4965 fold (build2 (TRUTH_ORIF_EXPR, type,
4966 optimize_minmax_comparison
4967 (build2 (EQ_EXPR, type, arg0, comp_const)),
4968 optimize_minmax_comparison
4969 (build2 (GT_EXPR, type, arg0, comp_const))));
4971 case EQ_EXPR:
4972 if (op_code == MAX_EXPR && consts_equal)
4973 /* MAX (X, 0) == 0 -> X <= 0 */
4974 return fold (build2 (LE_EXPR, type, inner, comp_const));
4976 else if (op_code == MAX_EXPR && consts_lt)
4977 /* MAX (X, 0) == 5 -> X == 5 */
4978 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4980 else if (op_code == MAX_EXPR)
4981 /* MAX (X, 0) == -1 -> false */
4982 return omit_one_operand (type, integer_zero_node, inner);
4984 else if (consts_equal)
4985 /* MIN (X, 0) == 0 -> X >= 0 */
4986 return fold (build2 (GE_EXPR, type, inner, comp_const));
4988 else if (consts_lt)
4989 /* MIN (X, 0) == 5 -> false */
4990 return omit_one_operand (type, integer_zero_node, inner);
4992 else
4993 /* MIN (X, 0) == -1 -> X == -1 */
4994 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4996 case GT_EXPR:
4997 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4998 /* MAX (X, 0) > 0 -> X > 0
4999 MAX (X, 0) > 5 -> X > 5 */
5000 return fold (build2 (GT_EXPR, type, inner, comp_const));
5002 else if (op_code == MAX_EXPR)
5003 /* MAX (X, 0) > -1 -> true */
5004 return omit_one_operand (type, integer_one_node, inner);
5006 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5007 /* MIN (X, 0) > 0 -> false
5008 MIN (X, 0) > 5 -> false */
5009 return omit_one_operand (type, integer_zero_node, inner);
5011 else
5012 /* MIN (X, 0) > -1 -> X > -1 */
5013 return fold (build2 (GT_EXPR, type, inner, comp_const));
5015 default:
5016 return t;
 5020 /* T is an integer expression that is being multiplied or divided by,
 5021 or taken modulo, a constant C (CODE says which operation and what kind
 5022 of divide or modulus). See if we can eliminate that operation by folding it with
5023 other operations already in T. WIDE_TYPE, if non-null, is a type that
5024 should be used for the computation if wider than our type.
5026 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5027 (X * 2) + (Y * 4). We must, however, be assured that either the original
5028 expression would not overflow or that overflow is undefined for the type
5029 in the language in question.
5031 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5032 the machine has a multiply-accumulate insn or that this is part of an
5033 addressing calculation.
5035 If we return a non-null expression, it is an equivalent form of the
5036 original computation, but need not be in the original type. */
5038 static tree
5039 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5041 /* To avoid exponential search depth, refuse to allow recursion past
5042 three levels. Beyond that (1) it's highly unlikely that we'll find
5043 something interesting and (2) we've probably processed it before
5044 when we built the inner expression. */
5046 static int depth;
5047 tree ret;
5049 if (depth > 3)
5050 return NULL;
5052 depth++;
5053 ret = extract_muldiv_1 (t, c, code, wide_type);
5054 depth--;
5056 return ret;
5059 static tree
5060 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5062 tree type = TREE_TYPE (t);
5063 enum tree_code tcode = TREE_CODE (t);
5064 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5065 > GET_MODE_SIZE (TYPE_MODE (type)))
5066 ? wide_type : type);
5067 tree t1, t2;
5068 int same_p = tcode == code;
5069 tree op0 = NULL_TREE, op1 = NULL_TREE;
5071 /* Don't deal with constants of zero here; they confuse the code below. */
5072 if (integer_zerop (c))
5073 return NULL_TREE;
5075 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5076 op0 = TREE_OPERAND (t, 0);
5078 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5079 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5081 /* Note that we need not handle conditional operations here since fold
5082 already handles those cases. So just do arithmetic here. */
5083 switch (tcode)
5085 case INTEGER_CST:
5086 /* For a constant, we can always simplify if we are a multiply
5087 or (for divide and modulus) if it is a multiple of our constant. */
5088 if (code == MULT_EXPR
5089 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5090 return const_binop (code, fold_convert (ctype, t),
5091 fold_convert (ctype, c), 0);
5092 break;
5094 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5095 /* If op0 is an expression ... */
5096 if ((COMPARISON_CLASS_P (op0)
5097 || UNARY_CLASS_P (op0)
5098 || BINARY_CLASS_P (op0)
5099 || EXPRESSION_CLASS_P (op0))
5100 /* ... and is unsigned, and its type is smaller than ctype,
5101 then we cannot pass through as widening. */
5102 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5103 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5104 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5105 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5106 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5107 /* ... or this is a truncation (t is narrower than op0),
5108 then we cannot pass through this narrowing. */
5109 || (GET_MODE_SIZE (TYPE_MODE (type))
5110 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5111 /* ... or signedness changes for division or modulus,
5112 then we cannot pass through this conversion. */
5113 || (code != MULT_EXPR
5114 && (TYPE_UNSIGNED (ctype)
5115 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5116 break;
5118 /* Pass the constant down and see if we can make a simplification. If
5119 we can, replace this expression with the inner simplification for
5120 possible later conversion to our or some other type. */
5121 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5122 && TREE_CODE (t2) == INTEGER_CST
5123 && ! TREE_CONSTANT_OVERFLOW (t2)
5124 && (0 != (t1 = extract_muldiv (op0, t2, code,
5125 code == MULT_EXPR
5126 ? ctype : NULL_TREE))))
5127 return t1;
5128 break;
5130 case ABS_EXPR:
5131 /* If widening the type changes it from signed to unsigned, then we
5132 must avoid building ABS_EXPR itself as unsigned. */
5133 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5135 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5136 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5138 t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
5139 return fold_convert (ctype, t1);
5141 break;
5143 /* FALLTHROUGH */
5144 case NEGATE_EXPR:
5145 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5146 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5147 break;
5149 case MIN_EXPR: case MAX_EXPR:
5150 /* If widening the type changes the signedness, then we can't perform
5151 this optimization as that changes the result. */
5152 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5153 break;
5155 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5156 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5157 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5159 if (tree_int_cst_sgn (c) < 0)
5160 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5162 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5163 fold_convert (ctype, t2)));
5165 break;
5167 case LSHIFT_EXPR: case RSHIFT_EXPR:
5168 /* If the second operand is constant, this is a multiplication
5169 or floor division, by a power of two, so we can treat it that
5170 way unless the multiplier or divisor overflows. Signed
5171 left-shift overflow is implementation-defined rather than
5172 undefined in C90, so do not convert signed left shift into
5173 multiplication. */
5174 if (TREE_CODE (op1) == INTEGER_CST
5175 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5176 /* const_binop may not detect overflow correctly,
5177 so check for it explicitly here. */
5178 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5179 && TREE_INT_CST_HIGH (op1) == 0
5180 && 0 != (t1 = fold_convert (ctype,
5181 const_binop (LSHIFT_EXPR,
5182 size_one_node,
5183 op1, 0)))
5184 && ! TREE_OVERFLOW (t1))
5185 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5186 ? MULT_EXPR : FLOOR_DIV_EXPR,
5187 ctype, fold_convert (ctype, op0), t1),
5188 c, code, wide_type);
5189 break;
5191 case PLUS_EXPR: case MINUS_EXPR:
5192 /* See if we can eliminate the operation on both sides. If we can, we
5193 can return a new PLUS or MINUS. If we can't, the only remaining
5194 cases where we can do anything are if the second operand is a
5195 constant. */
5196 t1 = extract_muldiv (op0, c, code, wide_type);
5197 t2 = extract_muldiv (op1, c, code, wide_type);
5198 if (t1 != 0 && t2 != 0
5199 && (code == MULT_EXPR
5200 /* If not multiplication, we can only do this if both operands
5201 are divisible by c. */
5202 || (multiple_of_p (ctype, op0, c)
5203 && multiple_of_p (ctype, op1, c))))
5204 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5205 fold_convert (ctype, t2)));
5207 /* If this was a subtraction, negate OP1 and set it to be an addition.
5208 This simplifies the logic below. */
5209 if (tcode == MINUS_EXPR)
5210 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5212 if (TREE_CODE (op1) != INTEGER_CST)
5213 break;
5215 /* If either OP1 or C are negative, this optimization is not safe for
5216 some of the division and remainder types while for others we need
5217 to change the code. */
5218 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5220 if (code == CEIL_DIV_EXPR)
5221 code = FLOOR_DIV_EXPR;
5222 else if (code == FLOOR_DIV_EXPR)
5223 code = CEIL_DIV_EXPR;
5224 else if (code != MULT_EXPR
5225 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5226 break;
 5229 /* If this is a multiply, or (for a division or modulus) if OP1 is
 5230 a multiple of our constant, do the operation and verify it doesn't overflow.
5231 if (code == MULT_EXPR
5232 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5234 op1 = const_binop (code, fold_convert (ctype, op1),
5235 fold_convert (ctype, c), 0);
5236 /* We allow the constant to overflow with wrapping semantics. */
5237 if (op1 == 0
5238 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5239 break;
5241 else
5242 break;
 5244 /* If we have an unsigned type that is not a sizetype, we cannot widen
5245 the operation since it will change the result if the original
5246 computation overflowed. */
5247 if (TYPE_UNSIGNED (ctype)
5248 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5249 && ctype != type)
5250 break;
5252 /* If we were able to eliminate our operation from the first side,
5253 apply our operation to the second side and reform the PLUS. */
5254 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5255 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5257 /* The last case is if we are a multiply. In that case, we can
5258 apply the distributive law to commute the multiply and addition
5259 if the multiplication of the constants doesn't overflow. */
5260 if (code == MULT_EXPR)
5261 return fold (build2 (tcode, ctype,
5262 fold (build2 (code, ctype,
5263 fold_convert (ctype, op0),
5264 fold_convert (ctype, c))),
5265 op1));
5267 break;
5269 case MULT_EXPR:
5270 /* We have a special case here if we are doing something like
5271 (C * 8) % 4 since we know that's zero. */
5272 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5273 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5274 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5275 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5276 return omit_one_operand (type, integer_zero_node, op0);
5278 /* ... fall through ... */
5280 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5281 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5282 /* If we can extract our operation from the LHS, do so and return a
5283 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5284 do something only if the second operand is a constant. */
5285 if (same_p
5286 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5287 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5288 fold_convert (ctype, op1)));
5289 else if (tcode == MULT_EXPR && code == MULT_EXPR
5290 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5291 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5292 fold_convert (ctype, t1)));
5293 else if (TREE_CODE (op1) != INTEGER_CST)
5294 return 0;
5296 /* If these are the same operation types, we can associate them
5297 assuming no overflow. */
5298 if (tcode == code
5299 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5300 fold_convert (ctype, c), 0))
5301 && ! TREE_OVERFLOW (t1))
5302 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5304 /* If these operations "cancel" each other, we have the main
5305 optimizations of this pass, which occur when either constant is a
5306 multiple of the other, in which case we replace this with either an
 5307 operation of CODE or TCODE.
5309 If we have an unsigned type that is not a sizetype, we cannot do
5310 this since it will change the result if the original computation
5311 overflowed. */
5312 if ((! TYPE_UNSIGNED (ctype)
5313 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5314 && ! flag_wrapv
5315 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5316 || (tcode == MULT_EXPR
5317 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5318 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5320 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5321 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5322 fold_convert (ctype,
5323 const_binop (TRUNC_DIV_EXPR,
5324 op1, c, 0))));
5325 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5326 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5327 fold_convert (ctype,
5328 const_binop (TRUNC_DIV_EXPR,
5329 c, op1, 0))));
5331 break;
5333 default:
5334 break;
5337 return 0;
5340 /* Return a node which has the indicated constant VALUE (either 0 or
5341 1), and is of the indicated TYPE. */
5343 tree
5344 constant_boolean_node (int value, tree type)
5346 if (type == integer_type_node)
5347 return value ? integer_one_node : integer_zero_node;
5348 else if (type == boolean_type_node)
5349 return value ? boolean_true_node : boolean_false_node;
5350 else if (TREE_CODE (type) == BOOLEAN_TYPE)
5351 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5352 : integer_zero_node);
5353 else
5354 return build_int_cst (type, value);
5357 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
 5358 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5359 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5360 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5361 COND is the first argument to CODE; otherwise (as in the example
5362 given here), it is the second argument. TYPE is the type of the
5363 original expression. Return NULL_TREE if no simplification is
5364 possible. */
5366 static tree
5367 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
5368 tree cond, tree arg, int cond_first_p)
5370 tree test, true_value, false_value;
5371 tree lhs = NULL_TREE;
5372 tree rhs = NULL_TREE;
5374 /* This transformation is only worthwhile if we don't have to wrap
 5375 arg in a SAVE_EXPR, and the operation can be simplified on at least
 5376 one of the branches once it's pushed inside the COND_EXPR. */
5377 if (!TREE_CONSTANT (arg))
5378 return NULL_TREE;
5380 if (TREE_CODE (cond) == COND_EXPR)
5382 test = TREE_OPERAND (cond, 0);
5383 true_value = TREE_OPERAND (cond, 1);
5384 false_value = TREE_OPERAND (cond, 2);
 5385 /* If this operand is a void-typed expression (such as a throw),
 5386 it does not make sense to try to perform a logical or arithmetic
 5387 operation involving it. */
5388 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5389 lhs = true_value;
5390 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5391 rhs = false_value;
5393 else
5395 tree testtype = TREE_TYPE (cond);
5396 test = cond;
5397 true_value = constant_boolean_node (true, testtype);
5398 false_value = constant_boolean_node (false, testtype);
5401 if (lhs == 0)
5402 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5403 : build2 (code, type, arg, true_value));
5404 if (rhs == 0)
5405 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5406 : build2 (code, type, arg, false_value));
5408 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5409 return fold_convert (type, test);
5413 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5415 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5416 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5417 ADDEND is the same as X.
5419 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5420 and finite. The problematic cases are when X is zero, and its mode
5421 has signed zeros. In the case of rounding towards -infinity,
5422 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
 5423 modes, X + 0 is not the same as X because -0 + 0 is +0. */
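 /* Worked cases: with X == -0.0, X + 0.0 evaluates to +0.0, so folding
 X + 0.0 into X is wrong once signed zeros are honored; X - 0.0 is
 still -0.0, so the NEGATE direction survives -- unless the rounding
 mode may be towards -infinity, where +0.0 - 0.0 yields -0.0 and the
 fold breaks for X == +0.0. */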
5425 static bool
5426 fold_real_zero_addition_p (tree type, tree addend, int negate)
5428 if (!real_zerop (addend))
5429 return false;
5431 /* Don't allow the fold with -fsignaling-nans. */
5432 if (HONOR_SNANS (TYPE_MODE (type)))
5433 return false;
5435 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5436 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5437 return true;
5439 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5440 if (TREE_CODE (addend) == REAL_CST
5441 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5442 negate = !negate;
5444 /* The mode has signed zeros, and we have to honor their sign.
5445 In this situation, there is only one case we can return true for.
5446 X - 0 is the same as X unless rounding towards -infinity is
5447 supported. */
5448 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5451 /* Subroutine of fold() that checks comparisons of built-in math
5452 functions against real constants.
5454 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5455 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5456 is the type of the result and ARG0 and ARG1 are the operands of the
5457 comparison. ARG1 must be a TREE_REAL_CST.
5459 The function returns the constant folded tree if a simplification
5460 can be made, and NULL_TREE otherwise. */
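/* Worked instances of the sqrt foldings below (illustrative values):
   "sqrt(x) > 2.0" folds to "x > 4.0", and "sqrt(x) < -1.0" folds to
   constant false, since sqrt never returns a negative value.  */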
5462 static tree
5463 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5464 tree type, tree arg0, tree arg1)
5466 REAL_VALUE_TYPE c;
5468 if (BUILTIN_SQRT_P (fcode))
5470 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5471 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5473 c = TREE_REAL_CST (arg1);
5474 if (REAL_VALUE_NEGATIVE (c))
5476	  /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are all false, if y is negative.  */
5477 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5478 return omit_one_operand (type, integer_zero_node, arg);
5480 /* sqrt(x) > y is always true, if y is negative and we
5481 don't care about NaNs, i.e. negative values of x. */
5482 if (code == NE_EXPR || !HONOR_NANS (mode))
5483 return omit_one_operand (type, integer_one_node, arg);
5485 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5486 return fold (build2 (GE_EXPR, type, arg,
5487 build_real (TREE_TYPE (arg), dconst0)));
5489 else if (code == GT_EXPR || code == GE_EXPR)
5491 REAL_VALUE_TYPE c2;
5493 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5494 real_convert (&c2, mode, &c2);
5496 if (REAL_VALUE_ISINF (c2))
5498 /* sqrt(x) > y is x == +Inf, when y is very large. */
5499 if (HONOR_INFINITIES (mode))
5500 return fold (build2 (EQ_EXPR, type, arg,
5501 build_real (TREE_TYPE (arg), c2)));
5503 /* sqrt(x) > y is always false, when y is very large
5504 and we don't care about infinities. */
5505 return omit_one_operand (type, integer_zero_node, arg);
5508 /* sqrt(x) > c is the same as x > c*c. */
5509 return fold (build2 (code, type, arg,
5510 build_real (TREE_TYPE (arg), c2)));
5512 else if (code == LT_EXPR || code == LE_EXPR)
5514 REAL_VALUE_TYPE c2;
5516 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5517 real_convert (&c2, mode, &c2);
5519 if (REAL_VALUE_ISINF (c2))
5521 /* sqrt(x) < y is always true, when y is a very large
5522 value and we don't care about NaNs or Infinities. */
5523 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5524 return omit_one_operand (type, integer_one_node, arg);
5526 /* sqrt(x) < y is x != +Inf when y is very large and we
5527 don't care about NaNs. */
5528 if (! HONOR_NANS (mode))
5529 return fold (build2 (NE_EXPR, type, arg,
5530 build_real (TREE_TYPE (arg), c2)));
5532 /* sqrt(x) < y is x >= 0 when y is very large and we
5533 don't care about Infinities. */
5534 if (! HONOR_INFINITIES (mode))
5535 return fold (build2 (GE_EXPR, type, arg,
5536 build_real (TREE_TYPE (arg), dconst0)));
5538 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5539 if (lang_hooks.decls.global_bindings_p () != 0
5540 || CONTAINS_PLACEHOLDER_P (arg))
5541 return NULL_TREE;
5543 arg = save_expr (arg);
5544 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5545 fold (build2 (GE_EXPR, type, arg,
5546 build_real (TREE_TYPE (arg),
5547 dconst0))),
5548 fold (build2 (NE_EXPR, type, arg,
5549 build_real (TREE_TYPE (arg),
5550 c2)))));
5553 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5554 if (! HONOR_NANS (mode))
5555 return fold (build2 (code, type, arg,
5556 build_real (TREE_TYPE (arg), c2)));
5558 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5559 if (lang_hooks.decls.global_bindings_p () == 0
5560 && ! CONTAINS_PLACEHOLDER_P (arg))
5562 arg = save_expr (arg);
5563 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5564 fold (build2 (GE_EXPR, type, arg,
5565 build_real (TREE_TYPE (arg),
5566 dconst0))),
5567 fold (build2 (code, type, arg,
5568 build_real (TREE_TYPE (arg),
5569 c2)))));
5574 return NULL_TREE;
5577 /* Subroutine of fold() that optimizes comparisons against Infinities,
5578 either +Inf or -Inf.
5580 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5581 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5582 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5584 The function returns the constant folded tree if a simplification
5585 can be made, and NULL_TREE otherwise. */
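/* Example foldings performed below (illustrative, for double):
   "x < +Inf" becomes "x <= DBL_MAX" and "x >= +Inf" becomes
   "x > DBL_MAX"; for -Inf the sense of the comparison is swapped
   first.  */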
5587 static tree
5588 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5590 enum machine_mode mode;
5591 REAL_VALUE_TYPE max;
5592 tree temp;
5593 bool neg;
5595 mode = TYPE_MODE (TREE_TYPE (arg0));
5597 /* For negative infinity swap the sense of the comparison. */
5598 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5599 if (neg)
5600 code = swap_tree_comparison (code);
5602 switch (code)
5604 case GT_EXPR:
5605      /* x > +Inf is always false, if we ignore sNaNs.  */
5606 if (HONOR_SNANS (mode))
5607 return NULL_TREE;
5608 return omit_one_operand (type, integer_zero_node, arg0);
5610 case LE_EXPR:
5611      /* x <= +Inf is always true, if we don't care about NaNs.  */
5612 if (! HONOR_NANS (mode))
5613 return omit_one_operand (type, integer_one_node, arg0);
5615      /* x <= +Inf is the same as x == x, i.e. !isnan(x).  */
5616 if (lang_hooks.decls.global_bindings_p () == 0
5617 && ! CONTAINS_PLACEHOLDER_P (arg0))
5619 arg0 = save_expr (arg0);
5620 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5622 break;
5624 case EQ_EXPR:
5625 case GE_EXPR:
5626 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5627 real_maxval (&max, neg, mode);
5628 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5629 arg0, build_real (TREE_TYPE (arg0), max)));
5631 case LT_EXPR:
5632 /* x < +Inf is always equal to x <= DBL_MAX. */
5633 real_maxval (&max, neg, mode);
5634 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5635 arg0, build_real (TREE_TYPE (arg0), max)));
5637 case NE_EXPR:
5638 /* x != +Inf is always equal to !(x > DBL_MAX). */
5639 real_maxval (&max, neg, mode);
5640 if (! HONOR_NANS (mode))
5641 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5642 arg0, build_real (TREE_TYPE (arg0), max)));
5644 /* The transformation below creates non-gimple code and thus is
5645 not appropriate if we are in gimple form. */
5646 if (in_gimple_form)
5647 return NULL_TREE;
5649 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5650 arg0, build_real (TREE_TYPE (arg0), max)));
5651 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5653 default:
5654 break;
5657 return NULL_TREE;
5660 /* Subroutine of fold() that optimizes comparisons of a division by
5661 a nonzero integer constant against an integer constant, i.e.
5662 X/C1 op C2.
5664 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5665 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5666    are the operands of the comparison.  ARG1 must be an INTEGER_CST.
5668 The function returns the constant folded tree if a simplification
5669 can be made, and NULL_TREE otherwise. */
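/* A worked example (illustrative): for signed integer X, "X / 3 == 2"
   holds exactly when 6 <= X <= 8, so the comparison is rewritten as a
   range check on [6, 8].  The overflow flags computed below select the
   degenerate cases where a bound is not representable.  */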
5671 static tree
5672 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5674 tree prod, tmp, hi, lo;
5675 tree arg00 = TREE_OPERAND (arg0, 0);
5676 tree arg01 = TREE_OPERAND (arg0, 1);
5677 unsigned HOST_WIDE_INT lpart;
5678 HOST_WIDE_INT hpart;
5679 int overflow;
5681 /* We have to do this the hard way to detect unsigned overflow.
5682 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5683 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5684 TREE_INT_CST_HIGH (arg01),
5685 TREE_INT_CST_LOW (arg1),
5686 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5687 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5688 prod = force_fit_type (prod, -1, overflow, false);
5690 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5692 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5693 lo = prod;
5695 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5696 overflow = add_double (TREE_INT_CST_LOW (prod),
5697 TREE_INT_CST_HIGH (prod),
5698 TREE_INT_CST_LOW (tmp),
5699 TREE_INT_CST_HIGH (tmp),
5700 &lpart, &hpart);
5701 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5702 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5703 TREE_CONSTANT_OVERFLOW (prod));
5705 else if (tree_int_cst_sgn (arg01) >= 0)
5707 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5708 switch (tree_int_cst_sgn (arg1))
5710 case -1:
5711 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5712 hi = prod;
5713 break;
5715 case 0:
5716 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5717 hi = tmp;
5718 break;
5720 case 1:
5721 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5722 lo = prod;
5723 break;
5725 default:
5726 gcc_unreachable ();
5729 else
5731 /* A negative divisor reverses the relational operators. */
5732 code = swap_tree_comparison (code);
5734 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5735 switch (tree_int_cst_sgn (arg1))
5737 case -1:
5738 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5739 lo = prod;
5740 break;
5742 case 0:
5743 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5744 lo = tmp;
5745 break;
5747 case 1:
5748 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5749 hi = prod;
5750 break;
5752 default:
5753 gcc_unreachable ();
5757 switch (code)
5759 case EQ_EXPR:
5760 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5761 return omit_one_operand (type, integer_zero_node, arg00);
5762 if (TREE_OVERFLOW (hi))
5763 return fold (build2 (GE_EXPR, type, arg00, lo));
5764 if (TREE_OVERFLOW (lo))
5765 return fold (build2 (LE_EXPR, type, arg00, hi));
5766 return build_range_check (type, arg00, 1, lo, hi);
5768 case NE_EXPR:
5769 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5770 return omit_one_operand (type, integer_one_node, arg00);
5771 if (TREE_OVERFLOW (hi))
5772 return fold (build2 (LT_EXPR, type, arg00, lo));
5773 if (TREE_OVERFLOW (lo))
5774 return fold (build2 (GT_EXPR, type, arg00, hi));
5775 return build_range_check (type, arg00, 0, lo, hi);
5777 case LT_EXPR:
5778 if (TREE_OVERFLOW (lo))
5779 return omit_one_operand (type, integer_zero_node, arg00);
5780 return fold (build2 (LT_EXPR, type, arg00, lo));
5782 case LE_EXPR:
5783 if (TREE_OVERFLOW (hi))
5784 return omit_one_operand (type, integer_one_node, arg00);
5785 return fold (build2 (LE_EXPR, type, arg00, hi));
5787 case GT_EXPR:
5788 if (TREE_OVERFLOW (hi))
5789 return omit_one_operand (type, integer_zero_node, arg00);
5790 return fold (build2 (GT_EXPR, type, arg00, hi));
5792 case GE_EXPR:
5793 if (TREE_OVERFLOW (lo))
5794 return omit_one_operand (type, integer_one_node, arg00);
5795 return fold (build2 (GE_EXPR, type, arg00, lo));
5797 default:
5798 break;
5801 return NULL_TREE;
5805 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5806 equality/inequality test, then return a simplified form of
5807 the test using shifts and logical operations. Otherwise return
5808 NULL. TYPE is the desired result type. */
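/* Sketch of the result for a concrete input (illustrative): testing
   "(x & 8) != 0" with result type int becomes "(int) ((x >> 3) & 1)";
   for "(x & 8) == 0" the shifted bit is additionally XORed with 1.  */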
5810 tree
5811 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5812 tree result_type)
5814 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5815 operand 0. */
5816 if (code == TRUTH_NOT_EXPR)
5818 code = TREE_CODE (arg0);
5819 if (code != NE_EXPR && code != EQ_EXPR)
5820 return NULL_TREE;
5822 /* Extract the arguments of the EQ/NE. */
5823 arg1 = TREE_OPERAND (arg0, 1);
5824 arg0 = TREE_OPERAND (arg0, 0);
5826 /* This requires us to invert the code. */
5827 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5830 /* If this is testing a single bit, we can optimize the test. */
5831 if ((code == NE_EXPR || code == EQ_EXPR)
5832 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5833 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5835 tree inner = TREE_OPERAND (arg0, 0);
5836 tree type = TREE_TYPE (arg0);
5837 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5838 enum machine_mode operand_mode = TYPE_MODE (type);
5839 int ops_unsigned;
5840 tree signed_type, unsigned_type, intermediate_type;
5841 tree arg00;
5843 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5844 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5845 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5846 if (arg00 != NULL_TREE
5847 /* This is only a win if casting to a signed type is cheap,
5848 i.e. when arg00's type is not a partial mode. */
5849 && TYPE_PRECISION (TREE_TYPE (arg00))
5850 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5852 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5853 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5854 result_type, fold_convert (stype, arg00),
5855 fold_convert (stype, integer_zero_node)));
5858      /* Otherwise we have (A & C) != 0 where C is a single bit;
5859	 convert that into ((A >> C2) & 1), where C2 = log2(C).
5860 Similarly for (A & C) == 0. */
5862 /* If INNER is a right shift of a constant and it plus BITNUM does
5863 not overflow, adjust BITNUM and INNER. */
5864 if (TREE_CODE (inner) == RSHIFT_EXPR
5865 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5866 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5867 && bitnum < TYPE_PRECISION (type)
5868 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5869 bitnum - TYPE_PRECISION (type)))
5871 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5872 inner = TREE_OPERAND (inner, 0);
5875 /* If we are going to be able to omit the AND below, we must do our
5876 operations as unsigned. If we must use the AND, we have a choice.
5877 Normally unsigned is faster, but for some machines signed is. */
5878 #ifdef LOAD_EXTEND_OP
5879 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
5880 && !flag_syntax_only) ? 0 : 1;
5881 #else
5882 ops_unsigned = 1;
5883 #endif
5885 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5886 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5887 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5888 inner = fold_convert (intermediate_type, inner);
5890 if (bitnum != 0)
5891 inner = build2 (RSHIFT_EXPR, intermediate_type,
5892 inner, size_int (bitnum));
5894 if (code == EQ_EXPR)
5895 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5896 inner, integer_one_node));
5898 /* Put the AND last so it can combine with more things. */
5899 inner = build2 (BIT_AND_EXPR, intermediate_type,
5900 inner, integer_one_node);
5902 /* Make sure to return the proper type. */
5903 inner = fold_convert (result_type, inner);
5905 return inner;
5907 return NULL_TREE;
5910 /* Check whether we are allowed to reorder operands arg0 and arg1,
5911 such that the evaluation of arg1 occurs before arg0. */
5913 static bool
5914 reorder_operands_p (tree arg0, tree arg1)
5916 if (! flag_evaluation_order)
5917 return true;
5918 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5919 return true;
5920 return ! TREE_SIDE_EFFECTS (arg0)
5921 && ! TREE_SIDE_EFFECTS (arg1);
5924 /* Test whether it is preferable to swap two operands, ARG0 and
5925 ARG1, for example because ARG0 is an integer constant and ARG1
5926 isn't. If REORDER is true, only recommend swapping if we can
5927 evaluate the operands in reverse order. */
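/* For example (illustrative): in "5 < n" the INTEGER_CST is the first
   operand, so this predicate returns true and fold canonicalizes the
   comparison as "n > 5", placing the constant second.  */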
5929 bool
5930 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5932 STRIP_SIGN_NOPS (arg0);
5933 STRIP_SIGN_NOPS (arg1);
5935 if (TREE_CODE (arg1) == INTEGER_CST)
5936 return 0;
5937 if (TREE_CODE (arg0) == INTEGER_CST)
5938 return 1;
5940 if (TREE_CODE (arg1) == REAL_CST)
5941 return 0;
5942 if (TREE_CODE (arg0) == REAL_CST)
5943 return 1;
5945 if (TREE_CODE (arg1) == COMPLEX_CST)
5946 return 0;
5947 if (TREE_CODE (arg0) == COMPLEX_CST)
5948 return 1;
5950 if (TREE_CONSTANT (arg1))
5951 return 0;
5952 if (TREE_CONSTANT (arg0))
5953 return 1;
5955 if (optimize_size)
5956 return 0;
5958 if (reorder && flag_evaluation_order
5959 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5960 return 0;
5962 if (DECL_P (arg1))
5963 return 0;
5964 if (DECL_P (arg0))
5965 return 1;
5967 /* It is preferable to swap two SSA_NAME to ensure a canonical form
5968 for commutative and comparison operators. Ensuring a canonical
5969 form allows the optimizers to find additional redundancies without
5970 having to explicitly check for both orderings. */
5971 if (TREE_CODE (arg0) == SSA_NAME
5972 && TREE_CODE (arg1) == SSA_NAME
5973 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5974 return 1;
5976 return 0;
5979 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
5980 ARG0 is extended to a wider type. */
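/* A concrete case (illustrative): for a variable of type signed char,
   "(int) c == 300" folds to constant false, because 300 lies outside
   [-128, 127], the range representable in the shorter type.  */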
5982 static tree
5983 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
5985 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
5986 tree arg1_unw;
5987 tree shorter_type, outer_type;
5988 tree min, max;
5989 bool above, below;
5991 if (arg0_unw == arg0)
5992 return NULL_TREE;
5993 shorter_type = TREE_TYPE (arg0_unw);
5995 arg1_unw = get_unwidened (arg1, shorter_type);
5996 if (!arg1_unw)
5997 return NULL_TREE;
5999 /* If possible, express the comparison in the shorter mode. */
6000 if ((code == EQ_EXPR || code == NE_EXPR
6001 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6002 && (TREE_TYPE (arg1_unw) == shorter_type
6003 || (TREE_CODE (arg1_unw) == INTEGER_CST
6004 && TREE_CODE (shorter_type) == INTEGER_TYPE
6005 && int_fits_type_p (arg1_unw, shorter_type))))
6006 return fold (build (code, type, arg0_unw,
6007 fold_convert (shorter_type, arg1_unw)));
6009 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6010 return NULL_TREE;
6012   /* If we are comparing with an integer that does not fit into the range
6013 of the shorter type, the result is known. */
6014 outer_type = TREE_TYPE (arg1_unw);
6015 min = lower_bound_in_type (outer_type, shorter_type);
6016 max = upper_bound_in_type (outer_type, shorter_type);
6018 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6019 max, arg1_unw));
6020 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6021 arg1_unw, min));
6023 switch (code)
6025 case EQ_EXPR:
6026 if (above || below)
6027 return omit_one_operand (type, integer_zero_node, arg0);
6028 break;
6030 case NE_EXPR:
6031 if (above || below)
6032 return omit_one_operand (type, integer_one_node, arg0);
6033 break;
6035 case LT_EXPR:
6036 case LE_EXPR:
6037 if (above)
6038 return omit_one_operand (type, integer_one_node, arg0);
6039 else if (below)
6040 return omit_one_operand (type, integer_zero_node, arg0);
6042 case GT_EXPR:
6043 case GE_EXPR:
6044 if (above)
6045 return omit_one_operand (type, integer_zero_node, arg0);
6046 else if (below)
6047 return omit_one_operand (type, integer_one_node, arg0);
6049 default:
6050 break;
6053 return NULL_TREE;
6056 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where the
6057    conversion on ARG0 changes only the signedness.  */
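/* For example (illustrative): for a signed int i, the comparison
   "(unsigned int) i == 5u" becomes "i == 5", since the conversion
   changes only the signedness and equality is insensitive to it.  */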
6059 static tree
6060 fold_sign_changed_comparison (enum tree_code code, tree type,
6061 tree arg0, tree arg1)
6063 tree arg0_inner, tmp;
6064 tree inner_type, outer_type;
6066 if (TREE_CODE (arg0) != NOP_EXPR)
6067 return NULL_TREE;
6069 outer_type = TREE_TYPE (arg0);
6070 arg0_inner = TREE_OPERAND (arg0, 0);
6071 inner_type = TREE_TYPE (arg0_inner);
6073 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6074 return NULL_TREE;
6076 if (TREE_CODE (arg1) != INTEGER_CST
6077 && !(TREE_CODE (arg1) == NOP_EXPR
6078 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6079 return NULL_TREE;
6081 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6082 && code != NE_EXPR
6083 && code != EQ_EXPR)
6084 return NULL_TREE;
6086 if (TREE_CODE (arg1) == INTEGER_CST)
6088 tmp = build_int_cst_wide (inner_type,
6089 TREE_INT_CST_LOW (arg1),
6090 TREE_INT_CST_HIGH (arg1));
6091 arg1 = force_fit_type (tmp, 0,
6092 TREE_OVERFLOW (arg1),
6093 TREE_CONSTANT_OVERFLOW (arg1));
6095 else
6096 arg1 = fold_convert (inner_type, arg1);
6098 return fold (build (code, type, arg0_inner, arg1));
6101 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6102    the step of the array.  TYPE is the type of the expression.  ADDR is the address.
6103 MULT is the multiplicative expression. If the function succeeds, the new
6104 address expression is returned. Otherwise NULL_TREE is returned. */
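/* A sketch of a successful match (illustrative, assuming the array
   elements are 4 bytes wide): "&a[i] + 4 * d" is rewritten as
   "&a[i + d]", because the multiplier equals the element size.  */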
6106 static tree
6107 try_move_mult_to_index (tree type, enum tree_code code, tree addr, tree mult)
6109 tree s, delta, step;
6110 tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
6111 tree ref = TREE_OPERAND (addr, 0), pref;
6112 tree ret, pos;
6113 tree itype;
6115 STRIP_NOPS (arg0);
6116 STRIP_NOPS (arg1);
6118 if (TREE_CODE (arg0) == INTEGER_CST)
6120 s = arg0;
6121 delta = arg1;
6123 else if (TREE_CODE (arg1) == INTEGER_CST)
6125 s = arg1;
6126 delta = arg0;
6128 else
6129 return NULL_TREE;
6131 for (;; ref = TREE_OPERAND (ref, 0))
6133 if (TREE_CODE (ref) == ARRAY_REF)
6135 step = array_ref_element_size (ref);
6137 if (TREE_CODE (step) != INTEGER_CST)
6138 continue;
6140 itype = TREE_TYPE (step);
6142	  /* If the type sizes do not match, we might run into problems
6143	     when one of them overflows.  */
6144 if (TYPE_PRECISION (itype) != TYPE_PRECISION (type))
6145 continue;
6147 if (!operand_equal_p (step, fold_convert (itype, s), 0))
6148 continue;
6150 delta = fold_convert (itype, delta);
6151 break;
6154 if (!handled_component_p (ref))
6155 return NULL_TREE;
6158   /* We found a suitable array reference.  So copy everything up to it,
6159 and replace the index. */
6161 pref = TREE_OPERAND (addr, 0);
6162 ret = copy_node (pref);
6163 pos = ret;
6165 while (pref != ref)
6167 pref = TREE_OPERAND (pref, 0);
6168 TREE_OPERAND (pos, 0) = copy_node (pref);
6169 pos = TREE_OPERAND (pos, 0);
6172 TREE_OPERAND (pos, 1) = fold (build2 (code, itype,
6173 TREE_OPERAND (pos, 1),
6174 delta));
6176 return build1 (ADDR_EXPR, type, ret);
6180 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6181 means A >= Y && A != MAX, but in this case we know that
6182 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
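/* Worked instance (illustrative): given BOUND "a < x" and INEQ
   "a + 1 > y", the difference (a + 1) - a folds to 1, so the result
   is "a >= y"; the bound guarantees that a + 1 does not wrap.  */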
6184 static tree
6185 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6187 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6189 if (TREE_CODE (bound) == LT_EXPR)
6190 a = TREE_OPERAND (bound, 0);
6191 else if (TREE_CODE (bound) == GT_EXPR)
6192 a = TREE_OPERAND (bound, 1);
6193 else
6194 return NULL_TREE;
6196 typea = TREE_TYPE (a);
6197 if (!INTEGRAL_TYPE_P (typea)
6198 && !POINTER_TYPE_P (typea))
6199 return NULL_TREE;
6201 if (TREE_CODE (ineq) == LT_EXPR)
6203 a1 = TREE_OPERAND (ineq, 1);
6204 y = TREE_OPERAND (ineq, 0);
6206 else if (TREE_CODE (ineq) == GT_EXPR)
6208 a1 = TREE_OPERAND (ineq, 0);
6209 y = TREE_OPERAND (ineq, 1);
6211 else
6212 return NULL_TREE;
6214 if (TREE_TYPE (a1) != typea)
6215 return NULL_TREE;
6217 diff = fold (build2 (MINUS_EXPR, typea, a1, a));
6218 if (!integer_onep (diff))
6219 return NULL_TREE;
6221 return fold (build2 (GE_EXPR, type, a, y));
6224 /* Perform constant folding and related simplification of EXPR.
6225 The related simplifications include x*1 => x, x*0 => 0, etc.,
6226 and application of the associative law.
6227 NOP_EXPR conversions may be removed freely (as long as we
6228 are careful not to change the type of the overall expression).
6229 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
6230 but we can constant-fold them if they have constant operands. */
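/* For instance (illustrative): fold applied to the tree for "i * 1"
   returns the tree for "i", and applied to "i * 0" returns "0", with
   any side effects of "i" preserved via omit_one_operand.  */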
6232 #ifdef ENABLE_FOLD_CHECKING
6233 # define fold(x) fold_1 (x)
6234 static tree fold_1 (tree);
6235 static
6236 #endif
6237 tree
6238 fold (tree expr)
6240 const tree t = expr;
6241 const tree type = TREE_TYPE (expr);
6242 tree t1 = NULL_TREE;
6243 tree tem;
6244 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
6245 enum tree_code code = TREE_CODE (t);
6246 enum tree_code_class kind = TREE_CODE_CLASS (code);
6248 /* WINS will be nonzero when the switch is done
6249 if all operands are constant. */
6250 int wins = 1;
6252 /* Return right away if a constant. */
6253 if (kind == tcc_constant)
6254 return t;
6256 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6258 tree subop;
6260 /* Special case for conversion ops that can have fixed point args. */
6261 arg0 = TREE_OPERAND (t, 0);
6263 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6264 if (arg0 != 0)
6265 STRIP_SIGN_NOPS (arg0);
6267 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
6268 subop = TREE_REALPART (arg0);
6269 else
6270 subop = arg0;
6272 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
6273 && TREE_CODE (subop) != REAL_CST)
6274 /* Note that TREE_CONSTANT isn't enough:
6275 static var addresses are constant but we can't
6276 do arithmetic on them. */
6277 wins = 0;
6279 else if (IS_EXPR_CODE_CLASS (kind))
6281 int len = TREE_CODE_LENGTH (code);
6282 int i;
6283 for (i = 0; i < len; i++)
6285 tree op = TREE_OPERAND (t, i);
6286 tree subop;
6288 if (op == 0)
6289 continue; /* Valid for CALL_EXPR, at least. */
6291 /* Strip any conversions that don't change the mode. This is
6292 safe for every expression, except for a comparison expression
6293 because its signedness is derived from its operands. So, in
6294 the latter case, only strip conversions that don't change the
6295 signedness.
6297 Note that this is done as an internal manipulation within the
6298 constant folder, in order to find the simplest representation
6299	     of the arguments so that their form can be studied.  In any
6300	     case, the appropriate type conversions should be put back in
6301 the tree that will get out of the constant folder. */
6302 if (kind == tcc_comparison)
6303 STRIP_SIGN_NOPS (op);
6304 else
6305 STRIP_NOPS (op);
6307 if (TREE_CODE (op) == COMPLEX_CST)
6308 subop = TREE_REALPART (op);
6309 else
6310 subop = op;
6312 if (TREE_CODE (subop) != INTEGER_CST
6313 && TREE_CODE (subop) != REAL_CST)
6314 /* Note that TREE_CONSTANT isn't enough:
6315 static var addresses are constant but we can't
6316 do arithmetic on them. */
6317 wins = 0;
6319 if (i == 0)
6320 arg0 = op;
6321 else if (i == 1)
6322 arg1 = op;
6326 /* If this is a commutative operation, and ARG0 is a constant, move it
6327 to ARG1 to reduce the number of tests below. */
6328 if (commutative_tree_code (code)
6329 && tree_swap_operands_p (arg0, arg1, true))
6330 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6331 TREE_OPERAND (t, 0)));
6333 /* Now WINS is set as described above,
6334 ARG0 is the first operand of EXPR,
6335 and ARG1 is the second operand (if it has more than one operand).
6337 First check for cases where an arithmetic operation is applied to a
6338 compound, conditional, or comparison operation. Push the arithmetic
6339 operation inside the compound or conditional to see if any folding
6340 can then be done. Convert comparison to conditional for this purpose.
6341      This also optimizes non-constant cases that used to be done in
6342 expand_expr.
6344      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
6345      one of the operands is a truth value and the other is a truth value or a
6346      BIT_AND_EXPR with the constant 1.  In that case, the
6347 code below would make the expression more complex. Change it to a
6348 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6349 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6351 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6352 || code == EQ_EXPR || code == NE_EXPR)
6353 && ((truth_value_p (TREE_CODE (arg0))
6354 && (truth_value_p (TREE_CODE (arg1))
6355 || (TREE_CODE (arg1) == BIT_AND_EXPR
6356 && integer_onep (TREE_OPERAND (arg1, 1)))))
6357 || (truth_value_p (TREE_CODE (arg1))
6358 && (truth_value_p (TREE_CODE (arg0))
6359 || (TREE_CODE (arg0) == BIT_AND_EXPR
6360 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6362 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6363 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6364 : TRUTH_XOR_EXPR,
6365 type, fold_convert (boolean_type_node, arg0),
6366 fold_convert (boolean_type_node, arg1)));
6368 if (code == EQ_EXPR)
6369 tem = invert_truthvalue (tem);
6371 return tem;
6374 if (TREE_CODE_CLASS (code) == tcc_unary)
6376 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6377 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6378 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6379 else if (TREE_CODE (arg0) == COND_EXPR)
6381 tree arg01 = TREE_OPERAND (arg0, 1);
6382 tree arg02 = TREE_OPERAND (arg0, 2);
6383 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6384 arg01 = fold (build1 (code, type, arg01));
6385 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6386 arg02 = fold (build1 (code, type, arg02));
6387 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6388 arg01, arg02));
6390 /* If this was a conversion, and all we did was to move into
6391 inside the COND_EXPR, bring it back out. But leave it if
6392 it is a conversion from integer to integer and the
6393 result precision is no wider than a word since such a
6394 conversion is cheap and may be optimized away by combine,
6395 while it couldn't if it were outside the COND_EXPR. Then return
6396 so we don't get into an infinite recursion loop taking the
6397 conversion out and then back in. */
6399 if ((code == NOP_EXPR || code == CONVERT_EXPR
6400 || code == NON_LVALUE_EXPR)
6401 && TREE_CODE (tem) == COND_EXPR
6402 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6403 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6404 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6405 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6406 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6407 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6408 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6409 && (INTEGRAL_TYPE_P
6410 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6411 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6412 || flag_syntax_only))
6413 tem = build1 (code, type,
6414 build3 (COND_EXPR,
6415 TREE_TYPE (TREE_OPERAND
6416 (TREE_OPERAND (tem, 1), 0)),
6417 TREE_OPERAND (tem, 0),
6418 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6419 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6420 return tem;
6422 else if (COMPARISON_CLASS_P (arg0))
6424 if (TREE_CODE (type) == BOOLEAN_TYPE)
6426 arg0 = copy_node (arg0);
6427 TREE_TYPE (arg0) = type;
6428 return arg0;
6430 else if (TREE_CODE (type) != INTEGER_TYPE)
6431 return fold (build3 (COND_EXPR, type, arg0,
6432 fold (build1 (code, type,
6433 integer_one_node)),
6434 fold (build1 (code, type,
6435 integer_zero_node))));
6438 else if (TREE_CODE_CLASS (code) == tcc_comparison
6439 && TREE_CODE (arg0) == COMPOUND_EXPR)
6440 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6441 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6442 else if (TREE_CODE_CLASS (code) == tcc_comparison
6443 && TREE_CODE (arg1) == COMPOUND_EXPR)
6444 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6445 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6446 else if (TREE_CODE_CLASS (code) == tcc_binary
6447 || TREE_CODE_CLASS (code) == tcc_comparison)
6449 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6450 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6451 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6452 arg1)));
6453 if (TREE_CODE (arg1) == COMPOUND_EXPR
6454 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6455 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6456 fold (build2 (code, type,
6457 arg0, TREE_OPERAND (arg1, 1))));
6459 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
6461 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
6462 /*cond_first_p=*/1);
6463 if (tem != NULL_TREE)
6464 return tem;
6467 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
6469 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
6470 /*cond_first_p=*/0);
6471 if (tem != NULL_TREE)
6472 return tem;
6476 switch (code)
6478 case CONST_DECL:
6479 return fold (DECL_INITIAL (t));
6481 case NOP_EXPR:
6482 case FLOAT_EXPR:
6483 case CONVERT_EXPR:
6484 case FIX_TRUNC_EXPR:
6485 case FIX_CEIL_EXPR:
6486 case FIX_FLOOR_EXPR:
6487 case FIX_ROUND_EXPR:
6488 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6489 return TREE_OPERAND (t, 0);
6491 /* Handle cases of two conversions in a row. */
6492 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6493 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6495 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6496 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6497 int inside_int = INTEGRAL_TYPE_P (inside_type);
6498 int inside_ptr = POINTER_TYPE_P (inside_type);
6499 int inside_float = FLOAT_TYPE_P (inside_type);
6500 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6501 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6502 int inter_int = INTEGRAL_TYPE_P (inter_type);
6503 int inter_ptr = POINTER_TYPE_P (inter_type);
6504 int inter_float = FLOAT_TYPE_P (inter_type);
6505 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6506 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6507 int final_int = INTEGRAL_TYPE_P (type);
6508 int final_ptr = POINTER_TYPE_P (type);
6509 int final_float = FLOAT_TYPE_P (type);
6510 unsigned int final_prec = TYPE_PRECISION (type);
6511 int final_unsignedp = TYPE_UNSIGNED (type);
6513 /* In addition to the cases of two conversions in a row
6514 handled below, if we are converting something to its own
6515 type via an object of identical or wider precision, neither
6516 conversion is needed. */
6517 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6518 && ((inter_int && final_int) || (inter_float && final_float))
6519 && inter_prec >= final_prec)
6520 return fold (build1 (code, type,
6521 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6523 /* Likewise, if the intermediate and final types are either both
6524 float or both integer, we don't need the middle conversion if
6525 it is wider than the final type and doesn't change the signedness
6526 (for integers). Avoid this if the final type is a pointer
6527 since then we sometimes need the inner conversion. Likewise if
6528 the outer has a precision not equal to the size of its mode. */
6529 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6530 || (inter_float && inside_float))
6531 && inter_prec >= inside_prec
6532 && (inter_float || inter_unsignedp == inside_unsignedp)
6533 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6534 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6535 && ! final_ptr)
6536 return fold (build1 (code, type,
6537 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6539 /* If we have a sign-extension of a zero-extended value, we can
6540 replace that by a single zero-extension. */
6541 if (inside_int && inter_int && final_int
6542 && inside_prec < inter_prec && inter_prec < final_prec
6543 && inside_unsignedp && !inter_unsignedp)
6544 return fold (build1 (code, type,
6545 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6547 /* Two conversions in a row are not needed unless:
6548 - some conversion is floating-point (overstrict for now), or
6549 - the intermediate type is narrower than both initial and
6550 final, or
6551 - the intermediate type and innermost type differ in signedness,
6552 and the outermost type is wider than the intermediate, or
6553 - the initial type is a pointer type and the precisions of the
6554 intermediate and final types differ, or
6555 - the final type is a pointer type and the precisions of the
6556 initial and intermediate types differ. */
6557 if (! inside_float && ! inter_float && ! final_float
6558 && (inter_prec > inside_prec || inter_prec > final_prec)
6559 && ! (inside_int && inter_int
6560 && inter_unsignedp != inside_unsignedp
6561 && inter_prec < final_prec)
6562 && ((inter_unsignedp && inter_prec > inside_prec)
6563 == (final_unsignedp && final_prec > inter_prec))
6564 && ! (inside_ptr && inter_prec != final_prec)
6565 && ! (final_ptr && inside_prec != inter_prec)
6566 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6567 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6568 && ! final_ptr)
6569 return fold (build1 (code, type,
6570 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6573 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6574 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6575 /* Detect assigning a bitfield. */
6576 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6577 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6579 /* Don't leave an assignment inside a conversion
6580 unless assigning a bitfield. */
6581 tree prev = TREE_OPERAND (t, 0);
6582 tem = copy_node (t);
6583 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6584 /* First do the assignment, then return converted constant. */
6585 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6586 TREE_NO_WARNING (tem) = 1;
6587 TREE_USED (tem) = 1;
6588 return tem;
6591       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6592	  constant (if x has signed type, the sign bit cannot be set
6593 in c). This folds extension into the BIT_AND_EXPR. */
6594 if (INTEGRAL_TYPE_P (type)
6595 && TREE_CODE (type) != BOOLEAN_TYPE
6596 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6597 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6599 tree and = TREE_OPERAND (t, 0);
6600 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6601 int change = 0;
6603 if (TYPE_UNSIGNED (TREE_TYPE (and))
6604 || (TYPE_PRECISION (type)
6605 <= TYPE_PRECISION (TREE_TYPE (and))))
6606 change = 1;
6607 else if (TYPE_PRECISION (TREE_TYPE (and1))
6608 <= HOST_BITS_PER_WIDE_INT
6609 && host_integerp (and1, 1))
6611 unsigned HOST_WIDE_INT cst;
6613 cst = tree_low_cst (and1, 1);
6614 cst &= (HOST_WIDE_INT) -1
6615 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6616 change = (cst == 0);
6617 #ifdef LOAD_EXTEND_OP
6618 if (change
6619 && !flag_syntax_only
6620 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6621 == ZERO_EXTEND))
6623 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6624 and0 = fold_convert (uns, and0);
6625 and1 = fold_convert (uns, and1);
6627 #endif
6629 if (change)
6630 return fold (build2 (BIT_AND_EXPR, type,
6631 fold_convert (type, and0),
6632 fold_convert (type, and1)));
6635       /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6636	  T2 that point to types of the same size.  */
6637 if (POINTER_TYPE_P (TREE_TYPE (t))
6638 && BINARY_CLASS_P (arg0)
6639 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6640 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6642 tree arg00 = TREE_OPERAND (arg0, 0);
6643 tree t0 = TREE_TYPE (t);
6644 tree t1 = TREE_TYPE (arg00);
6645 tree tt0 = TREE_TYPE (t0);
6646 tree tt1 = TREE_TYPE (t1);
6647 tree s0 = TYPE_SIZE (tt0);
6648 tree s1 = TYPE_SIZE (tt1);
6650 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6651 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6652 TREE_OPERAND (arg0, 1));
6655 tem = fold_convert_const (code, type, arg0);
6656 return tem ? tem : t;
6658 case VIEW_CONVERT_EXPR:
6659 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6660 return build1 (VIEW_CONVERT_EXPR, type,
6661 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6662 return t;
6664 case COMPONENT_REF:
6665 if (TREE_CODE (arg0) == CONSTRUCTOR
6666 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6668 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6669 if (m)
6670 return TREE_VALUE (m);
6672 return t;
6674 case RANGE_EXPR:
6675 if (TREE_CONSTANT (t) != wins)
6677 tem = copy_node (t);
6678 TREE_CONSTANT (tem) = wins;
6679 TREE_INVARIANT (tem) = wins;
6680 return tem;
6682 return t;
6684 case NEGATE_EXPR:
6685 if (negate_expr_p (arg0))
6686 return fold_convert (type, negate_expr (arg0));
6687 return t;
6689 case ABS_EXPR:
6690 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6691 return fold_abs_const (arg0, type);
6692 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6693 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6694 /* Convert fabs((double)float) into (double)fabsf(float). */
6695 else if (TREE_CODE (arg0) == NOP_EXPR
6696 && TREE_CODE (type) == REAL_TYPE)
6698 tree targ0 = strip_float_extensions (arg0);
6699 if (targ0 != arg0)
6700 return fold_convert (type, fold (build1 (ABS_EXPR,
6701 TREE_TYPE (targ0),
6702 targ0)));
6704 else if (tree_expr_nonnegative_p (arg0))
6705 return arg0;
6706 return t;
6708 case CONJ_EXPR:
6709 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6710 return fold_convert (type, arg0);
6711 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6712 return build2 (COMPLEX_EXPR, type,
6713 TREE_OPERAND (arg0, 0),
6714 negate_expr (TREE_OPERAND (arg0, 1)));
6715 else if (TREE_CODE (arg0) == COMPLEX_CST)
6716 return build_complex (type, TREE_REALPART (arg0),
6717 negate_expr (TREE_IMAGPART (arg0)));
6718 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6719 return fold (build2 (TREE_CODE (arg0), type,
6720 fold (build1 (CONJ_EXPR, type,
6721 TREE_OPERAND (arg0, 0))),
6722 fold (build1 (CONJ_EXPR, type,
6723 TREE_OPERAND (arg0, 1)))));
6724 else if (TREE_CODE (arg0) == CONJ_EXPR)
6725 return TREE_OPERAND (arg0, 0);
6726 return t;
6728 case BIT_NOT_EXPR:
6729 if (TREE_CODE (arg0) == INTEGER_CST)
6730 return fold_not_const (arg0, type);
6731 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6732 return TREE_OPERAND (arg0, 0);
6733 return t;
6735 case PLUS_EXPR:
6736 /* A + (-B) -> A - B */
6737 if (TREE_CODE (arg1) == NEGATE_EXPR)
6738 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6739 /* (-A) + B -> B - A */
6740 if (TREE_CODE (arg0) == NEGATE_EXPR
6741 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6742 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6743 if (! FLOAT_TYPE_P (type))
6745 if (integer_zerop (arg1))
6746 return non_lvalue (fold_convert (type, arg0));
6748 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6749 with a constant, and the two constants have no bits in common,
6750 we should treat this as a BIT_IOR_EXPR since this may produce more
6751 simplifications. */
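/* For example (illustrative): "(a & 0xF0) + (b & 0x0F)" cannot carry
   between the two masked halves, so it is handled as
   "(a & 0xF0) | (b & 0x0F)".  */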
6752 if (TREE_CODE (arg0) == BIT_AND_EXPR
6753 && TREE_CODE (arg1) == BIT_AND_EXPR
6754 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6755 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6756 && integer_zerop (const_binop (BIT_AND_EXPR,
6757 TREE_OPERAND (arg0, 1),
6758 TREE_OPERAND (arg1, 1), 0)))
6760 code = BIT_IOR_EXPR;
6761 goto bit_ior;
6764 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6765 (plus (plus (mult) (mult)) (foo)) so that we can
6766 take advantage of the factoring cases below. */
6767 if (((TREE_CODE (arg0) == PLUS_EXPR
6768 || TREE_CODE (arg0) == MINUS_EXPR)
6769 && TREE_CODE (arg1) == MULT_EXPR)
6770 || ((TREE_CODE (arg1) == PLUS_EXPR
6771 || TREE_CODE (arg1) == MINUS_EXPR)
6772 && TREE_CODE (arg0) == MULT_EXPR))
6774 tree parg0, parg1, parg, marg;
6775 enum tree_code pcode;
6777 if (TREE_CODE (arg1) == MULT_EXPR)
6778 parg = arg0, marg = arg1;
6779 else
6780 parg = arg1, marg = arg0;
6781 pcode = TREE_CODE (parg);
6782 parg0 = TREE_OPERAND (parg, 0);
6783 parg1 = TREE_OPERAND (parg, 1);
6784 STRIP_NOPS (parg0);
6785 STRIP_NOPS (parg1);
6787 if (TREE_CODE (parg0) == MULT_EXPR
6788 && TREE_CODE (parg1) != MULT_EXPR)
6789 return fold (build2 (pcode, type,
6790 fold (build2 (PLUS_EXPR, type,
6791 fold_convert (type, parg0),
6792 fold_convert (type, marg))),
6793 fold_convert (type, parg1)));
6794 if (TREE_CODE (parg0) != MULT_EXPR
6795 && TREE_CODE (parg1) == MULT_EXPR)
6796 return fold (build2 (PLUS_EXPR, type,
6797 fold_convert (type, parg0),
6798 fold (build2 (pcode, type,
6799 fold_convert (type, marg),
6800 fold_convert (type,
6801 parg1)))));
6804 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6806 tree arg00, arg01, arg10, arg11;
6807 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6809 /* (A * C) + (B * C) -> (A+B) * C.
6810 We are most concerned about the case where C is a constant,
6811 but other combinations show up during loop reduction. Since
6812 it is not difficult, try all four possibilities. */
6814 arg00 = TREE_OPERAND (arg0, 0);
6815 arg01 = TREE_OPERAND (arg0, 1);
6816 arg10 = TREE_OPERAND (arg1, 0);
6817 arg11 = TREE_OPERAND (arg1, 1);
6818 same = NULL_TREE;
6820 if (operand_equal_p (arg01, arg11, 0))
6821 same = arg01, alt0 = arg00, alt1 = arg10;
6822 else if (operand_equal_p (arg00, arg10, 0))
6823 same = arg00, alt0 = arg01, alt1 = arg11;
6824 else if (operand_equal_p (arg00, arg11, 0))
6825 same = arg00, alt0 = arg01, alt1 = arg10;
6826 else if (operand_equal_p (arg01, arg10, 0))
6827 same = arg01, alt0 = arg00, alt1 = arg11;
6829 /* No identical multiplicands; see if we can find a common
6830 power-of-two factor in non-power-of-two multiplies. This
6831 can help in multi-dimensional array access. */
6832 else if (TREE_CODE (arg01) == INTEGER_CST
6833 && TREE_CODE (arg11) == INTEGER_CST
6834 && TREE_INT_CST_HIGH (arg01) == 0
6835 && TREE_INT_CST_HIGH (arg11) == 0)
6837 HOST_WIDE_INT int01, int11, tmp;
6838 int01 = TREE_INT_CST_LOW (arg01);
6839 int11 = TREE_INT_CST_LOW (arg11);
6841 /* Move min of absolute values to int11. */
6842 if ((int01 >= 0 ? int01 : -int01)
6843 < (int11 >= 0 ? int11 : -int11))
6845 tmp = int01, int01 = int11, int11 = tmp;
6846 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6847 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6850 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6852 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6853 build_int_cst (NULL_TREE,
6854 int01 / int11)));
6855 alt1 = arg10;
6856 same = arg11;
6860 if (same)
6861 return fold (build2 (MULT_EXPR, type,
6862 fold (build2 (PLUS_EXPR, type,
6863 fold_convert (type, alt0),
6864 fold_convert (type, alt1))),
6865 same));
6868	  /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
6869	     of the array.  The loop optimizer sometimes produces this type of
6870	     expression.  */
6871 if (TREE_CODE (arg0) == ADDR_EXPR
6872 && TREE_CODE (arg1) == MULT_EXPR)
6874 tem = try_move_mult_to_index (type, PLUS_EXPR, arg0, arg1);
6875 if (tem)
6876 return fold (tem);
6878 else if (TREE_CODE (arg1) == ADDR_EXPR
6879 && TREE_CODE (arg0) == MULT_EXPR)
6881 tem = try_move_mult_to_index (type, PLUS_EXPR, arg1, arg0);
6882 if (tem)
6883 return fold (tem);
6886 else
6888 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6889 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6890 return non_lvalue (fold_convert (type, arg0));
6892 /* Likewise if the operands are reversed. */
6893 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6894 return non_lvalue (fold_convert (type, arg1));
6896 /* Convert X + -C into X - C. */
6897 if (TREE_CODE (arg1) == REAL_CST
6898 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
6900 tem = fold_negate_const (arg1, type);
6901 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
6902 return fold (build2 (MINUS_EXPR, type,
6903 fold_convert (type, arg0),
6904 fold_convert (type, tem)));
6907 /* Convert x+x into x*2.0. */
6908 if (operand_equal_p (arg0, arg1, 0)
6909 && SCALAR_FLOAT_TYPE_P (type))
6910 return fold (build2 (MULT_EXPR, type, arg0,
6911 build_real (type, dconst2)));
6913 /* Convert x*c+x into x*(c+1). */
6914 if (flag_unsafe_math_optimizations
6915 && TREE_CODE (arg0) == MULT_EXPR
6916 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6917 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6918 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6920 REAL_VALUE_TYPE c;
6922 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6923 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6924 return fold (build2 (MULT_EXPR, type, arg1,
6925 build_real (type, c)));
6928 /* Convert x+x*c into x*(c+1). */
6929 if (flag_unsafe_math_optimizations
6930 && TREE_CODE (arg1) == MULT_EXPR
6931 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6932 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6933 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6935 REAL_VALUE_TYPE c;
6937 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6938 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6939 return fold (build2 (MULT_EXPR, type, arg0,
6940 build_real (type, c)));
6943 /* Convert x*c1+x*c2 into x*(c1+c2). */
6944 if (flag_unsafe_math_optimizations
6945 && TREE_CODE (arg0) == MULT_EXPR
6946 && TREE_CODE (arg1) == MULT_EXPR
6947 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6948 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6949 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6950 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6951 && operand_equal_p (TREE_OPERAND (arg0, 0),
6952 TREE_OPERAND (arg1, 0), 0))
6954 REAL_VALUE_TYPE c1, c2;
6956 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6957 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6958 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6959 return fold (build2 (MULT_EXPR, type,
6960 TREE_OPERAND (arg0, 0),
6961 build_real (type, c1)));
6963 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
6964 if (flag_unsafe_math_optimizations
6965 && TREE_CODE (arg1) == PLUS_EXPR
6966 && TREE_CODE (arg0) != MULT_EXPR)
6968 tree tree10 = TREE_OPERAND (arg1, 0);
6969 tree tree11 = TREE_OPERAND (arg1, 1);
6970 if (TREE_CODE (tree11) == MULT_EXPR
6971 && TREE_CODE (tree10) == MULT_EXPR)
6973 tree tree0;
6974 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6975 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6978 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
6979 if (flag_unsafe_math_optimizations
6980 && TREE_CODE (arg0) == PLUS_EXPR
6981 && TREE_CODE (arg1) != MULT_EXPR)
6983 tree tree00 = TREE_OPERAND (arg0, 0);
6984 tree tree01 = TREE_OPERAND (arg0, 1);
6985 if (TREE_CODE (tree01) == MULT_EXPR
6986 && TREE_CODE (tree00) == MULT_EXPR)
6988 tree tree0;
6989 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6990 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6995 bit_rotate:
6996 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6997 is a rotate of A by C1 bits. */
6998 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6999 is a rotate of A by B bits. */
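      /* Illustrative instances, for a 32-bit unsigned x:
	 "(x << 3) + (x >> 29)" is x rotated left by 3, and
	 "(x << b) + (x >> (32 - b))" is x rotated left by b.  */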
7001 enum tree_code code0, code1;
7002 code0 = TREE_CODE (arg0);
7003 code1 = TREE_CODE (arg1);
7004 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7005 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7006 && operand_equal_p (TREE_OPERAND (arg0, 0),
7007 TREE_OPERAND (arg1, 0), 0)
7008 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7010 tree tree01, tree11;
7011 enum tree_code code01, code11;
7013 tree01 = TREE_OPERAND (arg0, 1);
7014 tree11 = TREE_OPERAND (arg1, 1);
7015 STRIP_NOPS (tree01);
7016 STRIP_NOPS (tree11);
7017 code01 = TREE_CODE (tree01);
7018 code11 = TREE_CODE (tree11);
7019 if (code01 == INTEGER_CST
7020 && code11 == INTEGER_CST
7021 && TREE_INT_CST_HIGH (tree01) == 0
7022 && TREE_INT_CST_HIGH (tree11) == 0
7023 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7024 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7025 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7026 code0 == LSHIFT_EXPR ? tree01 : tree11);
7027 else if (code11 == MINUS_EXPR)
7029 tree tree110, tree111;
7030 tree110 = TREE_OPERAND (tree11, 0);
7031 tree111 = TREE_OPERAND (tree11, 1);
7032 STRIP_NOPS (tree110);
7033 STRIP_NOPS (tree111);
7034 if (TREE_CODE (tree110) == INTEGER_CST
7035 && 0 == compare_tree_int (tree110,
7036 TYPE_PRECISION
7037 (TREE_TYPE (TREE_OPERAND
7038 (arg0, 0))))
7039 && operand_equal_p (tree01, tree111, 0))
7040 return build2 ((code0 == LSHIFT_EXPR
7041 ? LROTATE_EXPR
7042 : RROTATE_EXPR),
7043 type, TREE_OPERAND (arg0, 0), tree01);
7045 else if (code01 == MINUS_EXPR)
7047 tree tree010, tree011;
7048 tree010 = TREE_OPERAND (tree01, 0);
7049 tree011 = TREE_OPERAND (tree01, 1);
7050 STRIP_NOPS (tree010);
7051 STRIP_NOPS (tree011);
7052 if (TREE_CODE (tree010) == INTEGER_CST
7053 && 0 == compare_tree_int (tree010,
7054 TYPE_PRECISION
7055 (TREE_TYPE (TREE_OPERAND
7056 (arg0, 0))))
7057 && operand_equal_p (tree11, tree011, 0))
7058 return build2 ((code0 != LSHIFT_EXPR
7059 ? LROTATE_EXPR
7060 : RROTATE_EXPR),
7061 type, TREE_OPERAND (arg0, 0), tree11);
7066 associate:
7067      /* In most languages, we can't associate operations on floats through
7068	 parentheses.  Rather than remember where the parentheses were, we
7069	 don't associate floats at all, unless the user has specified
7070 -funsafe-math-optimizations. */
7072 if (! wins
7073 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7075 tree var0, con0, lit0, minus_lit0;
7076 tree var1, con1, lit1, minus_lit1;
7078 /* Split both trees into variables, constants, and literals. Then
7079 associate each group together, the constants with literals,
7080 then the result with variables. This increases the chances of
7081 literals being recombined later and of generating relocatable
7082 expressions for the sum of a constant and literal. */
7083 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7084 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7085 code == MINUS_EXPR);
7087 /* Only do something if we found more than two objects. Otherwise,
7088 nothing has changed and we risk infinite recursion. */
7089 if (2 < ((var0 != 0) + (var1 != 0)
7090 + (con0 != 0) + (con1 != 0)
7091 + (lit0 != 0) + (lit1 != 0)
7092 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7094 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7095 if (code == MINUS_EXPR)
7096 code = PLUS_EXPR;
7098 var0 = associate_trees (var0, var1, code, type);
7099 con0 = associate_trees (con0, con1, code, type);
7100 lit0 = associate_trees (lit0, lit1, code, type);
7101 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7103 /* Preserve the MINUS_EXPR if the negative part of the literal is
7104	     greater than the positive part.  Otherwise, the multiplicative
7105	     folding code (i.e. extract_muldiv) may be fooled when
7106	     unsigned constants are subtracted, as in the following
7107	     example: ((X*2 + 4) - 8U)/2.  */
7108 if (minus_lit0 && lit0)
7110 if (TREE_CODE (lit0) == INTEGER_CST
7111 && TREE_CODE (minus_lit0) == INTEGER_CST
7112 && tree_int_cst_lt (lit0, minus_lit0))
7114 minus_lit0 = associate_trees (minus_lit0, lit0,
7115 MINUS_EXPR, type);
7116 lit0 = 0;
7118 else
7120 lit0 = associate_trees (lit0, minus_lit0,
7121 MINUS_EXPR, type);
7122 minus_lit0 = 0;
7125 if (minus_lit0)
7127 if (con0 == 0)
7128 return fold_convert (type,
7129 associate_trees (var0, minus_lit0,
7130 MINUS_EXPR, type));
7131 else
7133 con0 = associate_trees (con0, minus_lit0,
7134 MINUS_EXPR, type);
7135 return fold_convert (type,
7136 associate_trees (var0, con0,
7137 PLUS_EXPR, type));
7141 con0 = associate_trees (con0, lit0, code, type);
7142 return fold_convert (type, associate_trees (var0, con0,
7143 code, type));
7147 binary:
7148 if (wins)
7149 t1 = const_binop (code, arg0, arg1, 0);
7150 if (t1 != NULL_TREE)
7152 /* The return value should always have
7153 the same type as the original expression. */
7154 if (TREE_TYPE (t1) != type)
7155 t1 = fold_convert (type, t1);
7157 return t1;
7159 return t;
7161 case MINUS_EXPR:
7162 /* A - (-B) -> A + B */
7163 if (TREE_CODE (arg1) == NEGATE_EXPR)
7164 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
7165 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7166 if (TREE_CODE (arg0) == NEGATE_EXPR
7167 && (FLOAT_TYPE_P (type)
7168 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7169 && negate_expr_p (arg1)
7170 && reorder_operands_p (arg0, arg1))
7171 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
7172 TREE_OPERAND (arg0, 0)));
7174 if (! FLOAT_TYPE_P (type))
7176 if (! wins && integer_zerop (arg0))
7177 return negate_expr (fold_convert (type, arg1));
7178 if (integer_zerop (arg1))
7179 return non_lvalue (fold_convert (type, arg0));
7181 /* Fold A - (A & B) into ~B & A. */
7182 if (!TREE_SIDE_EFFECTS (arg0)
7183 && TREE_CODE (arg1) == BIT_AND_EXPR)
7185 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7186 return fold (build2 (BIT_AND_EXPR, type,
7187 fold (build1 (BIT_NOT_EXPR, type,
7188 TREE_OPERAND (arg1, 0))),
7189 arg0));
7190 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7191 return fold (build2 (BIT_AND_EXPR, type,
7192 fold (build1 (BIT_NOT_EXPR, type,
7193 TREE_OPERAND (arg1, 1))),
7194 arg0));
7197 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7198 any power of 2 minus 1. */
7199 if (TREE_CODE (arg0) == BIT_AND_EXPR
7200 && TREE_CODE (arg1) == BIT_AND_EXPR
7201 && operand_equal_p (TREE_OPERAND (arg0, 0),
7202 TREE_OPERAND (arg1, 0), 0))
7204 tree mask0 = TREE_OPERAND (arg0, 1);
7205 tree mask1 = TREE_OPERAND (arg1, 1);
7206 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
7208 if (operand_equal_p (tem, mask1, 0))
7210 tem = fold (build2 (BIT_XOR_EXPR, type,
7211 TREE_OPERAND (arg0, 0), mask1));
7212 return fold (build2 (MINUS_EXPR, type, tem, mask1));
7217 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7218 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7219 return non_lvalue (fold_convert (type, arg0));
7221 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7222 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7223 (-ARG1 + ARG0) reduces to -ARG1. */
7224 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7225 return negate_expr (fold_convert (type, arg1));
7227 /* Fold &x - &x. This can happen from &x.foo - &x.
7228 This is unsafe for certain floats even in non-IEEE formats.
7229 In IEEE, it is unsafe because it gives the wrong result for NaNs.
7230 Also note that operand_equal_p is always false if an operand
7231 is volatile. */
7233 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7234 && operand_equal_p (arg0, arg1, 0))
7235 return fold_convert (type, integer_zero_node);
7237 /* A - B -> A + (-B) if B is easily negatable. */
7238 if (!wins && negate_expr_p (arg1)
7239 && ((FLOAT_TYPE_P (type)
7240 /* Avoid this transformation if B is a positive REAL_CST. */
7241 && (TREE_CODE (arg1) != REAL_CST
7242 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7243 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7244 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
7246 /* Try folding difference of addresses. */
7248 HOST_WIDE_INT diff;
7250 if ((TREE_CODE (arg0) == ADDR_EXPR
7251 || TREE_CODE (arg1) == ADDR_EXPR)
7252 && ptr_difference_const (arg0, arg1, &diff))
7253 return build_int_cst_type (type, diff);
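/* E.g. with int a[10] and 4-byte ints (a hypothetical layout),
   the address difference &a[3] - &a[1] folds to the constant
   byte offset 8. */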
7256 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7257 of the array. The loop optimizer sometimes produces this type of
7258 expression. */
7259 if (TREE_CODE (arg0) == ADDR_EXPR
7260 && TREE_CODE (arg1) == MULT_EXPR)
7262 tem = try_move_mult_to_index (type, MINUS_EXPR, arg0, arg1);
7263 if (tem)
7264 return fold (tem);
7267 if (TREE_CODE (arg0) == MULT_EXPR
7268 && TREE_CODE (arg1) == MULT_EXPR
7269 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7271 /* (A * C) - (B * C) -> (A-B) * C. */
7272 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7273 TREE_OPERAND (arg1, 1), 0))
7274 return fold (build2 (MULT_EXPR, type,
7275 fold (build2 (MINUS_EXPR, type,
7276 TREE_OPERAND (arg0, 0),
7277 TREE_OPERAND (arg1, 0))),
7278 TREE_OPERAND (arg0, 1)));
7279 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7280 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7281 TREE_OPERAND (arg1, 0), 0))
7282 return fold (build2 (MULT_EXPR, type,
7283 TREE_OPERAND (arg0, 0),
7284 fold (build2 (MINUS_EXPR, type,
7285 TREE_OPERAND (arg0, 1),
7286 TREE_OPERAND (arg1, 1)))));
7289 goto associate;
7291 case MULT_EXPR:
7292 /* (-A) * (-B) -> A * B */
7293 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7294 return fold (build2 (MULT_EXPR, type,
7295 TREE_OPERAND (arg0, 0),
7296 negate_expr (arg1)));
7297 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7298 return fold (build2 (MULT_EXPR, type,
7299 negate_expr (arg0),
7300 TREE_OPERAND (arg1, 0)));
7302 if (! FLOAT_TYPE_P (type))
7304 if (integer_zerop (arg1))
7305 return omit_one_operand (type, arg1, arg0);
7306 if (integer_onep (arg1))
7307 return non_lvalue (fold_convert (type, arg0));
7309 /* (a * (1 << b)) is (a << b) */
7310 if (TREE_CODE (arg1) == LSHIFT_EXPR
7311 && integer_onep (TREE_OPERAND (arg1, 0)))
7312 return fold (build2 (LSHIFT_EXPR, type, arg0,
7313 TREE_OPERAND (arg1, 1)));
7314 if (TREE_CODE (arg0) == LSHIFT_EXPR
7315 && integer_onep (TREE_OPERAND (arg0, 0)))
7316 return fold (build2 (LSHIFT_EXPR, type, arg1,
7317 TREE_OPERAND (arg0, 1)));
7319 if (TREE_CODE (arg1) == INTEGER_CST
7320 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
7321 fold_convert (type, arg1),
7322 code, NULL_TREE)))
7323 return fold_convert (type, tem);
7326 else
7328 /* Maybe fold x * 0 to 0. The expressions aren't the same
7329 when x is NaN, since x * 0 is also NaN. Nor are they the
7330 same in modes with signed zeros, since multiplying a
7331 negative value by 0 gives -0, not +0. */
7332 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7333 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7334 && real_zerop (arg1))
7335 return omit_one_operand (type, arg1, arg0);
7336 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7337 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7338 && real_onep (arg1))
7339 return non_lvalue (fold_convert (type, arg0));
7341 /* Transform x * -1.0 into -x. */
7342 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7343 && real_minus_onep (arg1))
7344 return fold_convert (type, negate_expr (arg0));
7346 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7347 if (flag_unsafe_math_optimizations
7348 && TREE_CODE (arg0) == RDIV_EXPR
7349 && TREE_CODE (arg1) == REAL_CST
7350 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7352 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7353 arg1, 0);
7354 if (tem)
7355 return fold (build2 (RDIV_EXPR, type, tem,
7356 TREE_OPERAND (arg0, 1)));
7359 if (flag_unsafe_math_optimizations)
7361 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7362 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7364 /* Optimizations of root(...)*root(...). */
7365 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7367 tree rootfn, arg, arglist;
7368 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7369 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7371 /* Optimize sqrt(x)*sqrt(x) as x. */
7372 if (BUILTIN_SQRT_P (fcode0)
7373 && operand_equal_p (arg00, arg10, 0)
7374 && ! HONOR_SNANS (TYPE_MODE (type)))
7375 return arg00;
7377 /* Optimize root(x)*root(y) as root(x*y). */
7378 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7379 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7380 arglist = build_tree_list (NULL_TREE, arg);
7381 return build_function_call_expr (rootfn, arglist);
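/* E.g. sqrt(x)*sqrt(y) becomes sqrt(x*y). This block is guarded by
   flag_unsafe_math_optimizations: with x == y == -1.0 the original
   is NaN*NaN == NaN while the folded form is sqrt(1.0) == 1.0. */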
7384 /* Optimize expN(x)*expN(y) as expN(x+y). */
7385 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7387 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7388 tree arg = build2 (PLUS_EXPR, type,
7389 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7390 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7391 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7392 return build_function_call_expr (expfn, arglist);
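/* E.g. exp2(3.0)*exp2(4.0) becomes exp2(3.0 + 4.0) == 128.0. */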
7395 /* Optimizations of pow(...)*pow(...). */
7396 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7397 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7398 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7400 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7401 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7402 1)));
7403 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7404 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7405 1)));
7407 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7408 if (operand_equal_p (arg01, arg11, 0))
7410 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7411 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7412 tree arglist = tree_cons (NULL_TREE, fold (arg),
7413 build_tree_list (NULL_TREE,
7414 arg01));
7415 return build_function_call_expr (powfn, arglist);
7418 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7419 if (operand_equal_p (arg00, arg10, 0))
7421 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7422 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7423 tree arglist = tree_cons (NULL_TREE, arg00,
7424 build_tree_list (NULL_TREE,
7425 arg));
7426 return build_function_call_expr (powfn, arglist);
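/* E.g. pow(x,2.0)*pow(y,2.0) becomes pow(x*y,2.0), and
   pow(x,2.0)*pow(x,3.0) becomes pow(x,5.0). */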
7430 /* Optimize tan(x)*cos(x) as sin(x). */
7431 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7432 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7433 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7434 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7435 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7436 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7437 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7438 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7440 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7442 if (sinfn != NULL_TREE)
7443 return build_function_call_expr (sinfn,
7444 TREE_OPERAND (arg0, 1));
7447 /* Optimize x*pow(x,c) as pow(x,c+1). */
7448 if (fcode1 == BUILT_IN_POW
7449 || fcode1 == BUILT_IN_POWF
7450 || fcode1 == BUILT_IN_POWL)
7452 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7453 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7454 1)));
7455 if (TREE_CODE (arg11) == REAL_CST
7456 && ! TREE_CONSTANT_OVERFLOW (arg11)
7457 && operand_equal_p (arg0, arg10, 0))
7459 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7460 REAL_VALUE_TYPE c;
7461 tree arg, arglist;
7463 c = TREE_REAL_CST (arg11);
7464 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7465 arg = build_real (type, c);
7466 arglist = build_tree_list (NULL_TREE, arg);
7467 arglist = tree_cons (NULL_TREE, arg0, arglist);
7468 return build_function_call_expr (powfn, arglist);
7472 /* Optimize pow(x,c)*x as pow(x,c+1). */
7473 if (fcode0 == BUILT_IN_POW
7474 || fcode0 == BUILT_IN_POWF
7475 || fcode0 == BUILT_IN_POWL)
7477 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7478 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7479 1)));
7480 if (TREE_CODE (arg01) == REAL_CST
7481 && ! TREE_CONSTANT_OVERFLOW (arg01)
7482 && operand_equal_p (arg1, arg00, 0))
7484 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7485 REAL_VALUE_TYPE c;
7486 tree arg, arglist;
7488 c = TREE_REAL_CST (arg01);
7489 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7490 arg = build_real (type, c);
7491 arglist = build_tree_list (NULL_TREE, arg);
7492 arglist = tree_cons (NULL_TREE, arg1, arglist);
7493 return build_function_call_expr (powfn, arglist);
7497 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7498 if (! optimize_size
7499 && operand_equal_p (arg0, arg1, 0))
7501 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7503 if (powfn)
7505 tree arg = build_real (type, dconst2);
7506 tree arglist = build_tree_list (NULL_TREE, arg);
7507 arglist = tree_cons (NULL_TREE, arg0, arglist);
7508 return build_function_call_expr (powfn, arglist);
7513 goto associate;
7515 case BIT_IOR_EXPR:
7516 bit_ior:
7517 if (integer_all_onesp (arg1))
7518 return omit_one_operand (type, arg1, arg0);
7519 if (integer_zerop (arg1))
7520 return non_lvalue (fold_convert (type, arg0));
7521 if (operand_equal_p (arg0, arg1, 0))
7522 return non_lvalue (fold_convert (type, arg0));
7524 /* ~X | X is -1. */
7525 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7526 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7528 t1 = build_int_cst (type, -1);
7529 t1 = force_fit_type (t1, 0, false, false);
7530 return omit_one_operand (type, t1, arg1);
7533 /* X | ~X is -1. */
7534 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7535 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7537 t1 = build_int_cst (type, -1);
7538 t1 = force_fit_type (t1, 0, false, false);
7539 return omit_one_operand (type, t1, arg0);
7542 t1 = distribute_bit_expr (code, type, arg0, arg1);
7543 if (t1 != NULL_TREE)
7544 return t1;
7546 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7548 This results in more efficient code for machines without a NAND
7549 instruction. Combine will canonicalize to the first form,
7550 which will allow use of NAND instructions provided by the
7551 backend if they exist. */
7552 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7553 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7555 return fold (build1 (BIT_NOT_EXPR, type,
7556 build2 (BIT_AND_EXPR, type,
7557 TREE_OPERAND (arg0, 0),
7558 TREE_OPERAND (arg1, 0))));
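/* This is just De Morgan's law: ~a | ~b == ~(a & b). E.g. with
   a == 0b1100 and b == 0b1010, both sides have the low four
   bits 0b0111. */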
7561 /* See if this can be simplified into a rotate first. If that
7562 is unsuccessful continue in the association code. */
7563 goto bit_rotate;
7565 case BIT_XOR_EXPR:
7566 if (integer_zerop (arg1))
7567 return non_lvalue (fold_convert (type, arg0));
7568 if (integer_all_onesp (arg1))
7569 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7570 if (operand_equal_p (arg0, arg1, 0))
7571 return omit_one_operand (type, integer_zero_node, arg0);
7573 /* ~X ^ X is -1. */
7574 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7575 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7577 t1 = build_int_cst (type, -1);
7578 t1 = force_fit_type (t1, 0, false, false);
7579 return omit_one_operand (type, t1, arg1);
7582 /* X ^ ~X is -1. */
7583 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7584 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7586 t1 = build_int_cst (type, -1);
7587 t1 = force_fit_type (t1, 0, false, false);
7588 return omit_one_operand (type, t1, arg0);
7591 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7592 with a constant, and the two constants have no bits in common,
7593 we should treat this as a BIT_IOR_EXPR since this may produce more
7594 simplifications. */
7595 if (TREE_CODE (arg0) == BIT_AND_EXPR
7596 && TREE_CODE (arg1) == BIT_AND_EXPR
7597 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7598 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7599 && integer_zerop (const_binop (BIT_AND_EXPR,
7600 TREE_OPERAND (arg0, 1),
7601 TREE_OPERAND (arg1, 1), 0)))
7603 code = BIT_IOR_EXPR;
7604 goto bit_ior;
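/* E.g. (a & 0x0f) ^ (b & 0xf0): the two masks share no set bits,
   so no bit of the XOR can cancel and it is equivalent to
   (a & 0x0f) | (b & 0xf0). */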
7607 /* See if this can be simplified into a rotate first. If that
7608 is unsuccessful continue in the association code. */
7609 goto bit_rotate;
7611 case BIT_AND_EXPR:
7612 if (integer_all_onesp (arg1))
7613 return non_lvalue (fold_convert (type, arg0));
7614 if (integer_zerop (arg1))
7615 return omit_one_operand (type, arg1, arg0);
7616 if (operand_equal_p (arg0, arg1, 0))
7617 return non_lvalue (fold_convert (type, arg0));
7619 /* ~X & X is always zero. */
7620 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7621 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7622 return omit_one_operand (type, integer_zero_node, arg1);
7624 /* X & ~X is always zero. */
7625 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7626 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7627 return omit_one_operand (type, integer_zero_node, arg0);
7629 t1 = distribute_bit_expr (code, type, arg0, arg1);
7630 if (t1 != NULL_TREE)
7631 return t1;
7632 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7633 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7634 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7636 unsigned int prec
7637 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7639 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7640 && (~TREE_INT_CST_LOW (arg1)
7641 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7642 return fold_convert (type, TREE_OPERAND (arg0, 0));
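/* E.g. if c is an 8-bit unsigned char and int is wider, the mask
   0377 == 0xff already covers the whole zero-extended value, so
   ((int) c & 0377) is just (int) c. */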
7645 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7647 This results in more efficient code for machines without a NOR
7648 instruction. Combine will canonicalize to the first form,
7649 which will allow use of NOR instructions provided by the
7650 backend if they exist. */
7651 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7652 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7654 return fold (build1 (BIT_NOT_EXPR, type,
7655 build2 (BIT_IOR_EXPR, type,
7656 TREE_OPERAND (arg0, 0),
7657 TREE_OPERAND (arg1, 0))));
7660 goto associate;
7662 case RDIV_EXPR:
7663 /* Don't touch a floating-point divide by zero unless the mode
7664 of the constant can represent infinity. */
7665 if (TREE_CODE (arg1) == REAL_CST
7666 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7667 && real_zerop (arg1))
7668 return t;
7670 /* (-A) / (-B) -> A / B */
7671 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7672 return fold (build2 (RDIV_EXPR, type,
7673 TREE_OPERAND (arg0, 0),
7674 negate_expr (arg1)));
7675 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7676 return fold (build2 (RDIV_EXPR, type,
7677 negate_expr (arg0),
7678 TREE_OPERAND (arg1, 0)));
7680 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7681 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7682 && real_onep (arg1))
7683 return non_lvalue (fold_convert (type, arg0));
7685 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7686 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7687 && real_minus_onep (arg1))
7688 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7690 /* If ARG1 is a constant, we can convert this to a multiply by the
7691 reciprocal. This does not have the same rounding properties,
7692 so only do this if -funsafe-math-optimizations. We can actually
7693 always safely do it if ARG1 is a power of two, but it's hard to
7694 tell whether it is in a portable manner. */
7695 if (TREE_CODE (arg1) == REAL_CST)
7697 if (flag_unsafe_math_optimizations
7698 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7699 arg1, 0)))
7700 return fold (build2 (MULT_EXPR, type, arg0, tem));
7701 /* Find the reciprocal if optimizing and the result is exact. */
7702 if (optimize)
7704 REAL_VALUE_TYPE r;
7705 r = TREE_REAL_CST (arg1);
7706 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
7708 tem = build_real (type, r);
7709 return fold (build2 (MULT_EXPR, type, arg0, tem));
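/* E.g. x / 4.0 becomes x * 0.25; 0.25 is exactly representable in
   binary floating point, so this branch is safe even without
   -funsafe-math-optimizations. */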
7713 /* Convert A/B/C to A/(B*C). */
7714 if (flag_unsafe_math_optimizations
7715 && TREE_CODE (arg0) == RDIV_EXPR)
7716 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7717 fold (build2 (MULT_EXPR, type,
7718 TREE_OPERAND (arg0, 1), arg1))));
7720 /* Convert A/(B/C) to (A/B)*C. */
7721 if (flag_unsafe_math_optimizations
7722 && TREE_CODE (arg1) == RDIV_EXPR)
7723 return fold (build2 (MULT_EXPR, type,
7724 fold (build2 (RDIV_EXPR, type, arg0,
7725 TREE_OPERAND (arg1, 0))),
7726 TREE_OPERAND (arg1, 1)));
7728 /* Convert C1/(X*C2) into (C1/C2)/X. */
7729 if (flag_unsafe_math_optimizations
7730 && TREE_CODE (arg1) == MULT_EXPR
7731 && TREE_CODE (arg0) == REAL_CST
7732 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7734 tree tem = const_binop (RDIV_EXPR, arg0,
7735 TREE_OPERAND (arg1, 1), 0);
7736 if (tem)
7737 return fold (build2 (RDIV_EXPR, type, tem,
7738 TREE_OPERAND (arg1, 0)));
7741 if (flag_unsafe_math_optimizations)
7743 enum built_in_function fcode = builtin_mathfn_code (arg1);
7744 /* Optimize x/expN(y) into x*expN(-y). */
7745 if (BUILTIN_EXPONENT_P (fcode))
7747 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7748 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7749 tree arglist = build_tree_list (NULL_TREE,
7750 fold_convert (type, arg));
7751 arg1 = build_function_call_expr (expfn, arglist);
7752 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7755 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7756 if (fcode == BUILT_IN_POW
7757 || fcode == BUILT_IN_POWF
7758 || fcode == BUILT_IN_POWL)
7760 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7761 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7762 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7763 tree neg11 = fold_convert (type, negate_expr (arg11));
7764 tree arglist = tree_cons(NULL_TREE, arg10,
7765 build_tree_list (NULL_TREE, neg11));
7766 arg1 = build_function_call_expr (powfn, arglist);
7767 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7771 if (flag_unsafe_math_optimizations)
7773 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7774 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7776 /* Optimize sin(x)/cos(x) as tan(x). */
7777 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7778 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7779 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7780 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7781 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7783 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7785 if (tanfn != NULL_TREE)
7786 return build_function_call_expr (tanfn,
7787 TREE_OPERAND (arg0, 1));
7790 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7791 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7792 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7793 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7794 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7795 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7797 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7799 if (tanfn != NULL_TREE)
7801 tree tmp = TREE_OPERAND (arg0, 1);
7802 tmp = build_function_call_expr (tanfn, tmp);
7803 return fold (build2 (RDIV_EXPR, type,
7804 build_real (type, dconst1), tmp));
7808 /* Optimize pow(x,c)/x as pow(x,c-1). */
7809 if (fcode0 == BUILT_IN_POW
7810 || fcode0 == BUILT_IN_POWF
7811 || fcode0 == BUILT_IN_POWL)
7813 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7814 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7815 if (TREE_CODE (arg01) == REAL_CST
7816 && ! TREE_CONSTANT_OVERFLOW (arg01)
7817 && operand_equal_p (arg1, arg00, 0))
7819 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7820 REAL_VALUE_TYPE c;
7821 tree arg, arglist;
7823 c = TREE_REAL_CST (arg01);
7824 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7825 arg = build_real (type, c);
7826 arglist = build_tree_list (NULL_TREE, arg);
7827 arglist = tree_cons (NULL_TREE, arg1, arglist);
7828 return build_function_call_expr (powfn, arglist);
7832 goto binary;
7834 case TRUNC_DIV_EXPR:
7835 case ROUND_DIV_EXPR:
7836 case FLOOR_DIV_EXPR:
7837 case CEIL_DIV_EXPR:
7838 case EXACT_DIV_EXPR:
7839 if (integer_onep (arg1))
7840 return non_lvalue (fold_convert (type, arg0));
7841 if (integer_zerop (arg1))
7842 return t;
7843 /* X / -1 is -X. */
7844 if (!TYPE_UNSIGNED (type)
7845 && TREE_CODE (arg1) == INTEGER_CST
7846 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7847 && TREE_INT_CST_HIGH (arg1) == -1)
7848 return fold_convert (type, negate_expr (arg0));
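/* Integer constants are stored here as a two-word (high/low) pair;
   HIGH == -1 together with an all-ones LOW is how the constant -1
   is spelled, whatever the width of HOST_WIDE_INT. */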
7850 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7851 operation, EXACT_DIV_EXPR.
7853 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7854 At one time others generated faster code; it's not clear if they do
7855 after the last round of changes to the DIV code in expmed.c. */
7856 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7857 && multiple_of_p (type, arg0, arg1))
7858 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7860 if (TREE_CODE (arg1) == INTEGER_CST
7861 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7862 code, NULL_TREE)))
7863 return fold_convert (type, tem);
7865 goto binary;
7867 case CEIL_MOD_EXPR:
7868 case FLOOR_MOD_EXPR:
7869 case ROUND_MOD_EXPR:
7870 case TRUNC_MOD_EXPR:
7871 if (integer_onep (arg1))
7872 return omit_one_operand (type, integer_zero_node, arg0);
7873 if (integer_zerop (arg1))
7874 return t;
7876 /* X % -1 is zero. */
7877 if (!TYPE_UNSIGNED (type)
7878 && TREE_CODE (arg1) == INTEGER_CST
7879 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7880 && TREE_INT_CST_HIGH (arg1) == -1)
7881 return omit_one_operand (type, integer_zero_node, arg0);
7883 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
7884 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
7885 if (code == TRUNC_MOD_EXPR
7886 && TYPE_UNSIGNED (type)
7887 && integer_pow2p (arg1))
7889 unsigned HOST_WIDE_INT high, low;
7890 tree mask;
7891 int l;
7893 l = tree_log2 (arg1);
7894 if (l >= HOST_BITS_PER_WIDE_INT)
7896 high = ((unsigned HOST_WIDE_INT) 1
7897 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
7898 low = -1;
7900 else
7902 high = 0;
7903 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
7906 mask = build_int_cst_wide (type, low, high);
7907 return fold (build2 (BIT_AND_EXPR, type,
7908 fold_convert (type, arg0), mask));
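/* E.g. for unsigned x, x % 8 becomes x & 7. The two-word dance
   above builds the mask 2**l - 1 even when l exceeds the number
   of bits in one HOST_WIDE_INT. */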
7911 /* X % -C is the same as X % C. */
7912 if (code == TRUNC_MOD_EXPR
7913 && !TYPE_UNSIGNED (type)
7914 && TREE_CODE (arg1) == INTEGER_CST
7915 && TREE_INT_CST_HIGH (arg1) < 0
7916 && !flag_trapv
7917 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
7918 && !sign_bit_p (arg1, arg1))
7919 return fold (build2 (code, type, fold_convert (type, arg0),
7920 fold_convert (type, negate_expr (arg1))));
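/* E.g. x % -5 becomes x % 5: with truncating division the sign of
   the result follows the dividend, not the divisor. The sign_bit_p
   guard matters because -INT_MIN == INT_MIN in two's complement. */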
7922 /* X % -Y is the same as X % Y. */
7923 if (code == TRUNC_MOD_EXPR
7924 && !TYPE_UNSIGNED (type)
7925 && TREE_CODE (arg1) == NEGATE_EXPR
7926 && !flag_trapv)
7927 return fold (build2 (code, type, fold_convert (type, arg0),
7928 fold_convert (type, TREE_OPERAND (arg1, 0))));
7930 if (TREE_CODE (arg1) == INTEGER_CST
7931 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7932 code, NULL_TREE)))
7933 return fold_convert (type, tem);
7935 goto binary;
7937 case LROTATE_EXPR:
7938 case RROTATE_EXPR:
7939 if (integer_all_onesp (arg0))
7940 return omit_one_operand (type, arg0, arg1);
7941 goto shift;
7943 case RSHIFT_EXPR:
7944 /* Optimize -1 >> x for arithmetic right shifts. */
7945 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7946 return omit_one_operand (type, arg0, arg1);
7947 /* ... fall through ... */
7949 case LSHIFT_EXPR:
7950 shift:
7951 if (integer_zerop (arg1))
7952 return non_lvalue (fold_convert (type, arg0));
7953 if (integer_zerop (arg0))
7954 return omit_one_operand (type, arg0, arg1);
7956 /* Since a negative shift count is not well-defined,
7957 don't try to compute it in the compiler. */
7958 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7959 return t;
7960 /* Rewrite an LROTATE_EXPR by a constant into an
7961 RROTATE_EXPR by a new constant. */
7962 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7964 tree tem = build_int_cst (NULL_TREE,
7965 GET_MODE_BITSIZE (TYPE_MODE (type)));
7966 tem = fold_convert (TREE_TYPE (arg1), tem);
7967 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7968 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
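/* E.g. in a hypothetical 32-bit mode, rotating left by 8 becomes
   rotating right by 32 - 8 == 24. */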
7971 /* If we have a rotate of a bit operation with the rotate count and
7972 the second operand of the bit operation both constant,
7973 permute the two operations. */
7974 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7975 && (TREE_CODE (arg0) == BIT_AND_EXPR
7976 || TREE_CODE (arg0) == BIT_IOR_EXPR
7977 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7978 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7979 return fold (build2 (TREE_CODE (arg0), type,
7980 fold (build2 (code, type,
7981 TREE_OPERAND (arg0, 0), arg1)),
7982 fold (build2 (code, type,
7983 TREE_OPERAND (arg0, 1), arg1))));
7985 /* Two consecutive rotates adding up to the width of the mode can
7986 be ignored. */
7987 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7988 && TREE_CODE (arg0) == RROTATE_EXPR
7989 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7990 && TREE_INT_CST_HIGH (arg1) == 0
7991 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7992 && ((TREE_INT_CST_LOW (arg1)
7993 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7994 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7995 return TREE_OPERAND (arg0, 0);
7997 goto binary;
7999 case MIN_EXPR:
8000 if (operand_equal_p (arg0, arg1, 0))
8001 return omit_one_operand (type, arg0, arg1);
8002 if (INTEGRAL_TYPE_P (type)
8003 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8004 return omit_one_operand (type, arg1, arg0);
8005 goto associate;
8007 case MAX_EXPR:
8008 if (operand_equal_p (arg0, arg1, 0))
8009 return omit_one_operand (type, arg0, arg1);
8010 if (INTEGRAL_TYPE_P (type)
8011 && TYPE_MAX_VALUE (type)
8012 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8013 return omit_one_operand (type, arg1, arg0);
8014 goto associate;
8016 case TRUTH_NOT_EXPR:
8017 /* The argument to invert_truthvalue must have Boolean type. */
8018 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8019 arg0 = fold_convert (boolean_type_node, arg0);
8021 /* Note that the operand of this must be an int
8022 and its values must be 0 or 1.
8023 ("true" is a fixed value perhaps depending on the language,
8024 but we don't handle values other than 1 correctly yet.) */
8025 tem = invert_truthvalue (arg0);
8026 /* Avoid infinite recursion. */
8027 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
8029 tem = fold_single_bit_test (code, arg0, arg1, type);
8030 if (tem)
8031 return tem;
8032 return t;
8034 return fold_convert (type, tem);
8036 case TRUTH_ANDIF_EXPR:
8037 /* Note that the operands of this must be ints
8038 and their values must be 0 or 1.
8039 ("true" is a fixed value perhaps depending on the language.) */
8040 /* If first arg is constant zero, return it. */
8041 if (integer_zerop (arg0))
8042 return fold_convert (type, arg0);
8043 case TRUTH_AND_EXPR:
8044 /* If either arg is constant true, drop it. */
8045 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8046 return non_lvalue (fold_convert (type, arg1));
8047 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8048 /* Preserve sequence points. */
8049 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8050 return non_lvalue (fold_convert (type, arg0));
8051 /* If second arg is constant zero, result is zero, but first arg
8052 must be evaluated. */
8053 if (integer_zerop (arg1))
8054 return omit_one_operand (type, arg1, arg0);
8055 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8056 case will be handled here. */
8057 if (integer_zerop (arg0))
8058 return omit_one_operand (type, arg0, arg1);
8060 /* !X && X is always false. */
8061 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8062 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8063 return omit_one_operand (type, integer_zero_node, arg1);
8064 /* X && !X is always false. */
8065 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8066 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8067 return omit_one_operand (type, integer_zero_node, arg0);
8069 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8070 means A >= Y && A != MAX, but in this case we know that
8071 A < X <= MAX. */
8073 if (!TREE_SIDE_EFFECTS (arg0)
8074 && !TREE_SIDE_EFFECTS (arg1))
8076 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8077 if (tem)
8078 return fold (build2 (code, type, tem, arg1));
8080 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8081 if (tem)
8082 return fold (build2 (code, type, arg0, tem));
8085 truth_andor:
8086 /* We only do these simplifications if we are optimizing. */
8087 if (!optimize)
8088 return t;
8090 /* Check for things like (A || B) && (A || C). We can convert this
8091 to A || (B && C). Note that either operator can be any of the four
8092 truth and/or operations and the transformation will still be
8093 valid. Also note that we only care about order for the
8094 ANDIF and ORIF operators. If B contains side effects, this
8095 might change the truth-value of A. */
8096 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8097 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8098 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8099 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8100 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8101 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8103 tree a00 = TREE_OPERAND (arg0, 0);
8104 tree a01 = TREE_OPERAND (arg0, 1);
8105 tree a10 = TREE_OPERAND (arg1, 0);
8106 tree a11 = TREE_OPERAND (arg1, 1);
8107 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8108 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8109 && (code == TRUTH_AND_EXPR
8110 || code == TRUTH_OR_EXPR));
8112 if (operand_equal_p (a00, a10, 0))
8113 return fold (build2 (TREE_CODE (arg0), type, a00,
8114 fold (build2 (code, type, a01, a11))));
8115 else if (commutative && operand_equal_p (a00, a11, 0))
8116 return fold (build2 (TREE_CODE (arg0), type, a00,
8117 fold (build2 (code, type, a01, a10))));
8118 else if (commutative && operand_equal_p (a01, a10, 0))
8119 return fold (build2 (TREE_CODE (arg0), type, a01,
8120 fold (build2 (code, type, a00, a11))));
8122 /* This case is tricky because we must either have commutative
8123 operators or else A10 must not have side-effects. */
8125 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8126 && operand_equal_p (a01, a11, 0))
8127 return fold (build2 (TREE_CODE (arg0), type,
8128 fold (build2 (code, type, a00, a10)),
8129 a01));
8132 /* See if we can build a range comparison. */
8133 if (0 != (tem = fold_range_test (t)))
8134 return tem;
8136 /* Check for the possibility of merging component references. If our
8137 lhs is another similar operation, try to merge its rhs with our
8138 rhs. Then try to merge our lhs and rhs. */
8139 if (TREE_CODE (arg0) == code
8140 && 0 != (tem = fold_truthop (code, type,
8141 TREE_OPERAND (arg0, 1), arg1)))
8142 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8144 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8145 return tem;
8147 return t;
8149 case TRUTH_ORIF_EXPR:
8150 /* Note that the operands of this must be ints
8151 and their values must be 0 or 1.
8152 ("true" is a fixed value perhaps depending on the language.) */
8153 /* If first arg is constant true, return it. */
8154 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8155 return fold_convert (type, arg0);
8156 case TRUTH_OR_EXPR:
8157 /* If either arg is constant zero, drop it. */
8158 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8159 return non_lvalue (fold_convert (type, arg1));
8160 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8161 /* Preserve sequence points. */
8162 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8163 return non_lvalue (fold_convert (type, arg0));
8164 /* If second arg is constant true, result is true, but we must
8165 evaluate first arg. */
8166 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8167 return omit_one_operand (type, arg1, arg0);
8168 /* Likewise for first arg, but note this only occurs here for
8169 TRUTH_OR_EXPR. */
8170 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8171 return omit_one_operand (type, arg0, arg1);
8173 /* !X || X is always true. */
8174 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8175 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8176 return omit_one_operand (type, integer_one_node, arg1);
8177 /* X || !X is always true. */
8178 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8179 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8180 return omit_one_operand (type, integer_one_node, arg0);
8182 goto truth_andor;
8184 case TRUTH_XOR_EXPR:
8185 /* If the second arg is constant zero, drop it. */
8186 if (integer_zerop (arg1))
8187 return non_lvalue (fold_convert (type, arg0));
8188 /* If the second arg is constant true, this is a logical inversion. */
8189 if (integer_onep (arg1))
8190 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
8191 /* Identical arguments cancel to zero. */
8192 if (operand_equal_p (arg0, arg1, 0))
8193 return omit_one_operand (type, integer_zero_node, arg0);
8195 /* !X ^ X is always true. */
8196 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8197 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8198 return omit_one_operand (type, integer_one_node, arg1);
8200 /* X ^ !X is always true. */
8201 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8202 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8203 return omit_one_operand (type, integer_one_node, arg0);
8205 return t;
8207 case EQ_EXPR:
8208 case NE_EXPR:
8209 case LT_EXPR:
8210 case GT_EXPR:
8211 case LE_EXPR:
8212 case GE_EXPR:
8213 /* If one arg is a real or integer constant, put it last. */
8214 if (tree_swap_operands_p (arg0, arg1, true))
8215 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
8217 /* If this is an equality comparison of the address of a non-weak
8218 object against zero, then we know the result. */
8219 if ((code == EQ_EXPR || code == NE_EXPR)
8220 && TREE_CODE (arg0) == ADDR_EXPR
8221 && DECL_P (TREE_OPERAND (arg0, 0))
8222 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8223 && integer_zerop (arg1))
8224 return constant_boolean_node (code != EQ_EXPR, type);
8226 /* If this is an equality comparison of the address of two non-weak,
8227 unaliased symbols, neither of which is extern (since we do not
8228 have access to attributes for externs), then we know the result. */
8229 if ((code == EQ_EXPR || code == NE_EXPR)
8230 && TREE_CODE (arg0) == ADDR_EXPR
8231 && DECL_P (TREE_OPERAND (arg0, 0))
8232 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8233 && ! lookup_attribute ("alias",
8234 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8235 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8236 && TREE_CODE (arg1) == ADDR_EXPR
8237 && DECL_P (TREE_OPERAND (arg1, 0))
8238 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8239 && ! lookup_attribute ("alias",
8240 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8241 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8242 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
8243 ? code == EQ_EXPR : code != EQ_EXPR,
8244 type);
8246 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8248 tree targ0 = strip_float_extensions (arg0);
8249 tree targ1 = strip_float_extensions (arg1);
8250 tree newtype = TREE_TYPE (targ0);
8252 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8253 newtype = TREE_TYPE (targ1);
8255 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8256 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8257 return fold (build2 (code, type, fold_convert (newtype, targ0),
8258 fold_convert (newtype, targ1)));
8260 /* (-a) CMP (-b) -> b CMP a */
8261 if (TREE_CODE (arg0) == NEGATE_EXPR
8262 && TREE_CODE (arg1) == NEGATE_EXPR)
8263 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
8264 TREE_OPERAND (arg0, 0)));
8266 if (TREE_CODE (arg1) == REAL_CST)
8268 REAL_VALUE_TYPE cst;
8269 cst = TREE_REAL_CST (arg1);
8271 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8272 if (TREE_CODE (arg0) == NEGATE_EXPR)
8273 return
8274 fold (build2 (swap_tree_comparison (code), type,
8275 TREE_OPERAND (arg0, 0),
8276 build_real (TREE_TYPE (arg1),
8277 REAL_VALUE_NEGATE (cst))));
8279 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8280 /* a CMP (-0) -> a CMP 0 */
8281 if (REAL_VALUE_MINUS_ZERO (cst))
8282 return fold (build2 (code, type, arg0,
8283 build_real (TREE_TYPE (arg1), dconst0)));
8285 /* x != NaN is always true; other ops are always false. */
8286 if (REAL_VALUE_ISNAN (cst)
8287 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8289 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8290 return omit_one_operand (type, tem, arg0);
8293 /* Fold comparisons against infinity. */
8294 if (REAL_VALUE_ISINF (cst))
8296 tem = fold_inf_compare (code, type, arg0, arg1);
8297 if (tem != NULL_TREE)
8298 return tem;
8302 /* If this is a comparison of a real constant with a PLUS_EXPR
8303 or a MINUS_EXPR of a real constant, we can convert it into a
8304 comparison with a revised real constant as long as no overflow
8305 occurs when unsafe_math_optimizations are enabled. */
8306 if (flag_unsafe_math_optimizations
8307 && TREE_CODE (arg1) == REAL_CST
8308 && (TREE_CODE (arg0) == PLUS_EXPR
8309 || TREE_CODE (arg0) == MINUS_EXPR)
8310 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8311 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8312 ? MINUS_EXPR : PLUS_EXPR,
8313 arg1, TREE_OPERAND (arg0, 1), 0))
8314 && ! TREE_CONSTANT_OVERFLOW (tem))
8315 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8317 /* Likewise, we can simplify a comparison of a real constant with
8318 a MINUS_EXPR whose first operand is also a real constant, i.e.
8319 (c1 - x) < c2 becomes x > c1-c2. */
8320 if (flag_unsafe_math_optimizations
8321 && TREE_CODE (arg1) == REAL_CST
8322 && TREE_CODE (arg0) == MINUS_EXPR
8323 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8324 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8325 arg1, 0))
8326 && ! TREE_CONSTANT_OVERFLOW (tem))
8327 return fold (build2 (swap_tree_comparison (code), type,
8328 TREE_OPERAND (arg0, 1), tem));
8330 /* Fold comparisons against built-in math functions. */
8331 if (TREE_CODE (arg1) == REAL_CST
8332 && flag_unsafe_math_optimizations
8333 && ! flag_errno_math)
8335 enum built_in_function fcode = builtin_mathfn_code (arg0);
8337 if (fcode != END_BUILTINS)
8339 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8340 if (tem != NULL_TREE)
8341 return tem;
8346 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8347 if (TREE_CONSTANT (arg1)
8348 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8349 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8350 /* This optimization is invalid for ordered comparisons
8351 if CONST+INCR overflows or if foo+incr might overflow.
8352 This optimization is invalid for floating point due to rounding.
8353 For pointer types we assume overflow doesn't happen. */
8354 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8355 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8356 && (code == EQ_EXPR || code == NE_EXPR))))
8358 tree varop, newconst;
8360 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8362 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
8363 arg1, TREE_OPERAND (arg0, 1)));
8364 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8365 TREE_OPERAND (arg0, 0),
8366 TREE_OPERAND (arg0, 1));
8368 else
8370 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
8371 arg1, TREE_OPERAND (arg0, 1)));
8372 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8373 TREE_OPERAND (arg0, 0),
8374 TREE_OPERAND (arg0, 1));
8378 /* If VAROP is a reference to a bitfield, we must mask
8379 the constant by the width of the field. */
8380 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8381 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8382 && host_integerp (DECL_SIZE (TREE_OPERAND
8383 (TREE_OPERAND (varop, 0), 1)), 1))
8385 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8386 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8387 tree folded_compare, shift;
8389 /* First check whether the comparison would come out
8390 always the same. If we don't do that we would
8391 change the meaning with the masking. */
8392 folded_compare = fold (build2 (code, type,
8393 TREE_OPERAND (varop, 0), arg1));
8394 if (integer_zerop (folded_compare)
8395 || integer_onep (folded_compare))
8396 return omit_one_operand (type, folded_compare, varop);
8398 shift = build_int_cst (NULL_TREE,
8399 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8400 shift = fold_convert (TREE_TYPE (varop), shift);
8401 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8402 newconst, shift));
8403 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8404 newconst, shift));
8407 return fold (build2 (code, type, varop, newconst));
8410 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8411 This transformation affects the cases which are handled in later
8412 optimizations involving comparisons with non-negative constants. */
8413 if (TREE_CODE (arg1) == INTEGER_CST
8414 && TREE_CODE (arg0) != INTEGER_CST
8415 && tree_int_cst_sgn (arg1) > 0)
8417 switch (code)
8419 case GE_EXPR:
8420 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8421 return fold (build2 (GT_EXPR, type, arg0, arg1));
8423 case LT_EXPR:
8424 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8425 return fold (build2 (LE_EXPR, type, arg0, arg1));
8427 default:
8428 break;
8432 /* Comparisons with the highest or lowest possible integer of
8433 the specified size will have known values.
8435 This is quite similar to fold_relational_hi_lo; however,
8436 attempts to share the code have been nothing but trouble. */
8438 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8440 if (TREE_CODE (arg1) == INTEGER_CST
8441 && ! TREE_CONSTANT_OVERFLOW (arg1)
8442 && width <= HOST_BITS_PER_WIDE_INT
8443 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8444 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8446 unsigned HOST_WIDE_INT signed_max;
8447 unsigned HOST_WIDE_INT max, min;
8449 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
8451 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8453 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8454 min = 0;
8456 else
8458 max = signed_max;
8459 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8462 if (TREE_INT_CST_HIGH (arg1) == 0
8463 && TREE_INT_CST_LOW (arg1) == max)
8464 switch (code)
8466 case GT_EXPR:
8467 return omit_one_operand (type, integer_zero_node, arg0);
8469 case GE_EXPR:
8470 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8472 case LE_EXPR:
8473 return omit_one_operand (type, integer_one_node, arg0);
8475 case LT_EXPR:
8476 return fold (build2 (NE_EXPR, type, arg0, arg1));
8478 /* The GE_EXPR and LT_EXPR cases above are not normally
8479 reached because of previous transformations. */
8481 default:
8482 break;
8484 else if (TREE_INT_CST_HIGH (arg1) == 0
8485 && TREE_INT_CST_LOW (arg1) == max - 1)
8486 switch (code)
8488 case GT_EXPR:
8489 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8490 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8491 case LE_EXPR:
8492 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8493 return fold (build2 (NE_EXPR, type, arg0, arg1));
8494 default:
8495 break;
8497 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8498 && TREE_INT_CST_LOW (arg1) == min)
8499 switch (code)
8501 case LT_EXPR:
8502 return omit_one_operand (type, integer_zero_node, arg0);
8504 case LE_EXPR:
8505 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8507 case GE_EXPR:
8508 return omit_one_operand (type, integer_one_node, arg0);
8510 case GT_EXPR:
8511 return fold (build2 (NE_EXPR, type, arg0, arg1));
8513 default:
8514 break;
8516 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8517 && TREE_INT_CST_LOW (arg1) == min + 1)
8518 switch (code)
8520 case GE_EXPR:
8521 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8522 return fold (build2 (NE_EXPR, type, arg0, arg1));
8523 case LT_EXPR:
8524 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8525 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8526 default:
8527 break;
8530 else if (!in_gimple_form
8531 && TREE_INT_CST_HIGH (arg1) == 0
8532 && TREE_INT_CST_LOW (arg1) == signed_max
8533 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8534 /* signed_type does not work on pointer types. */
8535 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8537 /* The following case also applies to X < signed_max+1
8538 and X >= signed_max+1 because of previous transformations. */
8539 if (code == LE_EXPR || code == GT_EXPR)
8541 tree st0, st1;
8542 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8543 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8544 return fold
8545 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
8546 type, fold_convert (st0, arg0),
8547 fold_convert (st1, integer_zero_node)));
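/* E.g. for 32-bit operands (hypothetically): unsigned
   x <= 0x7fffffff is the same test as (int) x >= 0, and
   x > 0x7fffffff the same as (int) x < 0. */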
8553 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8554 a MINUS_EXPR of a constant, we can convert it into a comparison with
8555 a revised constant as long as no overflow occurs. */
8556 if ((code == EQ_EXPR || code == NE_EXPR)
8557 && TREE_CODE (arg1) == INTEGER_CST
8558 && (TREE_CODE (arg0) == PLUS_EXPR
8559 || TREE_CODE (arg0) == MINUS_EXPR)
8560 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8561 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8562 ? MINUS_EXPR : PLUS_EXPR,
8563 arg1, TREE_OPERAND (arg0, 1), 0))
8564 && ! TREE_CONSTANT_OVERFLOW (tem))
8565 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8567 /* Similarly for a NEGATE_EXPR. */
8568 else if ((code == EQ_EXPR || code == NE_EXPR)
8569 && TREE_CODE (arg0) == NEGATE_EXPR
8570 && TREE_CODE (arg1) == INTEGER_CST
8571 && 0 != (tem = negate_expr (arg1))
8572 && TREE_CODE (tem) == INTEGER_CST
8573 && ! TREE_CONSTANT_OVERFLOW (tem))
8574 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8576 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8577 for !=. Don't do this for ordered comparisons due to overflow. */
8578 else if ((code == NE_EXPR || code == EQ_EXPR)
8579 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8580 return fold (build2 (code, type,
8581 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8583 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8584 && TREE_CODE (arg0) == NOP_EXPR)
8586 /* If we are widening one operand of an integer comparison,
8587 see if the other operand is similarly being widened. Perhaps we
8588 can do the comparison in the narrower type. */
8589 tem = fold_widened_comparison (code, type, arg0, arg1);
8590 if (tem)
8591 return tem;
8593 /* Or if we are changing signedness. */
8594 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8595 if (tem)
8596 return tem;
8599 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8600 constant, we can simplify it. */
8601 else if (TREE_CODE (arg1) == INTEGER_CST
8602 && (TREE_CODE (arg0) == MIN_EXPR
8603 || TREE_CODE (arg0) == MAX_EXPR)
8604 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8605 return optimize_minmax_comparison (t);
8607 /* If we are comparing an ABS_EXPR with a constant, we can
8608 convert all the cases into explicit comparisons, but they may
8609 well not be faster than doing the ABS and one comparison.
8610 But ABS (X) <= C is a range comparison, which becomes a subtraction
8611 and a comparison, and is probably faster. */
8612 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8613 && TREE_CODE (arg0) == ABS_EXPR
8614 && ! TREE_SIDE_EFFECTS (arg0)
8615 && (0 != (tem = negate_expr (arg1)))
8616 && TREE_CODE (tem) == INTEGER_CST
8617 && ! TREE_CONSTANT_OVERFLOW (tem))
8618 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8619 build2 (GE_EXPR, type,
8620 TREE_OPERAND (arg0, 0), tem),
8621 build2 (LE_EXPR, type,
8622 TREE_OPERAND (arg0, 0), arg1)));
8624 /* If this is an EQ or NE comparison with zero and ARG0 is
8625 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8626 two operations, but the latter can be done in one less insn
8627 on machines that have only two-operand insns or on which a
8628 constant cannot be the first operand. */
8629 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8630 && TREE_CODE (arg0) == BIT_AND_EXPR)
8632 tree arg00 = TREE_OPERAND (arg0, 0);
8633 tree arg01 = TREE_OPERAND (arg0, 1);
8634 if (TREE_CODE (arg00) == LSHIFT_EXPR
8635 && integer_onep (TREE_OPERAND (arg00, 0)))
8636 return
8637 fold (build2 (code, type,
8638 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8639 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8640 arg01, TREE_OPERAND (arg00, 1)),
8641 fold_convert (TREE_TYPE (arg0),
8642 integer_one_node)),
8643 arg1));
8644 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8645 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8646 return
8647 fold (build2 (code, type,
8648 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8649 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8650 arg00, TREE_OPERAND (arg01, 1)),
8651 fold_convert (TREE_TYPE (arg0),
8652 integer_one_node)),
8653 arg1));
8656 /* If this is an NE or EQ comparison of zero against the result of a
8657 signed MOD operation whose second operand is a power of 2, make
8658 the MOD operation unsigned since it is simpler and equivalent. */
8659 if ((code == NE_EXPR || code == EQ_EXPR)
8660 && integer_zerop (arg1)
8661 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8662 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8663 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8664 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8665 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8666 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8668 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8669 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
8670 fold_convert (newtype,
8671 TREE_OPERAND (arg0, 0)),
8672 fold_convert (newtype,
8673 TREE_OPERAND (arg0, 1))));
8675 return fold (build2 (code, type, newmod,
8676 fold_convert (newtype, arg1)));
8679 /* If this is an NE comparison of zero with an AND of one, remove the
8680 comparison since the AND will give the correct value. */
8681 if (code == NE_EXPR && integer_zerop (arg1)
8682 && TREE_CODE (arg0) == BIT_AND_EXPR
8683 && integer_onep (TREE_OPERAND (arg0, 1)))
8684 return fold_convert (type, arg0);
8686 /* If we have (A & C) == C where C is a power of 2, convert this into
8687 (A & C) != 0. Similarly for NE_EXPR. */
8688 if ((code == EQ_EXPR || code == NE_EXPR)
8689 && TREE_CODE (arg0) == BIT_AND_EXPR
8690 && integer_pow2p (TREE_OPERAND (arg0, 1))
8691 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8692 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8693 arg0, fold_convert (TREE_TYPE (arg0),
8694 integer_zero_node)));
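/* E.g. (a & 8) == 8 becomes (a & 8) != 0: a power-of-2 mask leaves
   only the values 0 and 8 as possible results. */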
8696 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8697 2, then fold the expression into shifts and logical operations. */
8698 tem = fold_single_bit_test (code, arg0, arg1, type);
8699 if (tem)
8700 return tem;
8702 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8703 Similarly for NE_EXPR. */
8704 if ((code == EQ_EXPR || code == NE_EXPR)
8705 && TREE_CODE (arg0) == BIT_AND_EXPR
8706 && TREE_CODE (arg1) == INTEGER_CST
8707 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8709 tree notc = fold (build1 (BIT_NOT_EXPR,
8710 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8711 TREE_OPERAND (arg0, 1)));
8712 tree dandnotc = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8713 arg1, notc));
8714 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8715 if (integer_nonzerop (dandnotc))
8716 return omit_one_operand (type, rslt, arg0);
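/* E.g. (a & 3) == 4 can never hold, since 4 & ~3 != 0; the whole
   comparison folds to 0 (or to 1 for NE_EXPR). */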
8719 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8720 Similarly for NE_EXPR. */
8721 if ((code == EQ_EXPR || code == NE_EXPR)
8722 && TREE_CODE (arg0) == BIT_IOR_EXPR
8723 && TREE_CODE (arg1) == INTEGER_CST
8724 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8726 tree notd = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8727 tree candnotd = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8728 TREE_OPERAND (arg0, 1), notd));
8729 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8730 if (integer_nonzerop (candnotd))
8731 return omit_one_operand (type, rslt, arg0);
8734 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8735 and similarly for >= into !=. */
8736 if ((code == LT_EXPR || code == GE_EXPR)
8737 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8738 && TREE_CODE (arg1) == LSHIFT_EXPR
8739 && integer_onep (TREE_OPERAND (arg1, 0)))
8740 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8741 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8742 TREE_OPERAND (arg1, 1)),
8743 fold_convert (TREE_TYPE (arg0), integer_zero_node));
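/* E.g. for unsigned x, x < (1 << y) becomes x >> y == 0: the shift
   discards exactly the bits that would let x reach 1 << y. */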
8745 else if ((code == LT_EXPR || code == GE_EXPR)
8746 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8747 && (TREE_CODE (arg1) == NOP_EXPR
8748 || TREE_CODE (arg1) == CONVERT_EXPR)
8749 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8750 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8751 return
8752 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8753 fold_convert (TREE_TYPE (arg0),
8754 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8755 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8756 1))),
8757 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8759 /* Simplify comparison of something with itself. (For IEEE
8760 floating-point, we can only do some of these simplifications.) */
8761 if (operand_equal_p (arg0, arg1, 0))
8763 switch (code)
8765 case EQ_EXPR:
8766 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8767 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8768 return constant_boolean_node (1, type);
8769 break;
8771 case GE_EXPR:
8772 case LE_EXPR:
8773 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8774 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8775 return constant_boolean_node (1, type);
8776 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8778 case NE_EXPR:
8779 /* For NE, we can only do this simplification if integer
8780 or we don't honor IEEE floating point NaNs. */
8781 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8782 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8783 break;
8784 /* ... fall through ... */
8785 case GT_EXPR:
8786 case LT_EXPR:
8787 return constant_boolean_node (0, type);
8788 default:
8789 gcc_unreachable ();
8793 /* If we are comparing an expression that just has comparisons
8794 of two integer values, arithmetic expressions of those comparisons,
8795 and constants, we can simplify it. There are only three cases
8796 to check: the two values can either be equal, the first can be
8797 greater, or the second can be greater. Fold the expression for
8798 those three values. Since each value must be 0 or 1, we have
8799 eight possibilities, each of which corresponds to the constant 0
8800 or 1 or one of the six possible comparisons.
8802 This handles common cases like (a > b) == 0 but also handles
8803 expressions like ((x > y) - (y > x)) > 0, which supposedly
8804 occur in macroized code. */
8806 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8808 tree cval1 = 0, cval2 = 0;
8809 int save_p = 0;
8811 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8812 /* Don't handle degenerate cases here; they should already
8813 have been handled anyway. */
8814 && cval1 != 0 && cval2 != 0
8815 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8816 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8817 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8818 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8819 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8820 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8821 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8823 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8824 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8826 /* We can't just pass T to eval_subst in case cval1 or cval2
8827 was the same as ARG1. */
8829 tree high_result
8830 = fold (build2 (code, type,
8831 eval_subst (arg0, cval1, maxval,
8832 cval2, minval),
8833 arg1));
8834 tree equal_result
8835 = fold (build2 (code, type,
8836 eval_subst (arg0, cval1, maxval,
8837 cval2, maxval),
8838 arg1));
8839 tree low_result
8840 = fold (build2 (code, type,
8841 eval_subst (arg0, cval1, minval,
8842 cval2, maxval),
8843 arg1));
8845 /* All three of these results should be 0 or 1. Confirm they
8846 are. Then use those values to select the proper code
8847 to use. */
8849 if ((integer_zerop (high_result)
8850 || integer_onep (high_result))
8851 && (integer_zerop (equal_result)
8852 || integer_onep (equal_result))
8853 && (integer_zerop (low_result)
8854 || integer_onep (low_result)))
8856 /* Make a 3-bit mask with the high-order bit being the
8857 value for `>', the next for `=', and the low for `<'. */
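/* E.g. a mask of 4 (binary 100) means the expression is true only when
   the first value is greater, i.e. GT_EXPR, while 3 (binary 011) means
   it is true on equality or less-than, i.e. LE_EXPR.  */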
8858 switch ((integer_onep (high_result) * 4)
8859 + (integer_onep (equal_result) * 2)
8860 + integer_onep (low_result))
8862 case 0:
8863 /* Always false. */
8864 return omit_one_operand (type, integer_zero_node, arg0);
8865 case 1:
8866 code = LT_EXPR;
8867 break;
8868 case 2:
8869 code = EQ_EXPR;
8870 break;
8871 case 3:
8872 code = LE_EXPR;
8873 break;
8874 case 4:
8875 code = GT_EXPR;
8876 break;
8877 case 5:
8878 code = NE_EXPR;
8879 break;
8880 case 6:
8881 code = GE_EXPR;
8882 break;
8883 case 7:
8884 /* Always true. */
8885 return omit_one_operand (type, integer_one_node, arg0);
8888 tem = build2 (code, type, cval1, cval2);
8889 if (save_p)
8890 return save_expr (tem);
8891 else
8892 return fold (tem);
8897 /* If this is a comparison of a field, we may be able to simplify it. */
8898 if (((TREE_CODE (arg0) == COMPONENT_REF
8899 && lang_hooks.can_use_bit_fields_p ())
8900 || TREE_CODE (arg0) == BIT_FIELD_REF)
8901 && (code == EQ_EXPR || code == NE_EXPR)
8902 /* Handle the constant case even without -O
8903 to make sure the warnings are given. */
8904 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8906 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8907 if (t1)
8908 return t1;
8911 /* If this is a comparison of complex values and either or both sides
8912 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8913 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8914 This may prevent needless evaluations. */
8915 if ((code == EQ_EXPR || code == NE_EXPR)
8916 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8917 && (TREE_CODE (arg0) == COMPLEX_EXPR
8918 || TREE_CODE (arg1) == COMPLEX_EXPR
8919 || TREE_CODE (arg0) == COMPLEX_CST
8920 || TREE_CODE (arg1) == COMPLEX_CST))
8922 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8923 tree real0, imag0, real1, imag1;
8925 arg0 = save_expr (arg0);
8926 arg1 = save_expr (arg1);
8927 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8928 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8929 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8930 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8932 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8933 : TRUTH_ORIF_EXPR),
8934 type,
8935 fold (build2 (code, type, real0, real1)),
8936 fold (build2 (code, type, imag0, imag1))));
8939 /* Optimize comparisons of strlen vs zero to a compare of the
8940 first character of the string vs zero. To wit,
8941 strlen(ptr) == 0 => *ptr == 0
8942 strlen(ptr) != 0 => *ptr != 0
8943 Other cases should reduce to one of these two (or a constant)
8944 due to the return value of strlen being unsigned. */
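/* E.g. strlen (p) > 0, strlen being unsigned, is equivalent to
   strlen (p) != 0 and hence reduces to *p != 0.  */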
8945 if ((code == EQ_EXPR || code == NE_EXPR)
8946 && integer_zerop (arg1)
8947 && TREE_CODE (arg0) == CALL_EXPR)
8949 tree fndecl = get_callee_fndecl (arg0);
8950 tree arglist;
8952 if (fndecl
8953 && DECL_BUILT_IN (fndecl)
8954 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8955 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8956 && (arglist = TREE_OPERAND (arg0, 1))
8957 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8958 && ! TREE_CHAIN (arglist))
8959 return fold (build2 (code, type,
8960 build1 (INDIRECT_REF, char_type_node,
8961 TREE_VALUE (arglist)),
8962 fold_convert (char_type_node,
8963 integer_zero_node)));
8966 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8967 into a single range test. */
8968 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8969 && TREE_CODE (arg1) == INTEGER_CST
8970 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8971 && !integer_zerop (TREE_OPERAND (arg0, 1))
8972 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8973 && !TREE_OVERFLOW (arg1))
8975 t1 = fold_div_compare (code, type, arg0, arg1);
8976 if (t1 != NULL_TREE)
8977 return t1;
8980 if ((code == EQ_EXPR || code == NE_EXPR)
8981 && !TREE_SIDE_EFFECTS (arg0)
8982 && integer_zerop (arg1)
8983 && tree_expr_nonzero_p (arg0))
8984 return constant_boolean_node (code == NE_EXPR, type);
8986 t1 = fold_relational_const (code, type, arg0, arg1);
8987 return t1 == NULL_TREE ? t : t1;
8989 case UNORDERED_EXPR:
8990 case ORDERED_EXPR:
8991 case UNLT_EXPR:
8992 case UNLE_EXPR:
8993 case UNGT_EXPR:
8994 case UNGE_EXPR:
8995 case UNEQ_EXPR:
8996 case LTGT_EXPR:
8997 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8999 t1 = fold_relational_const (code, type, arg0, arg1);
9000 if (t1 != NULL_TREE)
9001 return t1;
9004 /* If the first operand is NaN, the result is constant. */
9005 if (TREE_CODE (arg0) == REAL_CST
9006 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9007 && (code != LTGT_EXPR || ! flag_trapping_math))
9009 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9010 ? integer_zero_node
9011 : integer_one_node;
9012 return omit_one_operand (type, t1, arg1);
9015 /* If the second operand is NaN, the result is constant. */
9016 if (TREE_CODE (arg1) == REAL_CST
9017 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9018 && (code != LTGT_EXPR || ! flag_trapping_math))
9020 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9021 ? integer_zero_node
9022 : integer_one_node;
9023 return omit_one_operand (type, t1, arg0);
9026 /* Simplify unordered comparison of something with itself. */
9027 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9028 && operand_equal_p (arg0, arg1, 0))
9029 return constant_boolean_node (1, type);
9031 if (code == LTGT_EXPR
9032 && !flag_trapping_math
9033 && operand_equal_p (arg0, arg1, 0))
9034 return constant_boolean_node (0, type);
9036 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9038 tree targ0 = strip_float_extensions (arg0);
9039 tree targ1 = strip_float_extensions (arg1);
9040 tree newtype = TREE_TYPE (targ0);
9042 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9043 newtype = TREE_TYPE (targ1);
9045 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9046 return fold (build2 (code, type, fold_convert (newtype, targ0),
9047 fold_convert (newtype, targ1)));
9050 return t;
9052 case COND_EXPR:
9053 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9054 so all simple results must be passed through pedantic_non_lvalue. */
9055 if (TREE_CODE (arg0) == INTEGER_CST)
9057 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
9058 /* Only optimize constant conditions when the selected branch
9059 has the same type as the COND_EXPR. This avoids optimizing
9060 away "c ? x : throw", where the throw has a void type. */
9061 if (! VOID_TYPE_P (TREE_TYPE (tem))
9062 || VOID_TYPE_P (type))
9063 return pedantic_non_lvalue (tem);
9064 return t;
9066 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
9067 return pedantic_omit_one_operand (type, arg1, arg0);
9069 /* If we have A op B ? A : C, we may be able to convert this to a
9070 simpler expression, depending on the operation and the values
9071 of B and C. Signed zeros prevent all of these transformations,
9072 for reasons given above each one.
9074 Also try swapping the arguments and inverting the conditional. */
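/* Typical instances: x > y ? x : y can become MAX_EXPR (x, y), and
   x < 0 ? -x : x can become ABS_EXPR (x), provided signed zeros need
   not be honored.  */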
9075 if (COMPARISON_CLASS_P (arg0)
9076 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9077 arg1, TREE_OPERAND (arg0, 1))
9078 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9080 tem = fold_cond_expr_with_comparison (type, arg0,
9081 TREE_OPERAND (t, 1),
9082 TREE_OPERAND (t, 2));
9083 if (tem)
9084 return tem;
9087 if (COMPARISON_CLASS_P (arg0)
9088 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9089 TREE_OPERAND (t, 2),
9090 TREE_OPERAND (arg0, 1))
9091 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
9093 tem = invert_truthvalue (arg0);
9094 if (COMPARISON_CLASS_P (tem))
9096 tem = fold_cond_expr_with_comparison (type, tem,
9097 TREE_OPERAND (t, 2),
9098 TREE_OPERAND (t, 1));
9099 if (tem)
9100 return tem;
9104 /* If the second operand is simpler than the third, swap them
9105 since that produces better jump optimization results. */
9106 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
9107 TREE_OPERAND (t, 2), false))
9109 /* See if this can be inverted. If it can't, possibly because
9110 it was a floating-point inequality comparison, don't do
9111 anything. */
9112 tem = invert_truthvalue (arg0);
9114 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9115 return fold (build3 (code, type, tem,
9116 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
9119 /* Convert A ? 1 : 0 to simply A. */
9120 if (integer_onep (TREE_OPERAND (t, 1))
9121 && integer_zerop (TREE_OPERAND (t, 2))
9122 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
9123 call to fold will try to move the conversion inside
9124 a COND, which will recurse. In that case, the COND_EXPR
9125 is probably the best choice, so leave it alone. */
9126 && type == TREE_TYPE (arg0))
9127 return pedantic_non_lvalue (arg0);
9129 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
9130 over COND_EXPR in cases such as floating point comparisons. */
9131 if (integer_zerop (TREE_OPERAND (t, 1))
9132 && integer_onep (TREE_OPERAND (t, 2))
9133 && truth_value_p (TREE_CODE (arg0)))
9134 return pedantic_non_lvalue (fold_convert (type,
9135 invert_truthvalue (arg0)));
9137 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
9138 if (TREE_CODE (arg0) == LT_EXPR
9139 && integer_zerop (TREE_OPERAND (arg0, 1))
9140 && integer_zerop (TREE_OPERAND (t, 2))
9141 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
9142 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
9143 TREE_TYPE (tem), tem, arg1)));
9145 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
9146 already handled above. */
9147 if (TREE_CODE (arg0) == BIT_AND_EXPR
9148 && integer_onep (TREE_OPERAND (arg0, 1))
9149 && integer_zerop (TREE_OPERAND (t, 2))
9150 && integer_pow2p (arg1))
9152 tree tem = TREE_OPERAND (arg0, 0);
9153 STRIP_NOPS (tem);
9154 if (TREE_CODE (tem) == RSHIFT_EXPR
9155 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9156 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
9157 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
9158 return fold (build2 (BIT_AND_EXPR, type,
9159 TREE_OPERAND (tem, 0), arg1));
9162 /* A & N ? N : 0 is simply A & N if N is a power of two. This
9163 is probably obsolete because the first operand should be a
9164 truth value (that's why we have the two cases above), but let's
9165 leave it in until we can confirm this for all front-ends. */
9166 if (integer_zerop (TREE_OPERAND (t, 2))
9167 && TREE_CODE (arg0) == NE_EXPR
9168 && integer_zerop (TREE_OPERAND (arg0, 1))
9169 && integer_pow2p (arg1)
9170 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
9171 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
9172 arg1, OEP_ONLY_CONST))
9173 return pedantic_non_lvalue (fold_convert (type,
9174 TREE_OPERAND (arg0, 0)));
9176 /* Convert A ? B : 0 into A && B if A and B are truth values. */
9177 if (integer_zerop (TREE_OPERAND (t, 2))
9178 && truth_value_p (TREE_CODE (arg0))
9179 && truth_value_p (TREE_CODE (arg1)))
9180 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
9182 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
9183 if (integer_onep (TREE_OPERAND (t, 2))
9184 && truth_value_p (TREE_CODE (arg0))
9185 && truth_value_p (TREE_CODE (arg1)))
9187 /* Only perform transformation if ARG0 is easily inverted. */
9188 tem = invert_truthvalue (arg0);
9189 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9190 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
9193 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
9194 if (integer_zerop (arg1)
9195 && truth_value_p (TREE_CODE (arg0))
9196 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9198 /* Only perform transformation if ARG0 is easily inverted. */
9199 tem = invert_truthvalue (arg0);
9200 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9201 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
9202 TREE_OPERAND (t, 2)));
9205 /* Convert A ? 1 : B into A || B if A and B are truth values. */
9206 if (integer_onep (arg1)
9207 && truth_value_p (TREE_CODE (arg0))
9208 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9209 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
9210 TREE_OPERAND (t, 2)));
9212 return t;
9214 case COMPOUND_EXPR:
9215 /* When pedantic, a compound expression can be neither an lvalue
9216 nor an integer constant expression. */
9217 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9218 return t;
9219 /* Don't let (0, 0) be a null pointer constant. */
9220 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9221 : fold_convert (type, arg1);
9222 return pedantic_non_lvalue (tem);
9224 case COMPLEX_EXPR:
9225 if (wins)
9226 return build_complex (type, arg0, arg1);
9227 return t;
9229 case REALPART_EXPR:
9230 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9231 return t;
9232 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9233 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
9234 TREE_OPERAND (arg0, 1));
9235 else if (TREE_CODE (arg0) == COMPLEX_CST)
9236 return TREE_REALPART (arg0);
9237 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9238 return fold (build2 (TREE_CODE (arg0), type,
9239 fold (build1 (REALPART_EXPR, type,
9240 TREE_OPERAND (arg0, 0))),
9241 fold (build1 (REALPART_EXPR, type,
9242 TREE_OPERAND (arg0, 1)))));
9243 return t;
9245 case IMAGPART_EXPR:
9246 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9247 return fold_convert (type, integer_zero_node);
9248 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9249 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
9250 TREE_OPERAND (arg0, 0));
9251 else if (TREE_CODE (arg0) == COMPLEX_CST)
9252 return TREE_IMAGPART (arg0);
9253 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9254 return fold (build2 (TREE_CODE (arg0), type,
9255 fold (build1 (IMAGPART_EXPR, type,
9256 TREE_OPERAND (arg0, 0))),
9257 fold (build1 (IMAGPART_EXPR, type,
9258 TREE_OPERAND (arg0, 1)))));
9259 return t;
9261 case CALL_EXPR:
9262 /* Check for a built-in function. */
9263 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
9264 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
9265 == FUNCTION_DECL)
9266 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
9268 tree tmp = fold_builtin (t, false);
9269 if (tmp)
9270 return tmp;
9272 return t;
9274 default:
9275 return t;
9276 } /* switch (code) */
9279 #ifdef ENABLE_FOLD_CHECKING
9280 #undef fold
9282 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
9283 static void fold_check_failed (tree, tree);
9284 void print_fold_checksum (tree);
9286 /* When --enable-checking=fold, compute a digest of expr before
9287 and after the actual fold call to verify that fold did not
9288 accidentally change the original expr. */
9290 tree
9291 fold (tree expr)
9293 tree ret;
9294 struct md5_ctx ctx;
9295 unsigned char checksum_before[16], checksum_after[16];
9296 htab_t ht;
9298 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9299 md5_init_ctx (&ctx);
9300 fold_checksum_tree (expr, &ctx, ht);
9301 md5_finish_ctx (&ctx, checksum_before);
9302 htab_empty (ht);
9304 ret = fold_1 (expr);
9306 md5_init_ctx (&ctx);
9307 fold_checksum_tree (expr, &ctx, ht);
9308 md5_finish_ctx (&ctx, checksum_after);
9309 htab_delete (ht);
9311 if (memcmp (checksum_before, checksum_after, 16))
9312 fold_check_failed (expr, ret);
9314 return ret;
9317 void
9318 print_fold_checksum (tree expr)
9320 struct md5_ctx ctx;
9321 unsigned char checksum[16], cnt;
9322 htab_t ht;
9324 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9325 md5_init_ctx (&ctx);
9326 fold_checksum_tree (expr, &ctx, ht);
9327 md5_finish_ctx (&ctx, checksum);
9328 htab_delete (ht);
9329 for (cnt = 0; cnt < 16; ++cnt)
9330 fprintf (stderr, "%02x", checksum[cnt]);
9331 putc ('\n', stderr);
9334 static void
9335 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
9337 internal_error ("fold check: original tree changed by fold");
9340 static void
9341 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
9343 void **slot;
9344 enum tree_code code;
9345 char buf[sizeof (struct tree_decl)];
9346 int i, len;
9348 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
9349 <= sizeof (struct tree_decl))
9350 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
9351 if (expr == NULL)
9352 return;
9353 slot = htab_find_slot (ht, expr, INSERT);
9354 if (*slot != NULL)
9355 return;
9356 *slot = expr;
9357 code = TREE_CODE (expr);
9358 if (TREE_CODE_CLASS (code) == tcc_declaration
9359 && DECL_ASSEMBLER_NAME_SET_P (expr))
9361 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9362 memcpy (buf, expr, tree_size (expr));
9363 expr = (tree) buf;
9364 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9366 else if (TREE_CODE_CLASS (code) == tcc_type
9367 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
9368 || TYPE_CACHED_VALUES_P (expr)))
9370 /* Allow these fields to be modified. */
9371 memcpy (buf, expr, tree_size (expr));
9372 expr = (tree) buf;
9373 TYPE_POINTER_TO (expr) = NULL;
9374 TYPE_REFERENCE_TO (expr) = NULL;
9375 TYPE_CACHED_VALUES_P (expr) = 0;
9376 TYPE_CACHED_VALUES (expr) = NULL;
9378 md5_process_bytes (expr, tree_size (expr), ctx);
9379 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
9380 if (TREE_CODE_CLASS (code) != tcc_type
9381 && TREE_CODE_CLASS (code) != tcc_declaration)
9382 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
9383 switch (TREE_CODE_CLASS (code))
9385 case tcc_constant:
9386 switch (code)
9388 case STRING_CST:
9389 md5_process_bytes (TREE_STRING_POINTER (expr),
9390 TREE_STRING_LENGTH (expr), ctx);
9391 break;
9392 case COMPLEX_CST:
9393 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
9394 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
9395 break;
9396 case VECTOR_CST:
9397 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
9398 break;
9399 default:
9400 break;
9402 break;
9403 case tcc_exceptional:
9404 switch (code)
9406 case TREE_LIST:
9407 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
9408 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
9409 break;
9410 case TREE_VEC:
9411 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
9412 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
9413 break;
9414 default:
9415 break;
9417 break;
9418 case tcc_expression:
9419 case tcc_reference:
9420 case tcc_comparison:
9421 case tcc_unary:
9422 case tcc_binary:
9423 case tcc_statement:
9424 len = TREE_CODE_LENGTH (code);
9425 for (i = 0; i < len; ++i)
9426 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
9427 break;
9428 case tcc_declaration:
9429 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
9430 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
9431 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
9432 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
9433 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
9434 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
9435 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
9436 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
9437 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
9438 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
9439 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
9440 break;
9441 case tcc_type:
9442 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9443 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9444 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9445 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9446 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9447 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
9448 if (INTEGRAL_TYPE_P (expr)
9449 || SCALAR_FLOAT_TYPE_P (expr))
9451 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9452 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9454 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9455 if (TREE_CODE (expr) == RECORD_TYPE
9456 || TREE_CODE (expr) == UNION_TYPE
9457 || TREE_CODE (expr) == QUAL_UNION_TYPE)
9458 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9459 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9460 break;
9461 default:
9462 break;
9466 #endif
9468 /* Perform constant folding and related simplification of initializer
9469 expression EXPR. This behaves identically to "fold" but ignores
9470 potential run-time traps and exceptions that fold must preserve. */
9472 tree
9473 fold_initializer (tree expr)
9475 int saved_signaling_nans = flag_signaling_nans;
9476 int saved_trapping_math = flag_trapping_math;
9477 int saved_trapv = flag_trapv;
9478 tree result;
9480 flag_signaling_nans = 0;
9481 flag_trapping_math = 0;
9482 flag_trapv = 0;
9484 result = fold (expr);
9486 flag_signaling_nans = saved_signaling_nans;
9487 flag_trapping_math = saved_trapping_math;
9488 flag_trapv = saved_trapv;
9490 return result;
9493 /* Determine if first argument is a multiple of second argument. Return 0 if
9494 it is not, or if we cannot easily determine it to be.
9496 An example of the sort of thing we care about (at this point; this routine
9497 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9498 fold cases do now) is discovering that
9500 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9502 is a multiple of
9504 SAVE_EXPR (J * 8)
9506 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9508 This code also handles discovering that
9510 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9512 is a multiple of 8 so we don't have to worry about dealing with a
9513 possible remainder.
9515 Note that we *look* inside a SAVE_EXPR only to determine how it was
9516 calculated; it is not safe for fold to do much of anything else with the
9517 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9518 at run time. For example, the latter example above *cannot* be implemented
9519 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9520 evaluation time of the original SAVE_EXPR is not necessarily the same at
9521 the time the new expression is evaluated. The only optimization of this
9522 sort that would be valid is changing
9524 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9526 divided by 8 to
9528 SAVE_EXPR (I) * SAVE_EXPR (J)
9530 (where the same SAVE_EXPR (J) is used in the original and the
9531 transformed version). */
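/* As a simple illustration, (i * 4 + 8) is recognized as a multiple of 4:
   the PLUS_EXPR case requires both operands to be multiples, the MULT_EXPR
   case matches the constant factor 4, and the INTEGER_CST case verifies
   that 8 % 4 == 0.  */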
9533 static int
9534 multiple_of_p (tree type, tree top, tree bottom)
9536 if (operand_equal_p (top, bottom, 0))
9537 return 1;
9539 if (TREE_CODE (type) != INTEGER_TYPE)
9540 return 0;
9542 switch (TREE_CODE (top))
9544 case BIT_AND_EXPR:
9545 /* Bitwise and provides a power of two multiple. If the mask is
9546 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
9547 if (!integer_pow2p (bottom))
9548 return 0;
9549 /* FALLTHRU */
9551 case MULT_EXPR:
9552 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9553 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9555 case PLUS_EXPR:
9556 case MINUS_EXPR:
9557 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9558 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9560 case LSHIFT_EXPR:
9561 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
9563 tree op1, t1;
9565 op1 = TREE_OPERAND (top, 1);
9566 /* const_binop may not detect overflow correctly,
9567 so check for it explicitly here. */
9568 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9569 > TREE_INT_CST_LOW (op1)
9570 && TREE_INT_CST_HIGH (op1) == 0
9571 && 0 != (t1 = fold_convert (type,
9572 const_binop (LSHIFT_EXPR,
9573 size_one_node,
9574 op1, 0)))
9575 && ! TREE_OVERFLOW (t1))
9576 return multiple_of_p (type, t1, bottom);
9578 return 0;
9580 case NOP_EXPR:
9581 /* Can't handle conversions from non-integral or wider integral type. */
9582 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9583 || (TYPE_PRECISION (type)
9584 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9585 return 0;
9587 /* ... fall through ... */
9589 case SAVE_EXPR:
9590 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
9592 case INTEGER_CST:
9593 if (TREE_CODE (bottom) != INTEGER_CST
9594 || (TYPE_UNSIGNED (type)
9595 && (tree_int_cst_sgn (top) < 0
9596 || tree_int_cst_sgn (bottom) < 0)))
9597 return 0;
9598 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9599 top, bottom, 0));
9601 default:
9602 return 0;
9606 /* Return true if `t' is known to be non-negative. */
9608 int
9609 tree_expr_nonnegative_p (tree t)
9611 switch (TREE_CODE (t))
9613 case ABS_EXPR:
9614 return 1;
9616 case INTEGER_CST:
9617 return tree_int_cst_sgn (t) >= 0;
9619 case REAL_CST:
9620 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
9622 case PLUS_EXPR:
9623 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9624 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9625 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9627 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9628 both unsigned and at least 2 bits shorter than the result. */
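/* E.g. with a 32-bit int result, adding two zero-extended 16-bit
   unsigned values needs at most 17 bits (65535 + 65535 = 131070),
   which leaves the sign bit clear.  */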
9629 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9630 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9631 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9633 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9634 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9635 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9636 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9638 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9639 TYPE_PRECISION (inner2)) + 1;
9640 return prec < TYPE_PRECISION (TREE_TYPE (t));
9643 break;
9645 case MULT_EXPR:
9646 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9648 /* x * x for floating point x is always non-negative. */
9649 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9650 return 1;
9651 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9652 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9655 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9656 both unsigned and their combined precision is less than the result's. */
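/* E.g. with a 32-bit int result, multiplying a zero-extended 8-bit
   value by a zero-extended 16-bit value needs at most 8 + 16 = 24 bits,
   so the sign bit stays clear; two 16-bit values would need 32 bits
   and do not qualify.  */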
9657 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9658 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9659 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9661 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9662 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9663 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9664 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9665 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9666 < TYPE_PRECISION (TREE_TYPE (t));
9668 return 0;
9670 case TRUNC_DIV_EXPR:
9671 case CEIL_DIV_EXPR:
9672 case FLOOR_DIV_EXPR:
9673 case ROUND_DIV_EXPR:
9674 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9675 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9677 case TRUNC_MOD_EXPR:
9678 case CEIL_MOD_EXPR:
9679 case FLOOR_MOD_EXPR:
9680 case ROUND_MOD_EXPR:
9681 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9683 case RDIV_EXPR:
9684 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9685 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9687 case BIT_AND_EXPR:
9688 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9689 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9690 case BIT_IOR_EXPR:
9691 case BIT_XOR_EXPR:
9692 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9693 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9695 case NOP_EXPR:
9697 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9698 tree outer_type = TREE_TYPE (t);
9700 if (TREE_CODE (outer_type) == REAL_TYPE)
9702 if (TREE_CODE (inner_type) == REAL_TYPE)
9703 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9704 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9706 if (TYPE_UNSIGNED (inner_type))
9707 return 1;
9708 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9711 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9713 if (TREE_CODE (inner_type) == REAL_TYPE)
9714 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9715 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9716 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9717 && TYPE_UNSIGNED (inner_type);
9720 break;
9722 case COND_EXPR:
9723 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9724 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9725 case COMPOUND_EXPR:
9726 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9727 case MIN_EXPR:
9728 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9729 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9730 case MAX_EXPR:
9731 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9732 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9733 case MODIFY_EXPR:
9734 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9735 case BIND_EXPR:
9736 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9737 case SAVE_EXPR:
9738 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9739 case NON_LVALUE_EXPR:
9740 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9741 case FLOAT_EXPR:
9742 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9744 case TARGET_EXPR:
9746 tree temp = TARGET_EXPR_SLOT (t);
9747 t = TARGET_EXPR_INITIAL (t);
9749 /* If the initializer is non-void, then it's a normal expression
9750 that will be assigned to the slot. */
9751 if (!VOID_TYPE_P (t))
9752 return tree_expr_nonnegative_p (t);
9754 /* Otherwise, the initializer sets the slot in some way. One common
9755 way is an assignment statement at the end of the initializer. */
9756 while (1)
9758 if (TREE_CODE (t) == BIND_EXPR)
9759 t = expr_last (BIND_EXPR_BODY (t));
9760 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9761 || TREE_CODE (t) == TRY_CATCH_EXPR)
9762 t = expr_last (TREE_OPERAND (t, 0));
9763 else if (TREE_CODE (t) == STATEMENT_LIST)
9764 t = expr_last (t);
9765 else
9766 break;
9768 if (TREE_CODE (t) == MODIFY_EXPR
9769 && TREE_OPERAND (t, 0) == temp)
9770 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9772 return 0;
9775 case CALL_EXPR:
9777 tree fndecl = get_callee_fndecl (t);
9778 tree arglist = TREE_OPERAND (t, 1);
9779 if (fndecl
9780 && DECL_BUILT_IN (fndecl)
9781 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9782 switch (DECL_FUNCTION_CODE (fndecl))
9784 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9785 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9786 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9787 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9789 CASE_BUILTIN_F (BUILT_IN_ACOS)
9790 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9791 CASE_BUILTIN_F (BUILT_IN_CABS)
9792 CASE_BUILTIN_F (BUILT_IN_COSH)
9793 CASE_BUILTIN_F (BUILT_IN_ERFC)
9794 CASE_BUILTIN_F (BUILT_IN_EXP)
9795 CASE_BUILTIN_F (BUILT_IN_EXP10)
9796 CASE_BUILTIN_F (BUILT_IN_EXP2)
9797 CASE_BUILTIN_F (BUILT_IN_FABS)
9798 CASE_BUILTIN_F (BUILT_IN_FDIM)
9799 CASE_BUILTIN_F (BUILT_IN_FREXP)
9800 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9801 CASE_BUILTIN_F (BUILT_IN_POW10)
9802 CASE_BUILTIN_I (BUILT_IN_FFS)
9803 CASE_BUILTIN_I (BUILT_IN_PARITY)
9804 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9805 /* Always true. */
9806 return 1;
9808 CASE_BUILTIN_F (BUILT_IN_SQRT)
9809 /* sqrt(-0.0) is -0.0. */
9810 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9811 return 1;
9812 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9814 CASE_BUILTIN_F (BUILT_IN_ASINH)
9815 CASE_BUILTIN_F (BUILT_IN_ATAN)
9816 CASE_BUILTIN_F (BUILT_IN_ATANH)
9817 CASE_BUILTIN_F (BUILT_IN_CBRT)
9818 CASE_BUILTIN_F (BUILT_IN_CEIL)
9819 CASE_BUILTIN_F (BUILT_IN_ERF)
9820 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9821 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9822 CASE_BUILTIN_F (BUILT_IN_FMOD)
9823 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9824 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9825 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9826 CASE_BUILTIN_F (BUILT_IN_LRINT)
9827 CASE_BUILTIN_F (BUILT_IN_LROUND)
9828 CASE_BUILTIN_F (BUILT_IN_MODF)
9829 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9830 CASE_BUILTIN_F (BUILT_IN_POW)
9831 CASE_BUILTIN_F (BUILT_IN_RINT)
9832 CASE_BUILTIN_F (BUILT_IN_ROUND)
9833 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9834 CASE_BUILTIN_F (BUILT_IN_SINH)
9835 CASE_BUILTIN_F (BUILT_IN_TANH)
9836 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9837 /* True if the 1st argument is nonnegative. */
9838 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9840 CASE_BUILTIN_F (BUILT_IN_FMAX)
9841 /* True if the 1st OR 2nd arguments are nonnegative. */
9842 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9843 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9845 CASE_BUILTIN_F (BUILT_IN_FMIN)
9846 /* True if the 1st AND 2nd arguments are nonnegative. */
9847 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9848 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9850 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9851 /* True if the 2nd argument is nonnegative. */
9852 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9854 default:
9855 break;
9856 #undef CASE_BUILTIN_F
9857 #undef CASE_BUILTIN_I
9861 /* ... fall through ... */
9863 default:
9864 if (truth_value_p (TREE_CODE (t)))
9865 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9866 return 1;
9869 /* We don't know the sign of `t', so be conservative and return false. */
9870 return 0;
9873 /* Return true when T is an address and is known to be nonzero.
9874 For floating point we further ensure that T is not denormal.
9875 Similar logic is present in nonzero_address in rtlanal.c. */
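/* For example, the address of an ordinary declaration is known to be
   nonzero, but the address of a weak symbol is not, since an undefined
   weak symbol may resolve to NULL; the ADDR_EXPR case below therefore
   checks DECL_WEAK.  */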
9877 static bool
9878 tree_expr_nonzero_p (tree t)
9880 tree type = TREE_TYPE (t);
9882 /* Doing something useful for floating point would need more work. */
9883 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9884 return false;
9886 switch (TREE_CODE (t))
9888 case ABS_EXPR:
9889 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9890 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9892 case INTEGER_CST:
9893 /* We used to test for !integer_zerop here. This does not work correctly
9894 if TREE_CONSTANT_OVERFLOW (t). */
9895 return (TREE_INT_CST_LOW (t) != 0
9896 || TREE_INT_CST_HIGH (t) != 0);
9898 case PLUS_EXPR:
9899 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9901 /* In the presence of negative values it is hard
9902 to say anything definite. */
9903 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9904 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9905 return false;
9906 /* One of the operands must be positive and the other non-negative. */
9907 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9908 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9910 break;
9912 case MULT_EXPR:
9913 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9915 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9916 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9918 break;
9920 case NOP_EXPR:
9922 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9923 tree outer_type = TREE_TYPE (t);
9925 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9926 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
9928 break;
9930 case ADDR_EXPR:
9932 tree base = get_base_address (TREE_OPERAND (t, 0));
9934 if (!base)
9935 return false;
9937 /* Weak declarations may link to NULL. */
9938 if (DECL_P (base))
9939 return !DECL_WEAK (base);
9941 /* Constants are never weak. */
9942 if (CONSTANT_CLASS_P (base))
9943 return true;
9945 return false;
9948 case COND_EXPR:
9949 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9950 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9952 case MIN_EXPR:
9953 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9954 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9956 case MAX_EXPR:
9957 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9959 /* When both operands are nonzero, then MAX must be too. */
9960 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9961 return true;
9963 /* MAX where operand 0 is positive is positive. */
9964 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9966 /* MAX where operand 1 is positive is positive. */
9967 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9968 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9969 return true;
9970 break;
9972 case COMPOUND_EXPR:
9973 case MODIFY_EXPR:
9974 case BIND_EXPR:
9975 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9977 case SAVE_EXPR:
9978 case NON_LVALUE_EXPR:
9979 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9981 case BIT_IOR_EXPR:
9982 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9983 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9985 default:
9986 break;
9988 return false;
9991 /* See if we are applying CODE, a relational operator, to the highest
9992 or lowest possible integer of TYPE. If so, then the result is a
9993 compile-time constant. */
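/* For example, for a 32-bit unsigned X, X > 0xffffffff folds to 0 and
   X <= 0xffffffff folds to 1, while X >= 0xffffffff becomes
   X == 0xffffffff and X < 0xffffffff becomes X != 0xffffffff.  */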
9995 static tree
9996 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9997 tree *op1_p)
9999 tree op0 = *op0_p;
10000 tree op1 = *op1_p;
10001 enum tree_code code = *code_p;
10002 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
10004 if (TREE_CODE (op1) == INTEGER_CST
10005 && ! TREE_CONSTANT_OVERFLOW (op1)
10006 && width <= HOST_BITS_PER_WIDE_INT
10007 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
10008 || POINTER_TYPE_P (TREE_TYPE (op1))))
10010 unsigned HOST_WIDE_INT signed_max;
10011 unsigned HOST_WIDE_INT max, min;
10013 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
10015 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
10017 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10018 min = 0;
10020 else
10022 max = signed_max;
10023 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10026 if (TREE_INT_CST_HIGH (op1) == 0
10027 && TREE_INT_CST_LOW (op1) == max)
10028 switch (code)
10030 case GT_EXPR:
10031 return omit_one_operand (type, integer_zero_node, op0);
10033 case GE_EXPR:
10034 *code_p = EQ_EXPR;
10035 break;
10036 case LE_EXPR:
10037 return omit_one_operand (type, integer_one_node, op0);
10039 case LT_EXPR:
10040 *code_p = NE_EXPR;
10041 break;
10043 /* The GE_EXPR and LT_EXPR cases above are not normally
10044 reached because of previous transformations. */
10046 default:
10047 break;
10049 else if (TREE_INT_CST_HIGH (op1) == 0
10050 && TREE_INT_CST_LOW (op1) == max - 1)
10051 switch (code)
10053 case GT_EXPR:
10054 *code_p = EQ_EXPR;
10055 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10056 break;
10057 case LE_EXPR:
10058 *code_p = NE_EXPR;
10059 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10060 break;
10061 default:
10062 break;
10064 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10065 && TREE_INT_CST_LOW (op1) == min)
10066 switch (code)
10068 case LT_EXPR:
10069 return omit_one_operand (type, integer_zero_node, op0);
10071 case LE_EXPR:
10072 *code_p = EQ_EXPR;
10073 break;
10075 case GE_EXPR:
10076 return omit_one_operand (type, integer_one_node, op0);
10078 case GT_EXPR:
10079 *code_p = NE_EXPR;
10080 break;
10082 default:
10083 break;
10085 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10086 && TREE_INT_CST_LOW (op1) == min + 1)
10087 switch (code)
10089 case GE_EXPR:
10090 *code_p = NE_EXPR;
10091 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10092 break;
10093 case LT_EXPR:
10094 *code_p = EQ_EXPR;
10095 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10096 break;
10097 default:
10098 break;
10101 else if (TREE_INT_CST_HIGH (op1) == 0
10102 && TREE_INT_CST_LOW (op1) == signed_max
10103 && TYPE_UNSIGNED (TREE_TYPE (op1))
10104 /* signed_type does not work on pointer types. */
10105 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
10107 /* The following case also applies to X < signed_max+1
10108 and X >= signed_max+1 because of previous transformations. */
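/* E.g. for a 32-bit unsigned X, X <= 0x7fffffff is equivalent to
   (int) X >= 0, and X > 0x7fffffff to (int) X < 0.  */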
10109 if (code == LE_EXPR || code == GT_EXPR)
10111 tree st0, st1, exp, retval;
10112 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
10113 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
10115 exp = build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
10116 type,
10117 fold_convert (st0, op0),
10118 fold_convert (st1, integer_zero_node));
10120 retval = fold_binary_to_constant (TREE_CODE (exp),
10121 TREE_TYPE (exp),
10122 TREE_OPERAND (exp, 0),
10123 TREE_OPERAND (exp, 1));
10125 /* If we are in gimple form, then returning EXP would create
10126 non-gimple expressions. Clearing it is safe and ensures
10127 we do not allow a non-gimple expression to escape. */
10128 if (in_gimple_form)
10129 exp = NULL;
10131 return (retval ? retval : exp);
10136 return NULL_TREE;
10140 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
10141 attempt to fold the expression to a constant without modifying TYPE,
10142 OP0 or OP1.
10144 If the expression can be simplified to a constant, then return
10145 the constant. If the expression cannot be simplified to a
10146 constant, then return NULL_TREE.
10148 Note this is primarily designed to be called after gimplification
10149 of the tree structures and when at least one operand is a constant.
10150 As a result of those simplifying assumptions this routine is far
10151 simpler than the generic fold routine. */
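/* For instance, given INTEGER_CST operands 2 and 3, calling this
   routine with PLUS_EXPR returns the INTEGER_CST 5 via const_binop,
   whereas a non-constant operand typically makes it return NULL_TREE
   rather than a partially simplified tree.  */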
10153 tree
10154 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
10156 int wins = 1;
10157 tree subop0;
10158 tree subop1;
10159 tree tem;
10161 /* If this is a commutative operation, and OP0 is a constant, move it
10162 to OP1 to reduce the number of tests below. */
10163 if (commutative_tree_code (code)
10164 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
10166 tem = op0;
10167 op0 = op1;
10168 op1 = tem;
10171 /* If either operand is a complex type, extract its real component. */
10172 if (TREE_CODE (op0) == COMPLEX_CST)
10173 subop0 = TREE_REALPART (op0);
10174 else
10175 subop0 = op0;
10177 if (TREE_CODE (op1) == COMPLEX_CST)
10178 subop1 = TREE_REALPART (op1);
10179 else
10180 subop1 = op1;
10182 /* Note if either argument is not a real or integer constant.
10183 With a few exceptions, simplification is limited to cases
10184 where both arguments are constants. */
10185 if ((TREE_CODE (subop0) != INTEGER_CST
10186 && TREE_CODE (subop0) != REAL_CST)
10187 || (TREE_CODE (subop1) != INTEGER_CST
10188 && TREE_CODE (subop1) != REAL_CST))
10189 wins = 0;
10191 switch (code)
10193 case PLUS_EXPR:
10194 /* (plus (address) (const_int)) is a constant. */
10195 if (TREE_CODE (op0) == PLUS_EXPR
10196 && TREE_CODE (op1) == INTEGER_CST
10197 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
10198 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
10199 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
10200 == ADDR_EXPR)))
10201 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
10203 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
10204 const_binop (PLUS_EXPR, op1,
10205 TREE_OPERAND (op0, 1), 0));
10207 case BIT_XOR_EXPR:
10209 binary:
10210 if (!wins)
10211 return NULL_TREE;
10213 /* Both arguments are constants. Simplify. */
10214 tem = const_binop (code, op0, op1, 0);
10215 if (tem != NULL_TREE)
10217 /* The return value should always have the same type as
10218 the original expression. */
10219 if (TREE_TYPE (tem) != type)
10220 tem = fold_convert (type, tem);
10222 return tem;
10224 return NULL_TREE;
10226 case MINUS_EXPR:
10227 /* Fold &x - &x. This can happen from &x.foo - &x.
10228 This is unsafe for certain floats even in non-IEEE formats.
10229 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10230 Also note that operand_equal_p is always false if an
10231 operand is volatile. */
10232 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
10233 return fold_convert (type, integer_zero_node);
10235 goto binary;
10237 case MULT_EXPR:
10238 case BIT_AND_EXPR:
10239 /* Special case multiplication or bitwise AND where one argument
10240 is zero. */
10241 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
10242 return omit_one_operand (type, op1, op0);
10243 else
10244 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
10245 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
10246 && real_zerop (op1))
10247 return omit_one_operand (type, op1, op0);
10249 goto binary;
10251 case BIT_IOR_EXPR:
10252 /* Special case when we know the result will be all ones. */
10253 if (integer_all_onesp (op1))
10254 return omit_one_operand (type, op1, op0);
10256 goto binary;
10258 case TRUNC_DIV_EXPR:
10259 case ROUND_DIV_EXPR:
10260 case FLOOR_DIV_EXPR:
10261 case CEIL_DIV_EXPR:
10262 case EXACT_DIV_EXPR:
10263 case TRUNC_MOD_EXPR:
10264 case ROUND_MOD_EXPR:
10265 case FLOOR_MOD_EXPR:
10266 case CEIL_MOD_EXPR:
10267 case RDIV_EXPR:
10268 /* Division by zero is undefined. */
10269 if (integer_zerop (op1))
10270 return NULL_TREE;
10272 if (TREE_CODE (op1) == REAL_CST
10273 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
10274 && real_zerop (op1))
10275 return NULL_TREE;
10277 goto binary;
10279 case MIN_EXPR:
10280 if (INTEGRAL_TYPE_P (type)
10281 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10282 return omit_one_operand (type, op1, op0);
10284 goto binary;
10286 case MAX_EXPR:
10287 if (INTEGRAL_TYPE_P (type)
10288 && TYPE_MAX_VALUE (type)
10289 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10290 return omit_one_operand (type, op1, op0);
10292 goto binary;
10294 case RSHIFT_EXPR:
10295 /* Optimize -1 >> x for arithmetic right shifts. */
10296 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
10297 return omit_one_operand (type, op0, op1);
10298 /* ... fall through ... */
10300 case LSHIFT_EXPR:
10301 if (integer_zerop (op0))
10302 return omit_one_operand (type, op0, op1);
10304 /* Since a negative shift count is not well-defined, don't
10305 try to compute it in the compiler. */
10306 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
10307 return NULL_TREE;
10309 goto binary;
10311 case LROTATE_EXPR:
10312 case RROTATE_EXPR:
10313 /* -1 rotated either direction by any amount is still -1. */
10314 if (integer_all_onesp (op0))
10315 return omit_one_operand (type, op0, op1);
10317 /* 0 rotated either direction by any amount is still zero. */
10318 if (integer_zerop (op0))
10319 return omit_one_operand (type, op0, op1);
10321 goto binary;
10323 case COMPLEX_EXPR:
10324 if (wins)
10325 return build_complex (type, op0, op1);
10326 return NULL_TREE;
10328 case LT_EXPR:
10329 case LE_EXPR:
10330 case GT_EXPR:
10331 case GE_EXPR:
10332 case EQ_EXPR:
10333 case NE_EXPR:
10334 /* If one arg is a real or integer constant, put it last. */
10335 if ((TREE_CODE (op0) == INTEGER_CST
10336 && TREE_CODE (op1) != INTEGER_CST)
10337 || (TREE_CODE (op0) == REAL_CST
10338 && TREE_CODE (op1) != REAL_CST))
10340 tree temp;
10342 temp = op0;
10343 op0 = op1;
10344 op1 = temp;
10345 code = swap_tree_comparison (code);
10348 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10349 This transformation affects the cases which are handled in later
10350 optimizations involving comparisons with non-negative constants. */
10351 if (TREE_CODE (op1) == INTEGER_CST
10352 && TREE_CODE (op0) != INTEGER_CST
10353 && tree_int_cst_sgn (op1) > 0)
10355 switch (code)
10357 case GE_EXPR:
10358 code = GT_EXPR;
10359 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10360 break;
10362 case LT_EXPR:
10363 code = LE_EXPR;
10364 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10365 break;
10367 default:
10368 break;
10372 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10373 if (tem)
10374 return tem;
10376 /* Fall through. */
10378 case ORDERED_EXPR:
10379 case UNORDERED_EXPR:
10380 case UNLT_EXPR:
10381 case UNLE_EXPR:
10382 case UNGT_EXPR:
10383 case UNGE_EXPR:
10384 case UNEQ_EXPR:
10385 case LTGT_EXPR:
10386 if (!wins)
10387 return NULL_TREE;
10389 return fold_relational_const (code, type, op0, op1);
10391 case RANGE_EXPR:
10392 /* This could probably be handled. */
10393 return NULL_TREE;
10395 case TRUTH_AND_EXPR:
10396 /* If second arg is constant zero, result is zero, but first arg
10397 must be evaluated. */
10398 if (integer_zerop (op1))
10399 return omit_one_operand (type, op1, op0);
10400 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10401 case will be handled here. */
10402 if (integer_zerop (op0))
10403 return omit_one_operand (type, op0, op1);
10404 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10405 return constant_boolean_node (true, type);
10406 return NULL_TREE;
10408 case TRUTH_OR_EXPR:
10409 /* If second arg is constant true, result is true, but we must
10410 evaluate first arg. */
10411 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10412 return omit_one_operand (type, op1, op0);
10413 /* Likewise for first arg, but note this only occurs here for
10414 TRUTH_OR_EXPR. */
10415 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10416 return omit_one_operand (type, op0, op1);
10417 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10418 return constant_boolean_node (false, type);
10419 return NULL_TREE;
10421 case TRUTH_XOR_EXPR:
10422 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10424 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10425 return constant_boolean_node (x, type);
10427 return NULL_TREE;
10429 default:
10430 return NULL_TREE;
10434 /* Given the components of a unary expression CODE, TYPE and OP0,
10435 attempt to fold the expression to a constant without modifying
10436 TYPE or OP0.
10438 If the expression can be simplified to a constant, then return
10439 the constant. If the expression cannot be simplified to a
10440 constant, then return NULL_TREE.
10442 Note this is primarily designed to be called after gimplification
10443 of the tree structures and when op0 is a constant. As a result
10444 of those simplifying assumptions this routine is far simpler than
10445 the generic fold routine. */
10447 tree
10448 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
10450 /* Make sure we have a suitable constant argument. */
10451 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
10453 tree subop;
10455 if (TREE_CODE (op0) == COMPLEX_CST)
10456 subop = TREE_REALPART (op0);
10457 else
10458 subop = op0;
10460 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
10461 return NULL_TREE;
10464 switch (code)
10466 case NOP_EXPR:
10467 case FLOAT_EXPR:
10468 case CONVERT_EXPR:
10469 case FIX_TRUNC_EXPR:
10470 case FIX_FLOOR_EXPR:
10471 case FIX_CEIL_EXPR:
10472 return fold_convert_const (code, type, op0);
10474 case NEGATE_EXPR:
10475 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10476 return fold_negate_const (op0, type);
10477 else
10478 return NULL_TREE;
10480 case ABS_EXPR:
10481 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10482 return fold_abs_const (op0, type);
10483 else
10484 return NULL_TREE;
10486 case BIT_NOT_EXPR:
10487 if (TREE_CODE (op0) == INTEGER_CST)
10488 return fold_not_const (op0, type);
10489 else
10490 return NULL_TREE;
10492 case REALPART_EXPR:
10493 if (TREE_CODE (op0) == COMPLEX_CST)
10494 return TREE_REALPART (op0);
10495 else
10496 return NULL_TREE;
10498 case IMAGPART_EXPR:
10499 if (TREE_CODE (op0) == COMPLEX_CST)
10500 return TREE_IMAGPART (op0);
10501 else
10502 return NULL_TREE;
10504 case CONJ_EXPR:
10505 if (TREE_CODE (op0) == COMPLEX_CST
10506 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10507 return build_complex (type, TREE_REALPART (op0),
10508 negate_expr (TREE_IMAGPART (op0)));
10509 return NULL_TREE;
10511 default:
10512 return NULL_TREE;
10516 /* If EXP represents referencing an element in a constant string
10517 (either via pointer arithmetic or array indexing), return the
10518 tree representing the value accessed, otherwise return NULL. */
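/* For instance, both "abc"[1] (an ARRAY_REF) and *("abc" + 1) (an
   INDIRECT_REF) read directly from the STRING_CST and yield the
   character constant 'b'.  */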
10520 tree
10521 fold_read_from_constant_string (tree exp)
10523 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10525 tree exp1 = TREE_OPERAND (exp, 0);
10526 tree index;
10527 tree string;
10529 if (TREE_CODE (exp) == INDIRECT_REF)
10530 string = string_constant (exp1, &index);
10531 else
10533 tree low_bound = array_ref_low_bound (exp);
10534 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10536 /* Optimize the special-case of a zero lower bound.
10538 We convert the low_bound to sizetype to avoid some problems
10539 with constant folding. (E.g. suppose the lower bound is 1,
10540 and its mode is QI. Without the conversion, (ARRAY
10541 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10542 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
10543 if (! integer_zerop (low_bound))
10544 index = size_diffop (index, fold_convert (sizetype, low_bound));
10546 string = exp1;
10549 if (string
10550 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10551 && TREE_CODE (string) == STRING_CST
10552 && TREE_CODE (index) == INTEGER_CST
10553 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10554 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10555 == MODE_INT)
10556 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10557 return fold_convert (TREE_TYPE (exp),
10558 build_int_cst (NULL_TREE,
10559 (TREE_STRING_POINTER (string)
10560 [TREE_INT_CST_LOW (index)])));
10562 return NULL;
10565 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10566 an integer constant or real constant.
10568 TYPE is the type of the result. */
10570 static tree
10571 fold_negate_const (tree arg0, tree type)
10573 tree t = NULL_TREE;
10575 switch (TREE_CODE (arg0))
10577 case INTEGER_CST:
10579 unsigned HOST_WIDE_INT low;
10580 HOST_WIDE_INT high;
10581 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10582 TREE_INT_CST_HIGH (arg0),
10583 &low, &high);
10584 t = build_int_cst_wide (type, low, high);
10585 t = force_fit_type (t, 1,
10586 (overflow | TREE_OVERFLOW (arg0))
10587 && !TYPE_UNSIGNED (type),
10588 TREE_CONSTANT_OVERFLOW (arg0));
10589 break;
10592 case REAL_CST:
10593 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10594 break;
10596 default:
10597 gcc_unreachable ();
10600 return t;
10603 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10604 an integer constant or real constant.
10606 TYPE is the type of the result. */
10608 tree
10609 fold_abs_const (tree arg0, tree type)
10611 tree t = NULL_TREE;
10613 switch (TREE_CODE (arg0))
10615 case INTEGER_CST:
10616 /* If the value is unsigned, then the absolute value is
10617 the same as the ordinary value. */
10618 if (TYPE_UNSIGNED (type))
10619 t = arg0;
10620 /* Similarly, if the value is non-negative. */
10621 else if (INT_CST_LT (integer_minus_one_node, arg0))
10622 t = arg0;
10623 /* If the value is negative, then the absolute value is
10624 its negation. */
10625 else
10627 unsigned HOST_WIDE_INT low;
10628 HOST_WIDE_INT high;
10629 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10630 TREE_INT_CST_HIGH (arg0),
10631 &low, &high);
10632 t = build_int_cst_wide (type, low, high);
10633 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
10634 TREE_CONSTANT_OVERFLOW (arg0));
10636 break;
10638 case REAL_CST:
10639 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10640 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10641 else
10642 t = arg0;
10643 break;
10645 default:
10646 gcc_unreachable ();
10649 return t;
10652 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10653 constant. TYPE is the type of the result. */
10655 static tree
10656 fold_not_const (tree arg0, tree type)
10658 tree t = NULL_TREE;
10660 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
10662 t = build_int_cst_wide (type,
10663 ~ TREE_INT_CST_LOW (arg0),
10664 ~ TREE_INT_CST_HIGH (arg0));
10665 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
10666 TREE_CONSTANT_OVERFLOW (arg0));
10668 return t;

/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
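
/* Illustration (editor's example, not from the original sources): to
   fold 3 >= 5, the code above rewrites GE as "not LT" (invert = 1),
   computes INT_CST_LT (3, 5) == 1, inverts it, and returns the boolean
   constant 0.  Likewise 3 > 5 is first turned into 5 < 3 by swapping
   the operands, again yielding 0.  Only EQ and LT ever need a direct
   comparison.  */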

/* Build an expression for a cleanup point containing EXPR, with type
   TYPE.  Don't build a CLEANUP_POINT_EXPR around an EXPR which doesn't
   have side effects.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to
     wrap it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
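
/* Illustration (editor's note, not from the original sources): a plain
   variable reference "x" has no side effects and is returned as-is,
   while a call such as "f ()" becomes CLEANUP_POINT_EXPR <f ()>, which
   marks the end of the full expression at which pending cleanups (for
   example, destructors of temporaries in C++) should run.  */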

/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its
     address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF.  */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}
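
/* Illustration (editor's example, not from the original sources):
   taking the address of an indirection folds away the round trip, so
   &*p becomes p (with a NOP_EXPR cast when the pointer types differ).
   For an object reference such as &a.b, the walk over
   handled_component_p finds the underlying declaration "a" and marks it
   TREE_ADDRESSABLE before building the ADDR_EXPR, since its address is
   now taken.  */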

/* Build an expression for the address of T, using a pointer type
   derived from the type of T.  */

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (lang_hooks.types_compatible_p (type, optype))
        return op;
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
        return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  subtype = TREE_TYPE (sub);
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      sub = build_fold_indirect_ref (sub);
      return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
    }

  return build1 (INDIRECT_REF, type, t);
}
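
/* Illustration (editor's summary, not from the original sources): the
   three simplifications above in C terms, assuming the language hook
   reports the types compatible:

     *&p                 =>  p
     *(foo *)&fooarray   =>  fooarray[0]
     *(foo *)fooarrptr   =>  (*fooarrptr)[0]

   Anything else simply becomes a plain INDIRECT_REF of T.  */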

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
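
/* Illustration (editor's example, not from the original sources): for a
   statement whose value is unused, "x + f ()" drops the side-effect-free
   operand and leaves just "f ()", while "f () + g ()" is returned
   unchanged because both operands have side effects.  A tree with no
   side effects at all, such as "x + 1", folds straight to
   integer_zero_node.  */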

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this check when VALUE is not an
     integer constant, because for a constant the check is more
     expensive than simply doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
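
/* Illustration (editor's example, not from the original sources): for a
   power-of-two divisor the rounding above is pure bit manipulation.
   The same trick on host integers, assuming two's complement:

     #define ROUND_UP_P2(v, d)  (((v) + (d) - 1) & -(d))

     ROUND_UP_P2 (13, 8) == 16, and ROUND_UP_P2 (16, 8) == 16.

   For other divisors the code falls back to a ceiling division followed
   by a multiplication, e.g. round_up (13, 12) folds to
   CEIL_DIV (13, 12) * 12 == 24.  */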

/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this check when VALUE is not an
     integer constant, because for a constant the check is more
     expensive than simply doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
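
/* Illustration (editor's example, not from the original sources): the
   power-of-two case simply masks off the low bits, so
   round_down (13, 8) == (13 & -8) == 8, while a non-power-of-two
   divisor uses a floor division: round_down (13, 12) folds to
   (13 / 12) * 12 == 12.  */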

/* Return a pointer to the base of the object addressed by EXP, and
   extract the offset of the access, storing the constant bit offset in
   *PBITPOS and any variable offset in *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);

      if (TREE_CODE (core) == INDIRECT_REF)
        core = TREE_OPERAND (core, 0);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
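
/* Illustration (editor's example, not from the original sources): for
   EXP = &a.f, the core is the declaration "a", *PBITPOS holds the
   constant bit offset of field "f", and *POFFSET is NULL_TREE; for an
   access like &a.v[i], the variable part i * sizeof (element) ends up
   in *POFFSET.  An EXP that is not an ADDR_EXPR is returned unchanged
   with a zero offset.  */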

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
      if (!host_integerp (tdiff, 0))
        return false;

      *diff = tree_low_cst (tdiff, 0);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
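
/* Illustration (editor's example, not from the original sources): given
   E1 = &a[3] and E2 = &a[1], both cores are the declaration "a", the
   bit offsets are constant, and *DIFF becomes 2 * sizeof (a[0]) bytes.
   Given E1 = &a[i] and E2 = &a[0], only one address has a variable
   offset, so the difference cannot be constant and the function returns
   false.  */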