/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static tree build_zero_vector (tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_relational_hi_lo (enum tree_code *, const tree,
                                   tree *, tree *);
static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
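
/* Example (for exposition): with 8-bit values, a = 0x70 (+112),
   b = 0x30 (+48) and sum = 0xA0 (-96), ~(a ^ b) = 0xBF has the sign
   bit set (the addends' signs agree) and (a ^ sum) = 0xD0 has the
   sign bit set (the sum's sign differs from a's), so the AND is
   negative and the macro reports overflow.  If a and b differed in
   sign, ~(a ^ b) would have a clear sign bit and no overflow could
   be reported.  */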
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
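
/* Example (for exposition): assuming HOST_BITS_PER_WIDE_INT == 32,
   BASE is 0x10000 and encode splits low = 0x12345678, hi = 0x0ABCDEF0
   into words = { 0x5678, 0x1234, 0xDEF0, 0x0ABC }.  Each word holds at
   most 16 significant bits, so the product of two words in mul_double
   below always fits in a single HOST_WIDE_INT.  decode reverses this:
   0x5678 + 0x1234 * 0x10000 == 0x12345678.  */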
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if,
        CONST_OVERFLOWED is nonzero,
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */
tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
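
/* Example (for exposition): forcing the value 0x123 into an 8-bit
   unsigned type masks it down to 0x23 and returns a new node; with
   OVERFLOWABLE == 1 and an ordinary unsigned type, no overflow flag
   is set.  Forcing 0xFF into an 8-bit *signed* type sign-extends
   bit 7, yielding the value -1 (low all-ones, high == -1).  */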
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
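
/* Example (for exposition): the low words are added modulo the word
   size, so a carry out is detected by the wraparound test (l < l1):
   e.g. l1 == ~(unsigned HOST_WIDE_INT) 0 and l2 == 1 give l == 0 < l1,
   and the carry of 1 is folded into the high-word sum.  Only the high
   words can produce signed doubleword overflow, hence the
   OVERFLOW_SUM_SIGN check on h1, h2 and h alone.  */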
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);        /* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
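
/* Note on the overflow check above (added for exposition): the 4x4
   digit multiply is unsigned, so when h1 < 0 the unsigned reading of
   (l1,h1) exceeds the true value by 2^(2*HOST_BITS_PER_WIDE_INT),
   which inflates the top half of the product by exactly (l2,h2);
   adding the negation of (l2,h2) corrects this, and symmetrically for
   h2 < 0.  After correction, the signed multiply overflowed iff the
   top half is not the sign extension of the low half: all zero bits
   when *hv >= 0, all one bits when *hv < 0, which is what the return
   value tests.  */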
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
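
/* Note on the else branch above (added for exposition): the bits
   shifted out of L1 must enter *HV, but writing
   l1 >> (HOST_BITS_PER_WIDE_INT - count) would shift by the full word
   width when count == 0, which C leaves undefined.  Splitting it into
   a shift by (width - count - 1) followed by a further shift by 1
   stays well defined and yields 0 for count == 0, as desired.  */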
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
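
/* Example (for exposition): for num = -7, den = 4 the truncating
   quotient is -1 with remainder -3.  The rounding codes then give:
   TRUNC_DIV_EXPR -> -1, FLOOR_DIV_EXPR -> -2 (negative quotient,
   nonzero remainder), CEIL_DIV_EXPR -> -1, and ROUND_DIV_EXPR -> -2
   (2 * |rem| = 6 exceeds |den| = 4).  The true remainder is then
   recomputed from the adjusted quotient, e.g. 1 for the FLOOR case.  */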
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
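
/* Example (for exposition): for a 32-bit signed type the only value
   that cannot be negated is INT_MIN, whose bit pattern is exactly
   1 << (prec - 1); every other value compares unequal in the final
   test and may be negated safely.  */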
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 0)));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 1)));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                           TREE_OPERAND (t, 1),
                                           TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               TREE_OPERAND (t, 0),
                                               negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               negate_expr (tem),
                                               TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
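
/* Example (for exposition): splitting IN = (x - 3) with CODE ==
   PLUS_EXPR yields var = x, *litp = 0, *minus_litp = 3 and *conp = 0:
   the subtracted literal is reported through *MINUS_LITP rather than
   being negated, so the caller can rebuild IN as x + (-3) without
   creating a NEGATE_EXPR around the constant.  */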
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold (build2 (code, type, fold_convert (type, t1),
                       fold_convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
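
/* Example (for exposition, assuming a 64-bit HOST_WIDE_INT): folding
   INT_MAX + 1 for a 32-bit signed type does not wrap the doubleword,
   so add_double reports no overflow; instead force_fit_type
   sign-extends bit 31, the value changes to INT_MIN, and TREE_OVERFLOW
   is set because OVERFLOWABLE is 1 and the type is sign-extended.
   The add_double overflow flag matters when the constant occupies the
   full doubleword precision.  */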
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform the operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform the operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build2 (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
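
/* Example (for exposition): size_diffop on the sizetype constants 4
   and 12 takes the final branch: 12 - 4 is computed in the unsigned
   type, converted to ssizetype, and subtracted from zero, giving -8
   without ever forming an out-of-range unsigned difference.  */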
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                                  TREE_INT_CST_HIGH (arg1));

          t = force_fit_type (t,
                              /* Don't set the overflow when
                                 converting a pointer  */
                              !POINTER_TYPE_P (TREE_TYPE (arg1)),
                              (TREE_INT_CST_HIGH (arg1) < 0
                               && (TYPE_UNSIGNED (type)
                                   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                              | TREE_OVERFLOW (arg1),
                              TREE_CONSTANT_OVERFLOW (arg1));
          return t;
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* The following code implements the floating point to integer
             conversion rules required by the Java Language Specification,
             that IEEE NaNs are mapped to zero and values that overflow
             the target precision saturate, i.e. values greater than
             INT_MAX are mapped to INT_MAX, and values less than INT_MIN
             are mapped to INT_MIN.  These semantics are allowed by the
             C and C++ standards that simply state that the behavior of
             FP-to-integer conversion is unspecified upon overflow.  */

          HOST_WIDE_INT high, low;
          REAL_VALUE_TYPE r;
          REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

          switch (code)
            {
            case FIX_TRUNC_EXPR:
              real_trunc (&r, VOIDmode, &x);
              break;

            case FIX_CEIL_EXPR:
              real_ceil (&r, VOIDmode, &x);
              break;

            case FIX_FLOOR_EXPR:
              real_floor (&r, VOIDmode, &x);
              break;

            case FIX_ROUND_EXPR:
              real_round (&r, VOIDmode, &x);
              break;

            default:
              gcc_unreachable ();
            }

          /* If R is NaN, return zero and show we have an overflow.  */
          if (REAL_VALUE_ISNAN (r))
            {
              overflow = 1;
              high = 0;
              low = 0;
            }

          /* See if R is less than the lower bound or greater than the
             upper bound.  */

          if (! overflow)
            {
              tree lt = TYPE_MIN_VALUE (type);
              REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
              if (REAL_VALUES_LESS (r, l))
                {
                  overflow = 1;
                  high = TREE_INT_CST_HIGH (lt);
                  low = TREE_INT_CST_LOW (lt);
                }
            }

          if (! overflow)
            {
              tree ut = TYPE_MAX_VALUE (type);
              if (ut)
                {
                  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
                  if (REAL_VALUES_LESS (u, r))
                    {
                      overflow = 1;
                      high = TREE_INT_CST_HIGH (ut);
                      low = TREE_INT_CST_LOW (ut);
                    }
                }
            }

          if (! overflow)
            REAL_VALUE_TO_INT (&low, &high, r);

          t = build_int_cst_wide (type, low, high);

          t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                              TREE_CONSTANT_OVERFLOW (arg1));
          return t;
        }
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  return NULL_TREE;
}
1857 /* Convert expression ARG to type TYPE. Used by the middle-end for
1858 simple conversions in preference to calling the front-end's convert. */
1860 tree
1861 fold_convert (tree type, tree arg)
1863 tree orig = TREE_TYPE (arg);
1864 tree tem;
1866 if (type == orig)
1867 return arg;
1869 if (TREE_CODE (arg) == ERROR_MARK
1870 || TREE_CODE (type) == ERROR_MARK
1871 || TREE_CODE (orig) == ERROR_MARK)
1872 return error_mark_node;
1874 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1875 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1876 TYPE_MAIN_VARIANT (orig)))
1877 return fold (build1 (NOP_EXPR, type, arg));
1879 switch (TREE_CODE (type))
1881 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1882 case POINTER_TYPE: case REFERENCE_TYPE:
1883 case OFFSET_TYPE:
1884 if (TREE_CODE (arg) == INTEGER_CST)
1886 tem = fold_convert_const (NOP_EXPR, type, arg);
1887 if (tem != NULL_TREE)
1888 return tem;
1890 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1891 || TREE_CODE (orig) == OFFSET_TYPE)
1892 return fold (build1 (NOP_EXPR, type, arg));
1893 if (TREE_CODE (orig) == COMPLEX_TYPE)
1895 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1896 return fold_convert (type, tem);
1898 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1899 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1900 return fold (build1 (NOP_EXPR, type, arg));
1902 case REAL_TYPE:
1903 if (TREE_CODE (arg) == INTEGER_CST)
1905 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1906 if (tem != NULL_TREE)
1907 return tem;
1909 else if (TREE_CODE (arg) == REAL_CST)
1911 tem = fold_convert_const (NOP_EXPR, type, arg);
1912 if (tem != NULL_TREE)
1913 return tem;
1916 switch (TREE_CODE (orig))
1918 case INTEGER_TYPE: case CHAR_TYPE:
1919 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1920 case POINTER_TYPE: case REFERENCE_TYPE:
1921 return fold (build1 (FLOAT_EXPR, type, arg));
1923 case REAL_TYPE:
1924 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1925 type, arg));
1927 case COMPLEX_TYPE:
1928 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1929 return fold_convert (type, tem);
1931 default:
1932 gcc_unreachable ();
1935 case COMPLEX_TYPE:
1936 switch (TREE_CODE (orig))
1938 case INTEGER_TYPE: case CHAR_TYPE:
1939 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1940 case POINTER_TYPE: case REFERENCE_TYPE:
1941 case REAL_TYPE:
1942 return build2 (COMPLEX_EXPR, type,
1943 fold_convert (TREE_TYPE (type), arg),
1944 fold_convert (TREE_TYPE (type), integer_zero_node));
1945 case COMPLEX_TYPE:
1947 tree rpart, ipart;
1949 if (TREE_CODE (arg) == COMPLEX_EXPR)
1951 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1952 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1953 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1956 arg = save_expr (arg);
1957 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1958 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1959 rpart = fold_convert (TREE_TYPE (type), rpart);
1960 ipart = fold_convert (TREE_TYPE (type), ipart);
1961 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1964 default:
1965 gcc_unreachable ();
1968 case VECTOR_TYPE:
1969 if (integer_zerop (arg))
1970 return build_zero_vector (type);
1971 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1972 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1973 || TREE_CODE (orig) == VECTOR_TYPE);
1974 return fold (build1 (NOP_EXPR, type, arg));
1976 case VOID_TYPE:
1977 return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
1979 default:
1980 gcc_unreachable ();
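/* [Editorial note, not in the original source]  A minimal sketch of how
   fold_convert behaves on a constant; the variable names are hypothetical.  */
#if 0
  tree one = build_int_cst (integer_type_node, 1);
  /* An INTEGER_CST converted to a REAL_TYPE goes through
     fold_convert_const (FLOAT_EXPR, ...) above and yields the REAL_CST
     1.0 directly, with no NOP_EXPR wrapper left behind.  */
  tree fone = fold_convert (double_type_node, one);
#endif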
1984 /* Return an expr equal to X but certainly not valid as an lvalue. */
1986 tree
1987 non_lvalue (tree x)
1989 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
1990 us. */
1991 if (in_gimple_form)
1992 return x;
1994 /* We only need to wrap lvalue tree codes. */
1995 switch (TREE_CODE (x))
1997 case VAR_DECL:
1998 case PARM_DECL:
1999 case RESULT_DECL:
2000 case LABEL_DECL:
2001 case FUNCTION_DECL:
2002 case SSA_NAME:
2004 case COMPONENT_REF:
2005 case INDIRECT_REF:
2006 case ALIGN_INDIRECT_REF:
2007 case MISALIGNED_INDIRECT_REF:
2008 case ARRAY_REF:
2009 case ARRAY_RANGE_REF:
2010 case BIT_FIELD_REF:
2011 case OBJ_TYPE_REF:
2013 case REALPART_EXPR:
2014 case IMAGPART_EXPR:
2015 case PREINCREMENT_EXPR:
2016 case PREDECREMENT_EXPR:
2017 case SAVE_EXPR:
2018 case TRY_CATCH_EXPR:
2019 case WITH_CLEANUP_EXPR:
2020 case COMPOUND_EXPR:
2021 case MODIFY_EXPR:
2022 case TARGET_EXPR:
2023 case COND_EXPR:
2024 case BIND_EXPR:
2025 case MIN_EXPR:
2026 case MAX_EXPR:
2027 break;
2029 default:
2030 /* Assume the worst for front-end tree codes. */
2031 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2032 break;
2033 return x;
2035 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2038 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2039 Zero means allow extended lvalues. */
2041 int pedantic_lvalues;
2043 /* When pedantic, return an expr equal to X but certainly not valid as a
2044 pedantic lvalue. Otherwise, return X. */
2046 static tree
2047 pedantic_non_lvalue (tree x)
2049 if (pedantic_lvalues)
2050 return non_lvalue (x);
2051 else
2052 return x;
2055 /* Given a tree comparison code, return the code that is the logical inverse
2056 of the given code. It is not safe to do this for floating-point
2057 comparisons, except for NE_EXPR and EQ_EXPR, so we take a HONOR_NANS flag
2058 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2060 static enum tree_code
2061 invert_tree_comparison (enum tree_code code, bool honor_nans)
2063 if (honor_nans && flag_trapping_math)
2064 return ERROR_MARK;
2066 switch (code)
2068 case EQ_EXPR:
2069 return NE_EXPR;
2070 case NE_EXPR:
2071 return EQ_EXPR;
2072 case GT_EXPR:
2073 return honor_nans ? UNLE_EXPR : LE_EXPR;
2074 case GE_EXPR:
2075 return honor_nans ? UNLT_EXPR : LT_EXPR;
2076 case LT_EXPR:
2077 return honor_nans ? UNGE_EXPR : GE_EXPR;
2078 case LE_EXPR:
2079 return honor_nans ? UNGT_EXPR : GT_EXPR;
2080 case LTGT_EXPR:
2081 return UNEQ_EXPR;
2082 case UNEQ_EXPR:
2083 return LTGT_EXPR;
2084 case UNGT_EXPR:
2085 return LE_EXPR;
2086 case UNGE_EXPR:
2087 return LT_EXPR;
2088 case UNLT_EXPR:
2089 return GE_EXPR;
2090 case UNLE_EXPR:
2091 return GT_EXPR;
2092 case ORDERED_EXPR:
2093 return UNORDERED_EXPR;
2094 case UNORDERED_EXPR:
2095 return ORDERED_EXPR;
2096 default:
2097 gcc_unreachable ();
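/* [Editorial note, not in the original source]  Why GE_EXPR cannot serve as
   the inverse of LT_EXPR once NaNs are honored; a standalone check in
   plain C.  */
#if 0
#include <assert.h>
#include <math.h>
static void
invert_with_nan_example (void)
{
  double x = nan (""), y = 0.0;
  assert (!(x < y));   /* ordered comparisons with NaN are false...       */
  assert (!(x >= y));  /* ...so x >= y is false too and cannot represent
			  !(x < y); UNGE_EXPR is required instead.        */
}
#endif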
2101 /* Similar, but return the comparison that results if the operands are
2102 swapped. This is safe for floating-point. */
2104 enum tree_code
2105 swap_tree_comparison (enum tree_code code)
2107 switch (code)
2109 case EQ_EXPR:
2110 case NE_EXPR:
2111 return code;
2112 case GT_EXPR:
2113 return LT_EXPR;
2114 case GE_EXPR:
2115 return LE_EXPR;
2116 case LT_EXPR:
2117 return GT_EXPR;
2118 case LE_EXPR:
2119 return GE_EXPR;
2120 default:
2121 gcc_unreachable ();
2126 /* Convert a comparison tree code from an enum tree_code representation
2127 into a compcode bit-based encoding. This function is the inverse of
2128 compcode_to_comparison. */
2130 static enum comparison_code
2131 comparison_to_compcode (enum tree_code code)
2133 switch (code)
2135 case LT_EXPR:
2136 return COMPCODE_LT;
2137 case EQ_EXPR:
2138 return COMPCODE_EQ;
2139 case LE_EXPR:
2140 return COMPCODE_LE;
2141 case GT_EXPR:
2142 return COMPCODE_GT;
2143 case NE_EXPR:
2144 return COMPCODE_NE;
2145 case GE_EXPR:
2146 return COMPCODE_GE;
2147 case ORDERED_EXPR:
2148 return COMPCODE_ORD;
2149 case UNORDERED_EXPR:
2150 return COMPCODE_UNORD;
2151 case UNLT_EXPR:
2152 return COMPCODE_UNLT;
2153 case UNEQ_EXPR:
2154 return COMPCODE_UNEQ;
2155 case UNLE_EXPR:
2156 return COMPCODE_UNLE;
2157 case UNGT_EXPR:
2158 return COMPCODE_UNGT;
2159 case LTGT_EXPR:
2160 return COMPCODE_LTGT;
2161 case UNGE_EXPR:
2162 return COMPCODE_UNGE;
2163 default:
2164 gcc_unreachable ();
2168 /* Convert a compcode bit-based encoding of a comparison operator back
2169 to GCC's enum tree_code representation. This function is the
2170 inverse of comparison_to_compcode. */
2172 static enum tree_code
2173 compcode_to_comparison (enum comparison_code code)
2175 switch (code)
2177 case COMPCODE_LT:
2178 return LT_EXPR;
2179 case COMPCODE_EQ:
2180 return EQ_EXPR;
2181 case COMPCODE_LE:
2182 return LE_EXPR;
2183 case COMPCODE_GT:
2184 return GT_EXPR;
2185 case COMPCODE_NE:
2186 return NE_EXPR;
2187 case COMPCODE_GE:
2188 return GE_EXPR;
2189 case COMPCODE_ORD:
2190 return ORDERED_EXPR;
2191 case COMPCODE_UNORD:
2192 return UNORDERED_EXPR;
2193 case COMPCODE_UNLT:
2194 return UNLT_EXPR;
2195 case COMPCODE_UNEQ:
2196 return UNEQ_EXPR;
2197 case COMPCODE_UNLE:
2198 return UNLE_EXPR;
2199 case COMPCODE_UNGT:
2200 return UNGT_EXPR;
2201 case COMPCODE_LTGT:
2202 return LTGT_EXPR;
2203 case COMPCODE_UNGE:
2204 return UNGE_EXPR;
2205 default:
2206 gcc_unreachable ();
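/* [Editorial note, not in the original source]  The four-bit encoding makes
   conjunction and disjunction of predicates ordinary bit operations on
   their codes, which is what combine_comparisons below relies on.  */
#if 0
#include <assert.h>
static void
compcode_algebra_example (void)
{
  assert ((COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE);    /* <  or  == is <=    */
  assert ((COMPCODE_GT | COMPCODE_EQ) == COMPCODE_GE);    /* >  or  == is >=    */
  assert ((COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ);    /* <= and >= is ==    */
  assert ((COMPCODE_LT & COMPCODE_GT) == COMPCODE_FALSE); /* <  and >  is false */
}
#endif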
2210 /* Return a tree for the comparison which is the combination of
2211 doing the AND or OR (depending on CODE) of the two operations LCODE
2212 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2213 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2214 if this makes the transformation invalid. */
2216 tree
2217 combine_comparisons (enum tree_code code, enum tree_code lcode,
2218 enum tree_code rcode, tree truth_type,
2219 tree ll_arg, tree lr_arg)
2221 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2222 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2223 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2224 enum comparison_code compcode;
2226 switch (code)
2228 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2229 compcode = lcompcode & rcompcode;
2230 break;
2232 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2233 compcode = lcompcode | rcompcode;
2234 break;
2236 default:
2237 return NULL_TREE;
2240 if (!honor_nans)
2242 /* Eliminate unordered comparisons, as well as LTGT and ORD
2243 which are not used unless the mode has NaNs. */
2244 compcode &= ~COMPCODE_UNORD;
2245 if (compcode == COMPCODE_LTGT)
2246 compcode = COMPCODE_NE;
2247 else if (compcode == COMPCODE_ORD)
2248 compcode = COMPCODE_TRUE;
2250 else if (flag_trapping_math)
2252 /* Check that the original operation and the optimized ones will trap
2253 under the same condition. */
2254 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2255 && (lcompcode != COMPCODE_EQ)
2256 && (lcompcode != COMPCODE_ORD);
2257 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2258 && (rcompcode != COMPCODE_EQ)
2259 && (rcompcode != COMPCODE_ORD);
2260 bool trap = (compcode & COMPCODE_UNORD) == 0
2261 && (compcode != COMPCODE_EQ)
2262 && (compcode != COMPCODE_ORD);
2264 /* In a short-circuited boolean expression the LHS might be
2265 such that the RHS, if evaluated, will never trap. For
2266 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2267 if neither x nor y is NaN. (This is a mixed blessing: for
2268 example, the expression above will never trap, hence
2269 optimizing it to x < y would be invalid). */
2270 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2271 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2272 rtrap = false;
2274 /* If the comparison was short-circuited, and only the RHS
2275 trapped, we may now generate a spurious trap. */
2276 if (rtrap && !ltrap
2277 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2278 return NULL_TREE;
2280 /* If we changed the conditions that cause a trap, we lose. */
2281 if ((ltrap || rtrap) != trap)
2282 return NULL_TREE;
2285 if (compcode == COMPCODE_TRUE)
2286 return constant_boolean_node (true, truth_type);
2287 else if (compcode == COMPCODE_FALSE)
2288 return constant_boolean_node (false, truth_type);
2289 else
2290 return fold (build2 (compcode_to_comparison (compcode),
2291 truth_type, ll_arg, lr_arg));
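/* [Editorial note, not in the original source]  A hypothetical use of
   combine_comparisons; X and Y stand for integral operand trees.  */
#if 0
  /* (x < y) || (x == y)  ->  x <= y   (COMPCODE_LT | COMPCODE_EQ)  */
  tree t = combine_comparisons (TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
				boolean_type_node, x, y);
  /* (x < y) && (x > y)   ->  constant false  (COMPCODE_LT & COMPCODE_GT)  */
  tree f = combine_comparisons (TRUTH_ANDIF_EXPR, LT_EXPR, GT_EXPR,
				boolean_type_node, x, y);
#endif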
2294 /* Return nonzero if CODE is a tree code that represents a truth value. */
2296 static int
2297 truth_value_p (enum tree_code code)
2299 return (TREE_CODE_CLASS (code) == tcc_comparison
2300 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2301 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2302 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2305 /* Return nonzero if two operands (typically of the same tree node)
2306 are necessarily equal. If either argument has side-effects this
2307 function returns zero. FLAGS modifies behavior as follows:
2309 If OEP_ONLY_CONST is set, only return nonzero for constants.
2310 This function tests whether the operands are indistinguishable;
2311 it does not test whether they are equal using C's == operation.
2312 The distinction is important for IEEE floating point, because
2313 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2314 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2316 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2317 even though it may hold multiple values during a function.
2318 This is because a GCC tree node guarantees that nothing else is
2319 executed between the evaluation of its "operands" (which may often
2320 be evaluated in arbitrary order). Hence if the operands themselves
2321 have no side effects, the VAR_DECLs, PARM_DECLs etc. must hold the
2322 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2323 unset means assuming isochronic (or instantaneous) tree equivalence.
2324 Unless comparing arbitrary expression trees, such as from different
2325 statements, this flag can usually be left unset.
2327 If OEP_PURE_SAME is set, then pure functions with identical arguments
2328 are considered the same. It is used when the caller has other ways
2329 to ensure that global memory is unchanged in between. */
2331 int
2332 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2334 /* If one is specified and the other isn't, they aren't equal and if
2335 neither is specified, they are.
2337 ??? This is temporary and is meant only to handle the cases of the
2338 optional operands for COMPONENT_REF and ARRAY_REF. */
2339 if ((arg0 && !arg1) || (!arg0 && arg1))
2340 return 0;
2341 else if (!arg0 && !arg1)
2342 return 1;
2343 /* If either is ERROR_MARK, they aren't equal. */
2344 else if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2345 return 0;
2347 /* If the two types differ in signedness, then we can't consider
2348 them equal. We must check this before the STRIP_NOPS calls
2349 because they may change the signedness of the arguments. */
2350 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2351 return 0;
2353 STRIP_NOPS (arg0);
2354 STRIP_NOPS (arg1);
2356 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2357 /* This is needed for conversions and for COMPONENT_REF.
2358 Might as well play it safe and always test this. */
2359 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2360 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2361 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2362 return 0;
2364 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2365 We don't care about side effects in that case because the SAVE_EXPR
2366 takes care of that for us. In all other cases, two expressions are
2367 equal if they have no side effects. If we have two identical
2368 expressions with side effects that should be treated the same due
2369 to the only side effects being identical SAVE_EXPR's, that will
2370 be detected in the recursive calls below. */
2371 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2372 && (TREE_CODE (arg0) == SAVE_EXPR
2373 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2374 return 1;
2376 /* Next handle constant cases, those for which we can return 1 even
2377 if ONLY_CONST is set. */
2378 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2379 switch (TREE_CODE (arg0))
2381 case INTEGER_CST:
2382 return (! TREE_CONSTANT_OVERFLOW (arg0)
2383 && ! TREE_CONSTANT_OVERFLOW (arg1)
2384 && tree_int_cst_equal (arg0, arg1));
2386 case REAL_CST:
2387 return (! TREE_CONSTANT_OVERFLOW (arg0)
2388 && ! TREE_CONSTANT_OVERFLOW (arg1)
2389 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2390 TREE_REAL_CST (arg1)));
2392 case VECTOR_CST:
2394 tree v1, v2;
2396 if (TREE_CONSTANT_OVERFLOW (arg0)
2397 || TREE_CONSTANT_OVERFLOW (arg1))
2398 return 0;
2400 v1 = TREE_VECTOR_CST_ELTS (arg0);
2401 v2 = TREE_VECTOR_CST_ELTS (arg1);
2402 while (v1 && v2)
2404 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2405 flags))
2406 return 0;
2407 v1 = TREE_CHAIN (v1);
2408 v2 = TREE_CHAIN (v2);
2411 return 1;
2414 case COMPLEX_CST:
2415 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2416 flags)
2417 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2418 flags));
2420 case STRING_CST:
2421 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2422 && ! memcmp (TREE_STRING_POINTER (arg0),
2423 TREE_STRING_POINTER (arg1),
2424 TREE_STRING_LENGTH (arg0)));
2426 case ADDR_EXPR:
2427 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2428 0);
2429 default:
2430 break;
2433 if (flags & OEP_ONLY_CONST)
2434 return 0;
2436 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2438 case tcc_unary:
2439 /* Two conversions are equal only if signedness and modes match. */
2440 switch (TREE_CODE (arg0))
2442 case NOP_EXPR:
2443 case CONVERT_EXPR:
2444 case FIX_CEIL_EXPR:
2445 case FIX_TRUNC_EXPR:
2446 case FIX_FLOOR_EXPR:
2447 case FIX_ROUND_EXPR:
2448 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2449 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2450 return 0;
2451 break;
2452 default:
2453 break;
2456 return operand_equal_p (TREE_OPERAND (arg0, 0),
2457 TREE_OPERAND (arg1, 0), flags);
2459 case tcc_comparison:
2460 case tcc_binary:
2461 if (operand_equal_p (TREE_OPERAND (arg0, 0),
2462 TREE_OPERAND (arg1, 0), flags)
2463 && operand_equal_p (TREE_OPERAND (arg0, 1),
2464 TREE_OPERAND (arg1, 1), flags))
2465 return 1;
2467 /* For commutative ops, allow the other order. */
2468 return (commutative_tree_code (TREE_CODE (arg0))
2469 && operand_equal_p (TREE_OPERAND (arg0, 0),
2470 TREE_OPERAND (arg1, 1), flags)
2471 && operand_equal_p (TREE_OPERAND (arg0, 1),
2472 TREE_OPERAND (arg1, 0), flags));
2474 case tcc_reference:
2475 /* If either of the pointer (or reference) expressions we are
2476 dereferencing contain a side effect, these cannot be equal. */
2477 if (TREE_SIDE_EFFECTS (arg0)
2478 || TREE_SIDE_EFFECTS (arg1))
2479 return 0;
2481 switch (TREE_CODE (arg0))
2483 case INDIRECT_REF:
2484 case ALIGN_INDIRECT_REF:
2485 case MISALIGNED_INDIRECT_REF:
2486 case REALPART_EXPR:
2487 case IMAGPART_EXPR:
2488 return operand_equal_p (TREE_OPERAND (arg0, 0),
2489 TREE_OPERAND (arg1, 0), flags);
2491 case ARRAY_REF:
2492 case ARRAY_RANGE_REF:
2493 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2494 TREE_OPERAND (arg1, 0), flags)
2495 && operand_equal_p (TREE_OPERAND (arg0, 1),
2496 TREE_OPERAND (arg1, 1), flags)
2497 && operand_equal_p (TREE_OPERAND (arg0, 2),
2498 TREE_OPERAND (arg1, 2), flags)
2499 && operand_equal_p (TREE_OPERAND (arg0, 3),
2500 TREE_OPERAND (arg1, 3), flags));
2503 case COMPONENT_REF:
2504 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2505 TREE_OPERAND (arg1, 0), flags)
2506 && operand_equal_p (TREE_OPERAND (arg0, 1),
2507 TREE_OPERAND (arg1, 1), flags)
2508 && operand_equal_p (TREE_OPERAND (arg0, 2),
2509 TREE_OPERAND (arg1, 2), flags));
2512 case BIT_FIELD_REF:
2513 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2514 TREE_OPERAND (arg1, 0), flags)
2515 && operand_equal_p (TREE_OPERAND (arg0, 1),
2516 TREE_OPERAND (arg1, 1), flags)
2517 && operand_equal_p (TREE_OPERAND (arg0, 2),
2518 TREE_OPERAND (arg1, 2), flags));
2519 default:
2520 return 0;
2523 case tcc_expression:
2524 switch (TREE_CODE (arg0))
2526 case ADDR_EXPR:
2527 case TRUTH_NOT_EXPR:
2528 return operand_equal_p (TREE_OPERAND (arg0, 0),
2529 TREE_OPERAND (arg1, 0), flags);
2531 case TRUTH_ANDIF_EXPR:
2532 case TRUTH_ORIF_EXPR:
2533 return operand_equal_p (TREE_OPERAND (arg0, 0),
2534 TREE_OPERAND (arg1, 0), flags)
2535 && operand_equal_p (TREE_OPERAND (arg0, 1),
2536 TREE_OPERAND (arg1, 1), flags);
2538 case TRUTH_AND_EXPR:
2539 case TRUTH_OR_EXPR:
2540 case TRUTH_XOR_EXPR:
2541 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2542 TREE_OPERAND (arg1, 0), flags)
2543 && operand_equal_p (TREE_OPERAND (arg0, 1),
2544 TREE_OPERAND (arg1, 1), flags))
2545 || (operand_equal_p (TREE_OPERAND (arg0, 0),
2546 TREE_OPERAND (arg1, 1), flags)
2547 && operand_equal_p (TREE_OPERAND (arg0, 1),
2548 TREE_OPERAND (arg1, 0), flags));
2550 case CALL_EXPR:
2551 /* If the CALL_EXPRs call different functions, then they
2552 clearly can not be equal. */
2553 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2554 TREE_OPERAND (arg1, 0), flags))
2555 return 0;
2558 unsigned int cef = call_expr_flags (arg0);
2559 if (flags & OEP_PURE_SAME)
2560 cef &= ECF_CONST | ECF_PURE;
2561 else
2562 cef &= ECF_CONST;
2563 if (!cef)
2564 return 0;
2567 /* Now see if all the arguments are the same. operand_equal_p
2568 does not handle TREE_LIST, so we walk the operands here
2569 feeding them to operand_equal_p. */
2570 arg0 = TREE_OPERAND (arg0, 1);
2571 arg1 = TREE_OPERAND (arg1, 1);
2572 while (arg0 && arg1)
2574 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2575 flags))
2576 return 0;
2578 arg0 = TREE_CHAIN (arg0);
2579 arg1 = TREE_CHAIN (arg1);
2582 /* If we get here and both argument lists are exhausted
2583 then the CALL_EXPRs are equal. */
2584 return ! (arg0 || arg1);
2586 default:
2587 return 0;
2590 case tcc_declaration:
2591 /* Consider __builtin_sqrt equal to sqrt. */
2592 return (TREE_CODE (arg0) == FUNCTION_DECL
2593 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2594 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2595 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2597 default:
2598 return 0;
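/* [Editorial note, not in the original source]  Sketch of the flag
   semantics; "a" stands for a hypothetical VAR_DECL tree.  */
#if 0
  operand_equal_p (a, a, 0);               /* 1: a decl equals itself.       */
  operand_equal_p (a, a, OEP_ONLY_CONST);  /* 0: a VAR_DECL is not constant. */
  /* With OEP_PURE_SAME, two calls to the same pure function with equal
     argument lists may also compare equal.  */
#endif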
2602 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2603 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2605 When in doubt, return 0. */
2607 static int
2608 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2610 int unsignedp1, unsignedpo;
2611 tree primarg0, primarg1, primother;
2612 unsigned int correct_width;
2614 if (operand_equal_p (arg0, arg1, 0))
2615 return 1;
2617 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2618 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2619 return 0;
2621 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2622 and see if the inner values are the same. This removes any
2623 signedness comparison, which doesn't matter here. */
2624 primarg0 = arg0, primarg1 = arg1;
2625 STRIP_NOPS (primarg0);
2626 STRIP_NOPS (primarg1);
2627 if (operand_equal_p (primarg0, primarg1, 0))
2628 return 1;
2630 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2631 actual comparison operand, ARG0.
2633 First throw away any conversions to wider types
2634 already present in the operands. */
2636 primarg1 = get_narrower (arg1, &unsignedp1);
2637 primother = get_narrower (other, &unsignedpo);
2639 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2640 if (unsignedp1 == unsignedpo
2641 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2642 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2644 tree type = TREE_TYPE (arg0);
2646 /* Make sure shorter operand is extended the right way
2647 to match the longer operand. */
2648 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2649 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2651 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2652 return 1;
2655 return 0;
2658 /* See if ARG is an expression that is either a comparison or is performing
2659 arithmetic on comparisons. The comparisons must only be comparing
2660 two different values, which will be stored in *CVAL1 and *CVAL2; if
2661 they are nonzero it means that some operands have already been found.
2662 No variables may be used anywhere else in the expression except in the
2663 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2664 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2666 If this is true, return 1. Otherwise, return zero. */
2668 static int
2669 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2671 enum tree_code code = TREE_CODE (arg);
2672 enum tree_code_class class = TREE_CODE_CLASS (code);
2674 /* We can handle some of the tcc_expression cases here. */
2675 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2676 class = tcc_unary;
2677 else if (class == tcc_expression
2678 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2679 || code == COMPOUND_EXPR))
2680 class = tcc_binary;
2682 else if (class == tcc_expression && code == SAVE_EXPR
2683 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2685 /* If we've already found a CVAL1 or CVAL2, this expression is
2686 too complex to handle. */
2687 if (*cval1 || *cval2)
2688 return 0;
2690 class = tcc_unary;
2691 *save_p = 1;
2694 switch (class)
2696 case tcc_unary:
2697 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2699 case tcc_binary:
2700 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2701 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2702 cval1, cval2, save_p));
2704 case tcc_constant:
2705 return 1;
2707 case tcc_expression:
2708 if (code == COND_EXPR)
2709 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2710 cval1, cval2, save_p)
2711 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2712 cval1, cval2, save_p)
2713 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2714 cval1, cval2, save_p));
2715 return 0;
2717 case tcc_comparison:
2718 /* First see if we can handle the first operand, then the second. For
2719 the second operand, we know *CVAL1 can't be zero. It must be that
2720 one side of the comparison is each of the values; test for the
2721 case where this isn't true by failing if the two operands
2722 are the same. */
2724 if (operand_equal_p (TREE_OPERAND (arg, 0),
2725 TREE_OPERAND (arg, 1), 0))
2726 return 0;
2728 if (*cval1 == 0)
2729 *cval1 = TREE_OPERAND (arg, 0);
2730 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2731 ;
2732 else if (*cval2 == 0)
2733 *cval2 = TREE_OPERAND (arg, 0);
2734 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2735 ;
2736 else
2737 return 0;
2739 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2740 ;
2741 else if (*cval2 == 0)
2742 *cval2 = TREE_OPERAND (arg, 1);
2743 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2744 ;
2745 else
2746 return 0;
2748 return 1;
2750 default:
2751 return 0;
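/* [Editorial note, not in the original source]  Worked examples, with a, b
   and c standing for hypothetical variable trees:
     (a < b) && (a == b)  ->  returns 1, with *CVAL1 = a and *CVAL2 = b;
     (a < b) && (a < c)   ->  returns 0, since three distinct values
			      appear in the comparisons.  */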
2755 /* ARG is a tree that is known to contain just arithmetic operations and
2756 comparisons. Evaluate the operations in the tree substituting NEW0 for
2757 any occurrence of OLD0 as an operand of a comparison and likewise for
2758 NEW1 and OLD1. */
2760 static tree
2761 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2763 tree type = TREE_TYPE (arg);
2764 enum tree_code code = TREE_CODE (arg);
2765 enum tree_code_class class = TREE_CODE_CLASS (code);
2767 /* We can handle some of the tcc_expression cases here. */
2768 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2769 class = tcc_unary;
2770 else if (class == tcc_expression
2771 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2772 class = tcc_binary;
2774 switch (class)
2776 case tcc_unary:
2777 return fold (build1 (code, type,
2778 eval_subst (TREE_OPERAND (arg, 0),
2779 old0, new0, old1, new1)));
2781 case tcc_binary:
2782 return fold (build2 (code, type,
2783 eval_subst (TREE_OPERAND (arg, 0),
2784 old0, new0, old1, new1),
2785 eval_subst (TREE_OPERAND (arg, 1),
2786 old0, new0, old1, new1)));
2788 case tcc_expression:
2789 switch (code)
2791 case SAVE_EXPR:
2792 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2794 case COMPOUND_EXPR:
2795 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2797 case COND_EXPR:
2798 return fold (build3 (code, type,
2799 eval_subst (TREE_OPERAND (arg, 0),
2800 old0, new0, old1, new1),
2801 eval_subst (TREE_OPERAND (arg, 1),
2802 old0, new0, old1, new1),
2803 eval_subst (TREE_OPERAND (arg, 2),
2804 old0, new0, old1, new1)));
2805 default:
2806 break;
2808 /* Fall through - ??? */
2810 case tcc_comparison:
2812 tree arg0 = TREE_OPERAND (arg, 0);
2813 tree arg1 = TREE_OPERAND (arg, 1);
2815 /* We need to check both for exact equality and tree equality. The
2816 former will be true if the operand has a side-effect. In that
2817 case, we know the operand occurred exactly once. */
2819 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2820 arg0 = new0;
2821 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2822 arg0 = new1;
2824 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2825 arg1 = new0;
2826 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2827 arg1 = new1;
2829 return fold (build2 (code, type, arg0, arg1));
2832 default:
2833 return arg;
2837 /* Return a tree for the case when the result of an expression is RESULT
2838 converted to TYPE and OMITTED was previously an operand of the expression
2839 but is now not needed (e.g., we folded OMITTED * 0).
2841 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2842 the conversion of RESULT to TYPE. */
2844 tree
2845 omit_one_operand (tree type, tree result, tree omitted)
2847 tree t = fold_convert (type, result);
2849 if (TREE_SIDE_EFFECTS (omitted))
2850 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2852 return non_lvalue (t);
2855 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2857 static tree
2858 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2860 tree t = fold_convert (type, result);
2862 if (TREE_SIDE_EFFECTS (omitted))
2863 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2865 return pedantic_non_lvalue (t);
2868 /* Return a tree for the case when the result of an expression is RESULT
2869 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2870 of the expression but are now not needed.
2872 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2873 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2874 evaluated before OMITTED2. Otherwise, if neither has side effects,
2875 just do the conversion of RESULT to TYPE. */
2877 tree
2878 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2880 tree t = fold_convert (type, result);
2882 if (TREE_SIDE_EFFECTS (omitted2))
2883 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2884 if (TREE_SIDE_EFFECTS (omitted1))
2885 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2887 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
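/* [Editorial note, not in the original source]  Sketch: folding f () * 0
   must still evaluate f () for its side effects; "call" stands for a
   hypothetical CALL_EXPR tree.  */
#if 0
  tree t = omit_one_operand (integer_type_node, integer_zero_node, call);
  /* If the call has side effects, t is COMPOUND_EXPR <call, 0>, i.e. the
     C expression (f (), 0); otherwise it is just the constant 0.  */
#endif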
2891 /* Return a simplified tree node for the truth-negation of ARG. This
2892 never alters ARG itself. We assume that ARG is an operation that
2893 returns a truth value (0 or 1).
2895 FIXME: one would think we would fold the result, but it causes
2896 problems with the dominator optimizer. */
2897 tree
2898 invert_truthvalue (tree arg)
2900 tree type = TREE_TYPE (arg);
2901 enum tree_code code = TREE_CODE (arg);
2903 if (code == ERROR_MARK)
2904 return arg;
2906 /* If this is a comparison, we can simply invert it, except for
2907 floating-point non-equality comparisons, in which case we just
2908 enclose a TRUTH_NOT_EXPR around what we have. */
2910 if (TREE_CODE_CLASS (code) == tcc_comparison)
2912 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2913 if (FLOAT_TYPE_P (op_type)
2914 && flag_trapping_math
2915 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2916 && code != NE_EXPR && code != EQ_EXPR)
2917 return build1 (TRUTH_NOT_EXPR, type, arg);
2918 else
2920 code = invert_tree_comparison (code,
2921 HONOR_NANS (TYPE_MODE (op_type)));
2922 if (code == ERROR_MARK)
2923 return build1 (TRUTH_NOT_EXPR, type, arg);
2924 else
2925 return build2 (code, type,
2926 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2930 switch (code)
2932 case INTEGER_CST:
2933 return fold_convert (type,
2934 build_int_cst (NULL_TREE, integer_zerop (arg)));
2936 case TRUTH_AND_EXPR:
2937 return build2 (TRUTH_OR_EXPR, type,
2938 invert_truthvalue (TREE_OPERAND (arg, 0)),
2939 invert_truthvalue (TREE_OPERAND (arg, 1)));
2941 case TRUTH_OR_EXPR:
2942 return build2 (TRUTH_AND_EXPR, type,
2943 invert_truthvalue (TREE_OPERAND (arg, 0)),
2944 invert_truthvalue (TREE_OPERAND (arg, 1)));
2946 case TRUTH_XOR_EXPR:
2947 /* Here we can invert either operand. We invert the first operand
2948 unless the second operand is a TRUTH_NOT_EXPR in which case our
2949 result is the XOR of the first operand with the inside of the
2950 negation of the second operand. */
2952 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2953 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2954 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2955 else
2956 return build2 (TRUTH_XOR_EXPR, type,
2957 invert_truthvalue (TREE_OPERAND (arg, 0)),
2958 TREE_OPERAND (arg, 1));
2960 case TRUTH_ANDIF_EXPR:
2961 return build2 (TRUTH_ORIF_EXPR, type,
2962 invert_truthvalue (TREE_OPERAND (arg, 0)),
2963 invert_truthvalue (TREE_OPERAND (arg, 1)));
2965 case TRUTH_ORIF_EXPR:
2966 return build2 (TRUTH_ANDIF_EXPR, type,
2967 invert_truthvalue (TREE_OPERAND (arg, 0)),
2968 invert_truthvalue (TREE_OPERAND (arg, 1)));
2970 case TRUTH_NOT_EXPR:
2971 return TREE_OPERAND (arg, 0);
2973 case COND_EXPR:
2974 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2975 invert_truthvalue (TREE_OPERAND (arg, 1)),
2976 invert_truthvalue (TREE_OPERAND (arg, 2)));
2978 case COMPOUND_EXPR:
2979 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2980 invert_truthvalue (TREE_OPERAND (arg, 1)));
2982 case NON_LVALUE_EXPR:
2983 return invert_truthvalue (TREE_OPERAND (arg, 0));
2985 case NOP_EXPR:
2986 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2987 break;
2989 case CONVERT_EXPR:
2990 case FLOAT_EXPR:
2991 return build1 (TREE_CODE (arg), type,
2992 invert_truthvalue (TREE_OPERAND (arg, 0)));
2994 case BIT_AND_EXPR:
2995 if (!integer_onep (TREE_OPERAND (arg, 1)))
2996 break;
2997 return build2 (EQ_EXPR, type, arg,
2998 fold_convert (type, integer_zero_node));
3000 case SAVE_EXPR:
3001 return build1 (TRUTH_NOT_EXPR, type, arg);
3003 case CLEANUP_POINT_EXPR:
3004 return build1 (CLEANUP_POINT_EXPR, type,
3005 invert_truthvalue (TREE_OPERAND (arg, 0)));
3007 default:
3008 break;
3010 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3011 return build1 (TRUTH_NOT_EXPR, type, arg);
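/* [Editorial note, not in the original source]  The TRUTH_AND/TRUTH_OR
   cases above are De Morgan's laws; a standalone check in plain C.  */
#if 0
#include <assert.h>
static void
de_morgan_example (int a, int b)
{
  assert (!(a && b) == (!a || !b));
  assert (!(a || b) == (!a && !b));
}
#endif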
3014 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3015 operands are another bit-wise operation with a common input. If so,
3016 distribute the bit operations to save an operation and possibly two if
3017 constants are involved. For example, convert
3018 (A | B) & (A | C) into A | (B & C)
3019 Further simplification will occur if B and C are constants.
3021 If this optimization cannot be done, 0 will be returned. */
3023 static tree
3024 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3026 tree common;
3027 tree left, right;
3029 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3030 || TREE_CODE (arg0) == code
3031 || (TREE_CODE (arg0) != BIT_AND_EXPR
3032 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3033 return 0;
3035 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3037 common = TREE_OPERAND (arg0, 0);
3038 left = TREE_OPERAND (arg0, 1);
3039 right = TREE_OPERAND (arg1, 1);
3041 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3043 common = TREE_OPERAND (arg0, 0);
3044 left = TREE_OPERAND (arg0, 1);
3045 right = TREE_OPERAND (arg1, 0);
3047 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3049 common = TREE_OPERAND (arg0, 1);
3050 left = TREE_OPERAND (arg0, 0);
3051 right = TREE_OPERAND (arg1, 1);
3053 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3055 common = TREE_OPERAND (arg0, 1);
3056 left = TREE_OPERAND (arg0, 0);
3057 right = TREE_OPERAND (arg1, 0);
3059 else
3060 return 0;
3062 return fold (build2 (TREE_CODE (arg0), type, common,
3063 fold (build2 (code, type, left, right))));
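/* [Editorial note, not in the original source]  Standalone check of the
   distributive identities applied above; they hold bitwise for any
   unsigned operands.  */
#if 0
#include <assert.h>
static void
distribute_example (unsigned a, unsigned b, unsigned c)
{
  assert (((a | b) & (a | c)) == (a | (b & c)));
  assert (((a & b) | (a & c)) == (a & (b | c)));
}
#endif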
3066 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3067 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3069 static tree
3070 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3071 int unsignedp)
3073 tree result = build3 (BIT_FIELD_REF, type, inner,
3074 size_int (bitsize), bitsize_int (bitpos));
3076 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3078 return result;
3081 /* Optimize a bit-field compare.
3083 There are two cases: First is a compare against a constant and the
3084 second is a comparison of two items where the fields are at the same
3085 bit position relative to the start of a chunk (byte, halfword, word)
3086 large enough to contain it. In these cases we can avoid the shift
3087 implicit in bitfield extractions.
3089 For constants, we emit a compare of the shifted constant with the
3090 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3091 compared. For two fields at the same position, we do the ANDs with the
3092 similar mask and compare the result of the ANDs.
3094 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3095 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3096 are the left and right operands of the comparison, respectively.
3098 If the optimization described above can be done, we return the resulting
3099 tree. Otherwise we return zero. */
3101 static tree
3102 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3103 tree lhs, tree rhs)
3105 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3106 tree type = TREE_TYPE (lhs);
3107 tree signed_type, unsigned_type;
3108 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3109 enum machine_mode lmode, rmode, nmode;
3110 int lunsignedp, runsignedp;
3111 int lvolatilep = 0, rvolatilep = 0;
3112 tree linner, rinner = NULL_TREE;
3113 tree mask;
3114 tree offset;
3116 /* Get all the information about the extractions being done. If the bit size
3117 is the same as the size of the underlying object, we aren't doing an
3118 extraction at all and so can do nothing. We also don't want to
3119 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3120 then will no longer be able to replace it. */
3121 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3122 &lunsignedp, &lvolatilep);
3123 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3124 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3125 return 0;
3127 if (!const_p)
3129 /* If this is not a constant, we can only do something if bit positions,
3130 sizes, and signedness are the same. */
3131 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3132 &runsignedp, &rvolatilep);
3134 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3135 || lunsignedp != runsignedp || offset != 0
3136 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3137 return 0;
3140 /* See if we can find a mode to refer to this field. We should be able to,
3141 but fail if we can't. */
3142 nmode = get_best_mode (lbitsize, lbitpos,
3143 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3144 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3145 TYPE_ALIGN (TREE_TYPE (rinner))),
3146 word_mode, lvolatilep || rvolatilep);
3147 if (nmode == VOIDmode)
3148 return 0;
3150 /* Set signed and unsigned types of the precision of this mode for the
3151 shifts below. */
3152 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3153 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3155 /* Compute the bit position and size for the new reference and our offset
3156 within it. If the new reference is the same size as the original, we
3157 won't optimize anything, so return zero. */
3158 nbitsize = GET_MODE_BITSIZE (nmode);
3159 nbitpos = lbitpos & ~ (nbitsize - 1);
3160 lbitpos -= nbitpos;
3161 if (nbitsize == lbitsize)
3162 return 0;
3164 if (BYTES_BIG_ENDIAN)
3165 lbitpos = nbitsize - lbitsize - lbitpos;
3167 /* Make the mask to be used against the extracted field. */
3168 mask = build_int_cst (unsigned_type, -1);
3169 mask = force_fit_type (mask, 0, false, false);
3170 mask = fold_convert (unsigned_type, mask);
3171 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3172 mask = const_binop (RSHIFT_EXPR, mask,
3173 size_int (nbitsize - lbitsize - lbitpos), 0);
3175 if (! const_p)
3176 /* If not comparing with constant, just rework the comparison
3177 and return. */
3178 return build2 (code, compare_type,
3179 build2 (BIT_AND_EXPR, unsigned_type,
3180 make_bit_field_ref (linner, unsigned_type,
3181 nbitsize, nbitpos, 1),
3182 mask),
3183 build2 (BIT_AND_EXPR, unsigned_type,
3184 make_bit_field_ref (rinner, unsigned_type,
3185 nbitsize, nbitpos, 1),
3186 mask));
3188 /* Otherwise, we are handling the constant case. See if the constant is too
3189 big for the field. Warn and return a tree for 0 (false) if so. We do
3190 this not only for its own sake, but to avoid having to test for this
3191 error case below. If we didn't, we might generate wrong code.
3193 For unsigned fields, the constant shifted right by the field length should
3194 be all zero. For signed fields, the high-order bits should agree with
3195 the sign bit. */
3197 if (lunsignedp)
3199 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3200 fold_convert (unsigned_type, rhs),
3201 size_int (lbitsize), 0)))
3203 warning ("comparison is always %d due to width of bit-field",
3204 code == NE_EXPR);
3205 return constant_boolean_node (code == NE_EXPR, compare_type);
3208 else
3210 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3211 size_int (lbitsize - 1), 0);
3212 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3214 warning ("comparison is always %d due to width of bit-field",
3215 code == NE_EXPR);
3216 return constant_boolean_node (code == NE_EXPR, compare_type);
3220 /* Single-bit compares should always be against zero. */
3221 if (lbitsize == 1 && ! integer_zerop (rhs))
3223 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3224 rhs = fold_convert (type, integer_zero_node);
3227 /* Make a new bitfield reference, shift the constant over the
3228 appropriate number of bits and mask it with the computed mask
3229 (in case this was a signed field). If we changed it, make a new one. */
3230 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3231 if (lvolatilep)
3233 TREE_SIDE_EFFECTS (lhs) = 1;
3234 TREE_THIS_VOLATILE (lhs) = 1;
3237 rhs = fold (const_binop (BIT_AND_EXPR,
3238 const_binop (LSHIFT_EXPR,
3239 fold_convert (unsigned_type, rhs),
3240 size_int (lbitpos), 0),
3241 mask, 0));
3243 return build2 (code, compare_type,
3244 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3245 rhs);
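/* [Editorial note, not in the original source]  The mask construction
   above, restated in plain C for a 32-bit word; assumes 0 < lbitsize
   and lbitsize + lbitpos <= 32 so neither shift is out of range.  */
#if 0
static unsigned int
field_mask_example (int lbitsize, int lbitpos)
{
  unsigned int mask = ~0u;
  mask <<= 32 - lbitsize;            /* keep LBITSIZE one bits...       */
  mask >>= 32 - lbitsize - lbitpos;  /* ...and place them at LBITPOS.   */
  return mask;                       /* lbitsize 8, lbitpos 4 -> 0xff0  */
}
#endif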
3248 /* Subroutine for fold_truthop: decode a field reference.
3250 If EXP is a comparison reference, we return the innermost reference.
3252 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3253 set to the starting bit number.
3255 If the innermost field can be completely contained in a mode-sized
3256 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3258 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3259 otherwise it is not changed.
3261 *PUNSIGNEDP is set to the signedness of the field.
3263 *PMASK is set to the mask used. This is either contained in a
3264 BIT_AND_EXPR or derived from the width of the field.
3266 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3268 Return 0 if this is not a component reference or is one that we can't
3269 do anything with. */
3271 static tree
3272 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3273 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3274 int *punsignedp, int *pvolatilep,
3275 tree *pmask, tree *pand_mask)
3277 tree outer_type = 0;
3278 tree and_mask = 0;
3279 tree mask, inner, offset;
3280 tree unsigned_type;
3281 unsigned int precision;
3283 /* All the optimizations using this function assume integer fields.
3284 There are problems with FP fields since the type_for_size call
3285 below can fail for, e.g., XFmode. */
3286 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3287 return 0;
3289 /* We are interested in the bare arrangement of bits, so strip everything
3290 that doesn't affect the machine mode. However, record the type of the
3291 outermost expression if it may matter below. */
3292 if (TREE_CODE (exp) == NOP_EXPR
3293 || TREE_CODE (exp) == CONVERT_EXPR
3294 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3295 outer_type = TREE_TYPE (exp);
3296 STRIP_NOPS (exp);
3298 if (TREE_CODE (exp) == BIT_AND_EXPR)
3300 and_mask = TREE_OPERAND (exp, 1);
3301 exp = TREE_OPERAND (exp, 0);
3302 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3303 if (TREE_CODE (and_mask) != INTEGER_CST)
3304 return 0;
3307 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3308 punsignedp, pvolatilep);
3309 if ((inner == exp && and_mask == 0)
3310 || *pbitsize < 0 || offset != 0
3311 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3312 return 0;
3314 /* If the number of bits in the reference is the same as the bitsize of
3315 the outer type, then the outer type gives the signedness. Otherwise
3316 (in case of a small bitfield) the signedness is unchanged. */
3317 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3318 *punsignedp = TYPE_UNSIGNED (outer_type);
3320 /* Compute the mask to access the bitfield. */
3321 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3322 precision = TYPE_PRECISION (unsigned_type);
3324 mask = build_int_cst (unsigned_type, -1);
3325 mask = force_fit_type (mask, 0, false, false);
3327 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3328 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3330 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3331 if (and_mask != 0)
3332 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3333 fold_convert (unsigned_type, and_mask), mask));
3335 *pmask = mask;
3336 *pand_mask = and_mask;
3337 return inner;
3340 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3341 bit positions. */
3343 static int
3344 all_ones_mask_p (tree mask, int size)
3346 tree type = TREE_TYPE (mask);
3347 unsigned int precision = TYPE_PRECISION (type);
3348 tree tmask;
3350 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3351 tmask = force_fit_type (tmask, 0, false, false);
3353 return
3354 tree_int_cst_equal (mask,
3355 const_binop (RSHIFT_EXPR,
3356 const_binop (LSHIFT_EXPR, tmask,
3357 size_int (precision - size),
3358 0),
3359 size_int (precision - size), 0));
3362 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3363 represents the sign bit of EXP's type. If EXP represents a sign
3364 or zero extension, also test VAL against the unextended type.
3365 The return value is the (sub)expression whose sign bit is VAL,
3366 or NULL_TREE otherwise. */
3368 static tree
3369 sign_bit_p (tree exp, tree val)
3371 unsigned HOST_WIDE_INT mask_lo, lo;
3372 HOST_WIDE_INT mask_hi, hi;
3373 int width;
3374 tree t;
3376 /* Tree EXP must have an integral type. */
3377 t = TREE_TYPE (exp);
3378 if (! INTEGRAL_TYPE_P (t))
3379 return NULL_TREE;
3381 /* Tree VAL must be an integer constant. */
3382 if (TREE_CODE (val) != INTEGER_CST
3383 || TREE_CONSTANT_OVERFLOW (val))
3384 return NULL_TREE;
3386 width = TYPE_PRECISION (t);
3387 if (width > HOST_BITS_PER_WIDE_INT)
3389 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3390 lo = 0;
3392 mask_hi = ((unsigned HOST_WIDE_INT) -1
3393 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3394 mask_lo = -1;
3396 else
3398 hi = 0;
3399 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3401 mask_hi = 0;
3402 mask_lo = ((unsigned HOST_WIDE_INT) -1
3403 >> (HOST_BITS_PER_WIDE_INT - width));
3406 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3407 treat VAL as if it were unsigned. */
3408 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3409 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3410 return exp;
3412 /* Handle extension from a narrower type. */
3413 if (TREE_CODE (exp) == NOP_EXPR
3414 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3415 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3417 return NULL_TREE;
3420 /* Subroutine for fold_truthop: determine if an operand is simple enough
3421 to be evaluated unconditionally. */
3423 static int
3424 simple_operand_p (tree exp)
3426 /* Strip any conversions that don't change the machine mode. */
3427 while ((TREE_CODE (exp) == NOP_EXPR
3428 || TREE_CODE (exp) == CONVERT_EXPR)
3429 && (TYPE_MODE (TREE_TYPE (exp))
3430 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3431 exp = TREE_OPERAND (exp, 0);
3433 return (CONSTANT_CLASS_P (exp)
3434 || (DECL_P (exp)
3435 && ! TREE_ADDRESSABLE (exp)
3436 && ! TREE_THIS_VOLATILE (exp)
3437 && ! DECL_NONLOCAL (exp)
3438 /* Don't regard global variables as simple. They may be
3439 allocated in ways unknown to the compiler (shared memory,
3440 #pragma weak, etc). */
3441 && ! TREE_PUBLIC (exp)
3442 && ! DECL_EXTERNAL (exp)
3443 /* Loading a static variable is unduly expensive, but global
3444 registers aren't expensive. */
3445 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3448 /* The following functions are subroutines to fold_range_test and allow it to
3449 try to change a logical combination of comparisons into a range test.
3451 For example, both
3452 X == 2 || X == 3 || X == 4 || X == 5
3453 and
3454 X >= 2 && X <= 5
3455 are converted to
3456 (unsigned) (X - 2) <= 3
3458 We describe each set of comparisons as being either inside or outside
3459 a range, using a variable named like IN_P, and then describe the
3460 range with a lower and upper bound. If one of the bounds is omitted,
3461 it represents either the highest or lowest value of the type.
3463 In the comments below, we represent a range by two numbers in brackets
3464 preceded by a "+" to designate being inside that range, or a "-" to
3465 designate being outside that range, so the condition can be inverted by
3466 flipping the prefix. An omitted bound is represented by a "-". For
3467 example, "- [-, 10]" means being outside the range starting at the lowest
3468 possible value and ending at 10, in other words, being greater than 10.
3469 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3470 always false.
3472 We set up things so that the missing bounds are handled in a consistent
3473 manner so neither a missing bound nor "true" and "false" need to be
3474 handled using a special case. */
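/* [Editorial note, not in the original source]  A standalone check of the
   transformation quoted above, assuming 32-bit wrapping unsigned
   arithmetic (and ignoring signed overflow of x - 2 at INT_MIN).  */
#if 0
#include <assert.h>
static void
range_test_example (int x)
{
  assert ((x >= 2 && x <= 5) == ((unsigned) (x - 2) <= 3u));
}
#endif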
3476 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3477 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3478 and UPPER1_P are nonzero if the respective argument is an upper bound
3479 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3480 must be specified for a comparison. ARG1 will be converted to ARG0's
3481 type if both are specified. */
3483 static tree
3484 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3485 tree arg1, int upper1_p)
3487 tree tem;
3488 int result;
3489 int sgn0, sgn1;
3491 /* If neither arg represents infinity, do the normal operation.
3492 Else, if not a comparison, return infinity. Else handle the special
3493 comparison rules. Note that most of the cases below won't occur, but
3494 are handled for consistency. */
3496 if (arg0 != 0 && arg1 != 0)
3498 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3499 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3500 STRIP_NOPS (tem);
3501 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3504 if (TREE_CODE_CLASS (code) != tcc_comparison)
3505 return 0;
3507 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3508 for neither. In real maths, we cannot assume open ended ranges are
3509 the same. But, this is computer arithmetic, where numbers are finite.
3510 We can therefore make the transformation of any unbounded range with
3511 the value Z, Z being greater than any representable number. This permits
3512 us to treat unbounded ranges as equal. */
3513 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3514 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3515 switch (code)
3517 case EQ_EXPR:
3518 result = sgn0 == sgn1;
3519 break;
3520 case NE_EXPR:
3521 result = sgn0 != sgn1;
3522 break;
3523 case LT_EXPR:
3524 result = sgn0 < sgn1;
3525 break;
3526 case LE_EXPR:
3527 result = sgn0 <= sgn1;
3528 break;
3529 case GT_EXPR:
3530 result = sgn0 > sgn1;
3531 break;
3532 case GE_EXPR:
3533 result = sgn0 >= sgn1;
3534 break;
3535 default:
3536 gcc_unreachable ();
3539 return constant_boolean_node (result, type);
3542 /* Given EXP, a logical expression, set the range it is testing into
3543 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3544 actually being tested. *PLOW and *PHIGH will be made of the same type
3545 as the returned expression. If EXP is not a comparison, we will most
3546 likely not be returning a useful value and range. */
3548 static tree
3549 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3551 enum tree_code code;
3552 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3553 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3554 int in_p, n_in_p;
3555 tree low, high, n_low, n_high;
3557 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3558 and see if we can refine the range. Some of the cases below may not
3559 happen, but it doesn't seem worth worrying about this. We "continue"
3560 the outer loop when we've changed something; otherwise we "break"
3561 the switch, which will "break" the while. */
3563 in_p = 0;
3564 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3566 while (1)
3568 code = TREE_CODE (exp);
3569 exp_type = TREE_TYPE (exp);
3571 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3573 if (first_rtl_op (code) > 0)
3574 arg0 = TREE_OPERAND (exp, 0);
3575 if (TREE_CODE_CLASS (code) == tcc_comparison
3576 || TREE_CODE_CLASS (code) == tcc_unary
3577 || TREE_CODE_CLASS (code) == tcc_binary)
3578 arg0_type = TREE_TYPE (arg0);
3579 if (TREE_CODE_CLASS (code) == tcc_binary
3580 || TREE_CODE_CLASS (code) == tcc_comparison
3581 || (TREE_CODE_CLASS (code) == tcc_expression
3582 && TREE_CODE_LENGTH (code) > 1))
3583 arg1 = TREE_OPERAND (exp, 1);
3586 switch (code)
3588 case TRUTH_NOT_EXPR:
3589 in_p = ! in_p, exp = arg0;
3590 continue;
3592 case EQ_EXPR: case NE_EXPR:
3593 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3594 /* We can only do something if the range is testing for zero
3595 and if the second operand is an integer constant. Note that
3596 saying something is "in" the range we make is done by
3597 complementing IN_P, since it is set in the initial case of
3598 being not equal to zero; "out" is leaving it alone. */
3599 if (low == 0 || high == 0
3600 || ! integer_zerop (low) || ! integer_zerop (high)
3601 || TREE_CODE (arg1) != INTEGER_CST)
3602 break;
3604 switch (code)
3606 case NE_EXPR: /* - [c, c] */
3607 low = high = arg1;
3608 break;
3609 case EQ_EXPR: /* + [c, c] */
3610 in_p = ! in_p, low = high = arg1;
3611 break;
3612 case GT_EXPR: /* - [-, c] */
3613 low = 0, high = arg1;
3614 break;
3615 case GE_EXPR: /* + [c, -] */
3616 in_p = ! in_p, low = arg1, high = 0;
3617 break;
3618 case LT_EXPR: /* - [c, -] */
3619 low = arg1, high = 0;
3620 break;
3621 case LE_EXPR: /* + [-, c] */
3622 in_p = ! in_p, low = 0, high = arg1;
3623 break;
3624 default:
3625 gcc_unreachable ();
3628 /* If this is an unsigned comparison, we also know that EXP is
3629 greater than or equal to zero. We base the range tests we make
3630 on that fact, so we record it here so we can parse existing
3631 range tests. We test arg0_type since often the return type
3632 of, e.g. EQ_EXPR, is boolean. */
3633 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3635 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3636 in_p, low, high, 1,
3637 fold_convert (arg0_type, integer_zero_node),
3638 NULL_TREE))
3639 break;
3641 in_p = n_in_p, low = n_low, high = n_high;
3643 /* If the high bound is missing, but we have a nonzero low
3644 bound, reverse the range so it goes from zero to the low bound
3645 minus 1. */
3646 if (high == 0 && low && ! integer_zerop (low))
3648 in_p = ! in_p;
3649 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3650 integer_one_node, 0);
3651 low = fold_convert (arg0_type, integer_zero_node);
3655 exp = arg0;
3656 continue;
3658 case NEGATE_EXPR:
3659 /* (-x) IN [a,b] -> x in [-b, -a] */
3660 n_low = range_binop (MINUS_EXPR, exp_type,
3661 fold_convert (exp_type, integer_zero_node),
3662 0, high, 1);
3663 n_high = range_binop (MINUS_EXPR, exp_type,
3664 fold_convert (exp_type, integer_zero_node),
3665 0, low, 0);
3666 low = n_low, high = n_high;
3667 exp = arg0;
3668 continue;
3670 case BIT_NOT_EXPR:
3671 /* ~ X -> -X - 1 */
3672 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3673 fold_convert (exp_type, integer_one_node));
3674 continue;
3676 case PLUS_EXPR: case MINUS_EXPR:
3677 if (TREE_CODE (arg1) != INTEGER_CST)
3678 break;
3680 /* If EXP is signed, any overflow in the computation is undefined,
3681 so we don't worry about it so long as our computations on
3682 the bounds don't overflow. For unsigned, overflow is defined
3683 and this is exactly the right thing. */
3684 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3685 arg0_type, low, 0, arg1, 0);
3686 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3687 arg0_type, high, 1, arg1, 0);
3688 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3689 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3690 break;
3692 /* Check for an unsigned range which has wrapped around the maximum
3693 value thus making n_high < n_low, and normalize it. */
3694 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3696 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3697 integer_one_node, 0);
3698 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3699 integer_one_node, 0);
3701 /* If the range is of the form +/- [ x+1, x ], we won't
3702 be able to normalize it. But then, it represents the
3703 whole range or the empty set, so make it
3704 +/- [ -, - ]. */
3705 if (tree_int_cst_equal (n_low, low)
3706 && tree_int_cst_equal (n_high, high))
3707 low = high = 0;
3708 else
3709 in_p = ! in_p;
3711 else
3712 low = n_low, high = n_high;
3714 exp = arg0;
3715 continue;
3717 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3718 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3719 break;
3721 if (! INTEGRAL_TYPE_P (arg0_type)
3722 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3723 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3724 break;
3726 n_low = low, n_high = high;
3728 if (n_low != 0)
3729 n_low = fold_convert (arg0_type, n_low);
3731 if (n_high != 0)
3732 n_high = fold_convert (arg0_type, n_high);
3735 /* If we're converting arg0 from an unsigned type to exp's
3736 signed type, we will be doing the comparison as unsigned.
3737 The tests above have already verified that LOW and HIGH
3738 are both positive.
3740 So we have to ensure that we will handle large unsigned
3741 values the same way that the current signed bounds treat
3742 negative values. */
3744 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3746 tree high_positive;
3747 tree equiv_type = lang_hooks.types.type_for_mode
3748 (TYPE_MODE (arg0_type), 1);
3750 /* A range without an upper bound is, naturally, unbounded.
3751 Since convert would have cropped a very large value, use
3752 the max value for the destination type. */
3753 high_positive
3754 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3755 : TYPE_MAX_VALUE (arg0_type);
3757 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3758 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3759 fold_convert (arg0_type,
3760 high_positive),
3761 fold_convert (arg0_type,
3762 integer_one_node)));
3764 /* If the low bound is specified, "and" the range with the
3765 range for which the original unsigned value will be
3766 positive. */
3767 if (low != 0)
3769 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3770 1, n_low, n_high, 1,
3771 fold_convert (arg0_type,
3772 integer_zero_node),
3773 high_positive))
3774 break;
3776 in_p = (n_in_p == in_p);
3778 else
3780 /* Otherwise, "or" the range with the range of the input
3781 that will be interpreted as negative. */
3782 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3783 0, n_low, n_high, 1,
3784 fold_convert (arg0_type,
3785 integer_zero_node),
3786 high_positive))
3787 break;
3789 in_p = (in_p != n_in_p);
3793 exp = arg0;
3794 low = n_low, high = n_high;
3795 continue;
3797 default:
3798 break;
3801 break;
3804 /* If EXP is a constant, we can evaluate whether this is true or false. */
3805 if (TREE_CODE (exp) == INTEGER_CST)
3807 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3808 exp, 0, low, 0))
3809 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3810 exp, 1, high, 1)));
3811 low = high = 0;
3812 exp = 0;
3815 *pin_p = in_p, *plow = low, *phigh = high;
3816 return exp;
3819 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3820 type, TYPE, return an expression to test if EXP is in (or out of, depending
3821 on IN_P) the range. Return 0 if the test couldn't be created. */
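/* For example (assuming ASCII), the in_p range ['a', 'z'] on a char
   EXP is built, via the EXP - LOW rewrite at the end of this
   function, as (unsigned char) (EXP - 'a') <= 25.  */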
3823 static tree
3824 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3826 tree etype = TREE_TYPE (exp);
3827 tree value;
3829 if (! in_p)
3831 value = build_range_check (type, exp, 1, low, high);
3832 if (value != 0)
3833 return invert_truthvalue (value);
3835 return 0;
3838 if (low == 0 && high == 0)
3839 return fold_convert (type, integer_one_node);
3841 if (low == 0)
3842 return fold (build2 (LE_EXPR, type, exp, high));
3844 if (high == 0)
3845 return fold (build2 (GE_EXPR, type, exp, low));
3847 if (operand_equal_p (low, high, 0))
3848 return fold (build2 (EQ_EXPR, type, exp, low));
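/* When the low bound is zero, the whole check can be done in the
   equivalent unsigned type: e.g. 0 <= c && c <= 9 for signed char c
   becomes (unsigned char) c <= 9.  */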
3850 if (integer_zerop (low))
3852 if (! TYPE_UNSIGNED (etype))
3854 etype = lang_hooks.types.unsigned_type (etype);
3855 high = fold_convert (etype, high);
3856 exp = fold_convert (etype, exp);
3858 return build_range_check (type, exp, 1, 0, high);
3861 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3862 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3864 unsigned HOST_WIDE_INT lo;
3865 HOST_WIDE_INT hi;
3866 int prec;
3868 prec = TYPE_PRECISION (etype);
3869 if (prec <= HOST_BITS_PER_WIDE_INT)
3871 hi = 0;
3872 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3874 else
3876 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3877 lo = (unsigned HOST_WIDE_INT) -1;
3880 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3882 if (TYPE_UNSIGNED (etype))
3884 etype = lang_hooks.types.signed_type (etype);
3885 exp = fold_convert (etype, exp);
3887 return fold (build2 (GT_EXPR, type, exp,
3888 fold_convert (etype, integer_zero_node)));
3892 value = const_binop (MINUS_EXPR, high, low, 0);
3893 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3895 tree utype, minv, maxv;
3897 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3898 for the type in question, as we rely on this here. */
3899 switch (TREE_CODE (etype))
3901 case INTEGER_TYPE:
3902 case ENUMERAL_TYPE:
3903 case CHAR_TYPE:
3904 utype = lang_hooks.types.unsigned_type (etype);
3905 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3906 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3907 integer_one_node, 1);
3908 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3909 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3910 minv, 1, maxv, 1)))
3912 etype = utype;
3913 high = fold_convert (etype, high);
3914 low = fold_convert (etype, low);
3915 exp = fold_convert (etype, exp);
3916 value = const_binop (MINUS_EXPR, high, low, 0);
3918 break;
3919 default:
3920 break;
3924 if (value != 0 && ! TREE_OVERFLOW (value))
3925 return build_range_check (type,
3926 fold (build2 (MINUS_EXPR, etype, exp, low)),
3927 1, fold_convert (etype, integer_zero_node),
3928 value);
3930 return 0;
3933 /* Given two ranges, see if we can merge them into one. Return 1 if we
3934 can, 0 if we can't. Set the output range into the specified parameters. */
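/* Two illustrative cases: merging + [0, 9] with + [5, 20] yields
   + [5, 9], while merging the exclusions - [0, 9] and - [10, 20],
   which are adjacent, yields - [0, 20].  */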
3936 static int
3937 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3938 tree high0, int in1_p, tree low1, tree high1)
3940 int no_overlap;
3941 int subset;
3942 int temp;
3943 tree tem;
3944 int in_p;
3945 tree low, high;
3946 int lowequal = ((low0 == 0 && low1 == 0)
3947 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3948 low0, 0, low1, 0)));
3949 int highequal = ((high0 == 0 && high1 == 0)
3950 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3951 high0, 1, high1, 1)));
3953 /* Make range 0 be the range that starts first, or ends last if they
3954 start at the same value. Swap them if that is not the case. */
3955 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3956 low0, 0, low1, 0))
3957 || (lowequal
3958 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3959 high1, 1, high0, 1))))
3961 temp = in0_p, in0_p = in1_p, in1_p = temp;
3962 tem = low0, low0 = low1, low1 = tem;
3963 tem = high0, high0 = high1, high1 = tem;
3966 /* Now flag two cases, whether the ranges are disjoint or whether the
3967 second range is totally subsumed in the first. Note that the tests
3968 below are simplified by the ones above. */
3969 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3970 high0, 1, low1, 0));
3971 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3972 high1, 1, high0, 1));
3974 /* We now have four cases, depending on whether we are including or
3975 excluding the two ranges. */
3976 if (in0_p && in1_p)
3978 /* If they don't overlap, the result is false. If the second range
3979 is a subset it is the result. Otherwise, the range is from the start
3980 of the second to the end of the first. */
3981 if (no_overlap)
3982 in_p = 0, low = high = 0;
3983 else if (subset)
3984 in_p = 1, low = low1, high = high1;
3985 else
3986 in_p = 1, low = low1, high = high0;
3989 else if (in0_p && ! in1_p)
3991 /* If they don't overlap, the result is the first range. If they are
3992 equal, the result is false. If the second range is a subset of the
3993 first, and the ranges begin at the same place, we go from just after
3994 the end of the first range to the end of the second. If the second
3995 range is not a subset of the first, or if it is a subset and both
3996 ranges end at the same place, the range starts at the start of the
3997 first range and ends just before the second range.
3998 Otherwise, we can't describe this as a single range. */
3999 if (no_overlap)
4000 in_p = 1, low = low0, high = high0;
4001 else if (lowequal && highequal)
4002 in_p = 0, low = high = 0;
4003 else if (subset && lowequal)
4005 in_p = 1, high = high0;
4006 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4007 integer_one_node, 0);
4009 else if (! subset || highequal)
4011 in_p = 1, low = low0;
4012 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4013 integer_one_node, 0);
4015 else
4016 return 0;
4019 else if (! in0_p && in1_p)
4021 /* If they don't overlap, the result is the second range. If the second
4022 is a subset of the first, the result is false. Otherwise,
4023 the range starts just after the first range and ends at the
4024 end of the second. */
4025 if (no_overlap)
4026 in_p = 1, low = low1, high = high1;
4027 else if (subset || highequal)
4028 in_p = 0, low = high = 0;
4029 else
4031 in_p = 1, high = high1;
4032 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4033 integer_one_node, 0);
4037 else
4039 /* The case where we are excluding both ranges. Here the complex case
4040 is if they don't overlap. In that case, the only time we have a
4041 range is if they are adjacent. If the second is a subset of the
4042 first, the result is the first. Otherwise, the range to exclude
4043 starts at the beginning of the first range and ends at the end of the
4044 second. */
4045 if (no_overlap)
4047 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4048 range_binop (PLUS_EXPR, NULL_TREE,
4049 high0, 1,
4050 integer_one_node, 1),
4051 1, low1, 0)))
4052 in_p = 0, low = low0, high = high1;
4053 else
4055 /* Canonicalize - [min, x] into - [-, x]. */
4056 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4057 switch (TREE_CODE (TREE_TYPE (low0)))
4059 case ENUMERAL_TYPE:
4060 if (TYPE_PRECISION (TREE_TYPE (low0))
4061 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4062 break;
4063 /* FALLTHROUGH */
4064 case INTEGER_TYPE:
4065 case CHAR_TYPE:
4066 if (tree_int_cst_equal (low0,
4067 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4068 low0 = 0;
4069 break;
4070 case POINTER_TYPE:
4071 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4072 && integer_zerop (low0))
4073 low0 = 0;
4074 break;
4075 default:
4076 break;
4079 /* Canonicalize - [x, max] into - [x, -]. */
4080 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4081 switch (TREE_CODE (TREE_TYPE (high1)))
4083 case ENUMERAL_TYPE:
4084 if (TYPE_PRECISION (TREE_TYPE (high1))
4085 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4086 break;
4087 /* FALLTHROUGH */
4088 case INTEGER_TYPE:
4089 case CHAR_TYPE:
4090 if (tree_int_cst_equal (high1,
4091 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4092 high1 = 0;
4093 break;
4094 case POINTER_TYPE:
4095 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4096 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4097 high1, 1,
4098 integer_one_node, 1)))
4099 high1 = 0;
4100 break;
4101 default:
4102 break;
4105 /* The ranges might also be adjacent between the maximum and
4106 minimum values of the given type. For
4107 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4108 return + [x + 1, y - 1]. */
4109 if (low0 == 0 && high1 == 0)
4111 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4112 integer_one_node, 1);
4113 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4114 integer_one_node, 0);
4115 if (low == 0 || high == 0)
4116 return 0;
4118 in_p = 1;
4120 else
4121 return 0;
4124 else if (subset)
4125 in_p = 0, low = low0, high = high0;
4126 else
4127 in_p = 0, low = low0, high = high1;
4130 *pin_p = in_p, *plow = low, *phigh = high;
4131 return 1;
4135 /* Subroutine of fold, looking inside expressions of the form
4136 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4137 of the COND_EXPR. This function is being used also to optimize
4138 A op B ? C : A, by reversing the comparison first.
4140 Return a folded expression whose code is not a COND_EXPR
4141 anymore, or NULL_TREE if no folding opportunity is found. */
4143 static tree
4144 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4146 enum tree_code comp_code = TREE_CODE (arg0);
4147 tree arg00 = TREE_OPERAND (arg0, 0);
4148 tree arg01 = TREE_OPERAND (arg0, 1);
4149 tree arg1_type = TREE_TYPE (arg1);
4150 tree tem;
4152 STRIP_NOPS (arg1);
4153 STRIP_NOPS (arg2);
4155 /* If we have A op 0 ? A : -A, consider applying the following
4156 transformations:
4158 A == 0? A : -A same as -A
4159 A != 0? A : -A same as A
4160 A >= 0? A : -A same as abs (A)
4161 A > 0? A : -A same as abs (A)
4162 A <= 0? A : -A same as -abs (A)
4163 A < 0? A : -A same as -abs (A)
4165 None of these transformations work for modes with signed
4166 zeros. If A is +/-0, the first two transformations will
4167 change the sign of the result (from +0 to -0, or vice
4168 versa). The last four will fix the sign of the result,
4169 even though the original expressions could be positive or
4170 negative, depending on the sign of A.
4172 Note that all these transformations are correct if A is
4173 NaN, since the two alternatives (A and -A) are also NaNs. */
4174 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4175 ? real_zerop (arg01)
4176 : integer_zerop (arg01))
4177 && TREE_CODE (arg2) == NEGATE_EXPR
4178 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4179 switch (comp_code)
4181 case EQ_EXPR:
4182 case UNEQ_EXPR:
4183 tem = fold_convert (arg1_type, arg1);
4184 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4185 case NE_EXPR:
4186 case LTGT_EXPR:
4187 return pedantic_non_lvalue (fold_convert (type, arg1));
4188 case UNGE_EXPR:
4189 case UNGT_EXPR:
4190 if (flag_trapping_math)
4191 break;
4192 /* Fall through. */
4193 case GE_EXPR:
4194 case GT_EXPR:
4195 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4196 arg1 = fold_convert (lang_hooks.types.signed_type
4197 (TREE_TYPE (arg1)), arg1);
4198 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4199 return pedantic_non_lvalue (fold_convert (type, tem));
4200 case UNLE_EXPR:
4201 case UNLT_EXPR:
4202 if (flag_trapping_math)
4203 break;
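/* Fall through. */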
4204 case LE_EXPR:
4205 case LT_EXPR:
4206 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4207 arg1 = fold_convert (lang_hooks.types.signed_type
4208 (TREE_TYPE (arg1)), arg1);
4209 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4210 return negate_expr (fold_convert (type, tem));
4211 default:
4212 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4213 break;
4216 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4217 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4218 both transformations are correct when A is NaN: A != 0
4219 is then true, and A == 0 is false. */
4221 if (integer_zerop (arg01) && integer_zerop (arg2))
4223 if (comp_code == NE_EXPR)
4224 return pedantic_non_lvalue (fold_convert (type, arg1));
4225 else if (comp_code == EQ_EXPR)
4226 return fold_convert (type, integer_zero_node);
4229 /* Try some transformations of A op B ? A : B.
4231 A == B? A : B same as B
4232 A != B? A : B same as A
4233 A >= B? A : B same as max (A, B)
4234 A > B? A : B same as max (B, A)
4235 A <= B? A : B same as min (A, B)
4236 A < B? A : B same as min (B, A)
4238 As above, these transformations don't work in the presence
4239 of signed zeros. For example, if A and B are zeros of
4240 opposite sign, the first two transformations will change
4241 the sign of the result. In the last four, the original
4242 expressions give different results for (A=+0, B=-0) and
4243 (A=-0, B=+0), but the transformed expressions do not.
4245 The first two transformations are correct if either A or B
4246 is a NaN. In the first transformation, the condition will
4247 be false, and B will indeed be chosen. In the case of the
4248 second transformation, the condition A != B will be true,
4249 and A will be chosen.
4251 The conversions to max() and min() are not correct if B is
4252 a number and A is not. The conditions in the original
4253 expressions will be false, so all four give B. The min()
4254 and max() versions would give a NaN instead. */
4255 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4257 tree comp_op0 = arg00;
4258 tree comp_op1 = arg01;
4259 tree comp_type = TREE_TYPE (comp_op0);
4261 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4262 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4264 comp_type = type;
4265 comp_op0 = arg1;
4266 comp_op1 = arg2;
4269 switch (comp_code)
4271 case EQ_EXPR:
4272 return pedantic_non_lvalue (fold_convert (type, arg2));
4273 case NE_EXPR:
4274 return pedantic_non_lvalue (fold_convert (type, arg1));
4275 case LE_EXPR:
4276 case LT_EXPR:
4277 case UNLE_EXPR:
4278 case UNLT_EXPR:
4279 /* In C++ a ?: expression can be an lvalue, so put the
4280 operand which will be used if they are equal first
4281 so that we can convert this back to the
4282 corresponding COND_EXPR. */
4283 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4285 comp_op0 = fold_convert (comp_type, comp_op0);
4286 comp_op1 = fold_convert (comp_type, comp_op1);
4287 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4288 ? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
4289 : fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
4290 return pedantic_non_lvalue (fold_convert (type, tem));
4292 break;
4293 case GE_EXPR:
4294 case GT_EXPR:
4295 case UNGE_EXPR:
4296 case UNGT_EXPR:
4297 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4299 comp_op0 = fold_convert (comp_type, comp_op0);
4300 comp_op1 = fold_convert (comp_type, comp_op1);
4301 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4302 ? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
4303 : fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
4304 return pedantic_non_lvalue (fold_convert (type, tem));
4306 break;
4307 case UNEQ_EXPR:
4308 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4309 return pedantic_non_lvalue (fold_convert (type, arg2));
4310 break;
4311 case LTGT_EXPR:
4312 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4313 return pedantic_non_lvalue (fold_convert (type, arg1));
4314 break;
4315 default:
4316 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4317 break;
4321 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4322 we might still be able to simplify this. For example,
4323 if C1 is one less or one more than C2, this might have started
4324 out as a MIN or MAX and been transformed by this function.
4325 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
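/* E.g. A < 6 ? A : 5 is recognized below as MIN (A, 5), since here
   C1 == C2 + 1.  */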
4327 if (INTEGRAL_TYPE_P (type)
4328 && TREE_CODE (arg01) == INTEGER_CST
4329 && TREE_CODE (arg2) == INTEGER_CST)
4330 switch (comp_code)
4332 case EQ_EXPR:
4333 /* We can replace A with C1 in this case. */
4334 arg1 = fold_convert (type, arg01);
4335 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4337 case LT_EXPR:
4338 /* If C1 is C2 + 1, this is min(A, C2). */
4339 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4340 OEP_ONLY_CONST)
4341 && operand_equal_p (arg01,
4342 const_binop (PLUS_EXPR, arg2,
4343 integer_one_node, 0),
4344 OEP_ONLY_CONST))
4345 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4346 type, arg1, arg2)));
4347 break;
4349 case LE_EXPR:
4350 /* If C1 is C2 - 1, this is min(A, C2). */
4351 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4352 OEP_ONLY_CONST)
4353 && operand_equal_p (arg01,
4354 const_binop (MINUS_EXPR, arg2,
4355 integer_one_node, 0),
4356 OEP_ONLY_CONST))
4357 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4358 type, arg1, arg2)));
4359 break;
4361 case GT_EXPR:
4362 /* If C1 is C2 - 1, this is max(A, C2). */
4363 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4364 OEP_ONLY_CONST)
4365 && operand_equal_p (arg01,
4366 const_binop (MINUS_EXPR, arg2,
4367 integer_one_node, 0),
4368 OEP_ONLY_CONST))
4369 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4370 type, arg1, arg2)));
4371 break;
4373 case GE_EXPR:
4374 /* If C1 is C2 + 1, this is max(A, C2). */
4375 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4376 OEP_ONLY_CONST)
4377 && operand_equal_p (arg01,
4378 const_binop (PLUS_EXPR, arg2,
4379 integer_one_node, 0),
4380 OEP_ONLY_CONST))
4381 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4382 type, arg1, arg2)));
4383 break;
4384 case NE_EXPR:
4385 break;
4386 default:
4387 gcc_unreachable ();
4390 return NULL_TREE;
4395 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4396 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4397 #endif
4399 /* EXP is some logical combination of boolean tests. See if we can
4400 merge it into some range test. Return the new tree if so. */
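/* For example, c == 4 || c == 5 gives the ranges [4, 4] and [5, 5];
   after the OR inversion below they merge into a single range, and
   build_range_check produces (unsigned) (c - 4) <= 1.  */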
4402 static tree
4403 fold_range_test (tree exp)
4405 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4406 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4407 int in0_p, in1_p, in_p;
4408 tree low0, low1, low, high0, high1, high;
4409 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4410 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4411 tree tem;
4413 /* If this is an OR operation, invert both sides; we will invert
4414 again at the end. */
4415 if (or_op)
4416 in0_p = ! in0_p, in1_p = ! in1_p;
4418 /* If both expressions are the same, if we can merge the ranges, and we
4419 can build the range test, return it or it inverted. If one of the
4420 ranges is always true or always false, consider it to be the same
4421 expression as the other. */
4422 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4423 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4424 in1_p, low1, high1)
4425 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4426 lhs != 0 ? lhs
4427 : rhs != 0 ? rhs : integer_zero_node,
4428 in_p, low, high))))
4429 return or_op ? invert_truthvalue (tem) : tem;
4431 /* On machines where the branch cost is expensive, if this is a
4432 short-circuited branch and the underlying object on both sides
4433 is the same, make a non-short-circuit operation. */
4434 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4435 && lhs != 0 && rhs != 0
4436 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4437 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4438 && operand_equal_p (lhs, rhs, 0))
4440 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4441 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4442 which cases we can't do this. */
4443 if (simple_operand_p (lhs))
4444 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4445 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4446 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4447 TREE_OPERAND (exp, 1));
4449 else if (lang_hooks.decls.global_bindings_p () == 0
4450 && ! CONTAINS_PLACEHOLDER_P (lhs))
4452 tree common = save_expr (lhs);
4454 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4455 or_op ? ! in0_p : in0_p,
4456 low0, high0))
4457 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4458 or_op ? ! in1_p : in1_p,
4459 low1, high1))))
4460 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4461 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4462 TREE_TYPE (exp), lhs, rhs);
4466 return 0;
4469 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4470 bit value. Arrange things so the extra bits will be set to zero if and
4471 only if C is sign-extended to its full width. If MASK is nonzero,
4472 it is an INTEGER_CST that should be AND'ed with the extra bits. */
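/* A worked example with MASK == 0: for P == 8 in a 32-bit mode,
   C == 0xffffff80 (already sign-extended) yields 0x80, i.e. the
   extra bits come out zero, whereas C == 0x80 yields 0xffffff80.  */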
4474 static tree
4475 unextend (tree c, int p, int unsignedp, tree mask)
4477 tree type = TREE_TYPE (c);
4478 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4479 tree temp;
4481 if (p == modesize || unsignedp)
4482 return c;
4484 /* We work by getting just the sign bit into the low-order bit, then
4485 into the high-order bit, then sign-extend. We then XOR that value
4486 with C. */
4487 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4488 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4490 /* We must use a signed type in order to get an arithmetic right shift.
4491 However, we must also avoid introducing accidental overflows, so that
4492 a subsequent call to integer_zerop will work. Hence we must
4493 do the type conversion here. At this point, the constant is either
4494 zero or one, and the conversion to a signed type can never overflow.
4495 We could get an overflow if this conversion is done anywhere else. */
4496 if (TYPE_UNSIGNED (type))
4497 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4499 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4500 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4501 if (mask != 0)
4502 temp = const_binop (BIT_AND_EXPR, temp,
4503 fold_convert (TREE_TYPE (c), mask), 0);
4504 /* If necessary, convert the type back to match the type of C. */
4505 if (TYPE_UNSIGNED (type))
4506 temp = fold_convert (type, temp);
4508 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4511 /* Find ways of folding logical expressions of LHS and RHS:
4512 Try to merge two comparisons to the same innermost item.
4513 Look for range tests like "ch >= '0' && ch <= '9'".
4514 Look for combinations of simple terms on machines with expensive branches
4515 and evaluate the RHS unconditionally.
4517 For example, if we have p->a == 2 && p->b == 4 and we can make an
4518 object large enough to span both A and B, we can do this with a comparison
4519 against the object ANDed with a mask.
4521 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4522 operations to do this with one comparison.
4524 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4525 function and the one above.
4527 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4528 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4530 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4531 two operands.
4533 We return the simplified tree or 0 if no optimization is possible. */
4535 static tree
4536 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4538 /* If this is the "or" of two comparisons, we can do something if
4539 the comparisons are NE_EXPR. If this is the "and", we can do something
4540 if the comparisons are EQ_EXPR. I.e.,
4541 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4543 WANTED_CODE is this operation code. For single bit fields, we can
4544 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4545 comparison for one-bit fields. */
4547 enum tree_code wanted_code;
4548 enum tree_code lcode, rcode;
4549 tree ll_arg, lr_arg, rl_arg, rr_arg;
4550 tree ll_inner, lr_inner, rl_inner, rr_inner;
4551 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4552 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4553 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4554 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4555 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4556 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4557 enum machine_mode lnmode, rnmode;
4558 tree ll_mask, lr_mask, rl_mask, rr_mask;
4559 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4560 tree l_const, r_const;
4561 tree lntype, rntype, result;
4562 int first_bit, end_bit;
4563 int volatilep;
4565 /* Start by getting the comparison codes. Fail if anything is volatile.
4566 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4567 it were surrounded with a NE_EXPR. */
4569 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4570 return 0;
4572 lcode = TREE_CODE (lhs);
4573 rcode = TREE_CODE (rhs);
4575 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4577 lhs = build2 (NE_EXPR, truth_type, lhs,
4578 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4579 lcode = NE_EXPR;
4582 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4584 rhs = build2 (NE_EXPR, truth_type, rhs,
4585 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4586 rcode = NE_EXPR;
4589 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4590 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4591 return 0;
4593 ll_arg = TREE_OPERAND (lhs, 0);
4594 lr_arg = TREE_OPERAND (lhs, 1);
4595 rl_arg = TREE_OPERAND (rhs, 0);
4596 rr_arg = TREE_OPERAND (rhs, 1);
4598 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4599 if (simple_operand_p (ll_arg)
4600 && simple_operand_p (lr_arg))
4602 tree result;
4603 if (operand_equal_p (ll_arg, rl_arg, 0)
4604 && operand_equal_p (lr_arg, rr_arg, 0))
4606 result = combine_comparisons (code, lcode, rcode,
4607 truth_type, ll_arg, lr_arg);
4608 if (result)
4609 return result;
4611 else if (operand_equal_p (ll_arg, rr_arg, 0)
4612 && operand_equal_p (lr_arg, rl_arg, 0))
4614 result = combine_comparisons (code, lcode,
4615 swap_tree_comparison (rcode),
4616 truth_type, ll_arg, lr_arg);
4617 if (result)
4618 return result;
4622 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4623 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4625 /* If the RHS can be evaluated unconditionally and its operands are
4626 simple, it wins to evaluate the RHS unconditionally on machines
4627 with expensive branches. In this case, this isn't a comparison
4628 that can be merged. Avoid doing this if the RHS is a floating-point
4629 comparison since those can trap. */
4631 if (BRANCH_COST >= 2
4632 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4633 && simple_operand_p (rl_arg)
4634 && simple_operand_p (rr_arg))
4636 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4637 if (code == TRUTH_OR_EXPR
4638 && lcode == NE_EXPR && integer_zerop (lr_arg)
4639 && rcode == NE_EXPR && integer_zerop (rr_arg)
4640 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4641 return build2 (NE_EXPR, truth_type,
4642 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4643 ll_arg, rl_arg),
4644 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4646 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4647 if (code == TRUTH_AND_EXPR
4648 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4649 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4650 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4651 return build2 (EQ_EXPR, truth_type,
4652 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4653 ll_arg, rl_arg),
4654 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4656 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4657 return build2 (code, truth_type, lhs, rhs);
4660 /* See if the comparisons can be merged. Then get all the parameters for
4661 each side. */
4663 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4664 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4665 return 0;
4667 volatilep = 0;
4668 ll_inner = decode_field_reference (ll_arg,
4669 &ll_bitsize, &ll_bitpos, &ll_mode,
4670 &ll_unsignedp, &volatilep, &ll_mask,
4671 &ll_and_mask);
4672 lr_inner = decode_field_reference (lr_arg,
4673 &lr_bitsize, &lr_bitpos, &lr_mode,
4674 &lr_unsignedp, &volatilep, &lr_mask,
4675 &lr_and_mask);
4676 rl_inner = decode_field_reference (rl_arg,
4677 &rl_bitsize, &rl_bitpos, &rl_mode,
4678 &rl_unsignedp, &volatilep, &rl_mask,
4679 &rl_and_mask);
4680 rr_inner = decode_field_reference (rr_arg,
4681 &rr_bitsize, &rr_bitpos, &rr_mode,
4682 &rr_unsignedp, &volatilep, &rr_mask,
4683 &rr_and_mask);
4685 /* The inner operation on the lhs of each comparison must be the
4686 same if we are to be able to do anything.
4687 Then see if we have constants. If not, the same must be true for
4688 the rhs's. */
4689 if (volatilep || ll_inner == 0 || rl_inner == 0
4690 || ! operand_equal_p (ll_inner, rl_inner, 0))
4691 return 0;
4693 if (TREE_CODE (lr_arg) == INTEGER_CST
4694 && TREE_CODE (rr_arg) == INTEGER_CST)
4695 l_const = lr_arg, r_const = rr_arg;
4696 else if (lr_inner == 0 || rr_inner == 0
4697 || ! operand_equal_p (lr_inner, rr_inner, 0))
4698 return 0;
4699 else
4700 l_const = r_const = 0;
4702 /* If either comparison code is not correct for our logical operation,
4703 fail. However, we can convert a one-bit comparison against zero into
4704 the opposite comparison against that bit being set in the field. */
4706 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4707 if (lcode != wanted_code)
4709 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4711 /* Make the left operand unsigned, since we are only interested
4712 in the value of one bit. Otherwise we are doing the wrong
4713 thing below. */
4714 ll_unsignedp = 1;
4715 l_const = ll_mask;
4717 else
4718 return 0;
4721 /* This is analogous to the code for l_const above. */
4722 if (rcode != wanted_code)
4724 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4726 rl_unsignedp = 1;
4727 r_const = rl_mask;
4729 else
4730 return 0;
4733 /* After this point all optimizations will generate bit-field
4734 references, which we might not want. */
4735 if (! lang_hooks.can_use_bit_fields_p ())
4736 return 0;
4738 /* See if we can find a mode that contains both fields being compared on
4739 the left. If we can't, fail. Otherwise, update all constants and masks
4740 to be relative to a field of that size. */
4741 first_bit = MIN (ll_bitpos, rl_bitpos);
4742 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4743 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4744 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4745 volatilep);
4746 if (lnmode == VOIDmode)
4747 return 0;
4749 lnbitsize = GET_MODE_BITSIZE (lnmode);
4750 lnbitpos = first_bit & ~ (lnbitsize - 1);
4751 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4752 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4754 if (BYTES_BIG_ENDIAN)
4756 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4757 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4760 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4761 size_int (xll_bitpos), 0);
4762 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4763 size_int (xrl_bitpos), 0);
4765 if (l_const)
4767 l_const = fold_convert (lntype, l_const);
4768 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4769 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4770 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4771 fold (build1 (BIT_NOT_EXPR,
4772 lntype, ll_mask)),
4773 0)))
4775 warning ("comparison is always %d", wanted_code == NE_EXPR);
4777 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4780 if (r_const)
4782 r_const = fold_convert (lntype, r_const);
4783 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4784 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4785 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4786 fold (build1 (BIT_NOT_EXPR,
4787 lntype, rl_mask)),
4788 0)))
4790 warning ("comparison is always %d", wanted_code == NE_EXPR);
4792 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4796 /* If the right sides are not constant, do the same for them. Also,
4797 disallow this optimization if a size or signedness mismatch occurs
4798 between the left and right sides. */
4799 if (l_const == 0)
4801 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4802 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4803 /* Make sure the two fields on the right
4804 correspond to the left without being swapped. */
4805 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4806 return 0;
4808 first_bit = MIN (lr_bitpos, rr_bitpos);
4809 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4810 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4811 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4812 volatilep);
4813 if (rnmode == VOIDmode)
4814 return 0;
4816 rnbitsize = GET_MODE_BITSIZE (rnmode);
4817 rnbitpos = first_bit & ~ (rnbitsize - 1);
4818 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4819 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4821 if (BYTES_BIG_ENDIAN)
4823 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4824 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4827 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4828 size_int (xlr_bitpos), 0);
4829 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4830 size_int (xrr_bitpos), 0);
4832 /* Make a mask that corresponds to both fields being compared.
4833 Do this for both items being compared. If the operands are the
4834 same size and the bits being compared are in the same position
4835 then we can do this by masking both and comparing the masked
4836 results. */
4837 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4838 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4839 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4841 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4842 ll_unsignedp || rl_unsignedp);
4843 if (! all_ones_mask_p (ll_mask, lnbitsize))
4844 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4846 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4847 lr_unsignedp || rr_unsignedp);
4848 if (! all_ones_mask_p (lr_mask, rnbitsize))
4849 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4851 return build2 (wanted_code, truth_type, lhs, rhs);
4854 /* There is still another way we can do something: If both pairs of
4855 fields being compared are adjacent, we may be able to make a wider
4856 field containing them both.
4858 Note that we still must mask the lhs/rhs expressions. Furthermore,
4859 the mask must be shifted to account for the shift done by
4860 make_bit_field_ref. */
4861 if ((ll_bitsize + ll_bitpos == rl_bitpos
4862 && lr_bitsize + lr_bitpos == rr_bitpos)
4863 || (ll_bitpos == rl_bitpos + rl_bitsize
4864 && lr_bitpos == rr_bitpos + rr_bitsize))
4866 tree type;
4868 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4869 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4870 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4871 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4873 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4874 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4875 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4876 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4878 /* Convert to the smaller type before masking out unwanted bits. */
4879 type = lntype;
4880 if (lntype != rntype)
4882 if (lnbitsize > rnbitsize)
4884 lhs = fold_convert (rntype, lhs);
4885 ll_mask = fold_convert (rntype, ll_mask);
4886 type = rntype;
4888 else if (lnbitsize < rnbitsize)
4890 rhs = fold_convert (lntype, rhs);
4891 lr_mask = fold_convert (lntype, lr_mask);
4892 type = lntype;
4896 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4897 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4899 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4900 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4902 return build2 (wanted_code, truth_type, lhs, rhs);
4905 return 0;
4908 /* Handle the case of comparisons with constants. If there is something in
4909 common between the masks, those bits of the constants must be the same.
4910 If not, the condition is always false. Test for this to avoid generating
4911 incorrect code below. */
4912 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4913 if (! integer_zerop (result)
4914 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4915 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4917 if (wanted_code == NE_EXPR)
4919 warning ("%<or%> of unmatched not-equal tests is always 1");
4920 return constant_boolean_node (true, truth_type);
4922 else
4924 warning ("%<and%> of mutually exclusive equal-tests is always 0");
4925 return constant_boolean_node (false, truth_type);
4929 /* Construct the expression we will return. First get the component
4930 reference we will make. Unless the mask is all ones the width of
4931 that field, perform the mask operation. Then compare with the
4932 merged constant. */
4933 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4934 ll_unsignedp || rl_unsignedp);
4936 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4937 if (! all_ones_mask_p (ll_mask, lnbitsize))
4938 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4940 return build2 (wanted_code, truth_type, result,
4941 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4944 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4945 constant. */
4947 static tree
4948 optimize_minmax_comparison (tree t)
4950 tree type = TREE_TYPE (t);
4951 tree arg0 = TREE_OPERAND (t, 0);
4952 enum tree_code op_code;
4953 tree comp_const = TREE_OPERAND (t, 1);
4954 tree minmax_const;
4955 int consts_equal, consts_lt;
4956 tree inner;
4958 STRIP_SIGN_NOPS (arg0);
4960 op_code = TREE_CODE (arg0);
4961 minmax_const = TREE_OPERAND (arg0, 1);
4962 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4963 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4964 inner = TREE_OPERAND (arg0, 0);
4966 /* If something does not permit us to optimize, return the original tree. */
4967 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4968 || TREE_CODE (comp_const) != INTEGER_CST
4969 || TREE_CONSTANT_OVERFLOW (comp_const)
4970 || TREE_CODE (minmax_const) != INTEGER_CST
4971 || TREE_CONSTANT_OVERFLOW (minmax_const))
4972 return t;
4974 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4975 and GT_EXPR, doing the rest with recursive calls using logical
4976 simplifications. */
4977 switch (TREE_CODE (t))
4979 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4980 return
4981 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4983 case GE_EXPR:
4984 return
4985 fold (build2 (TRUTH_ORIF_EXPR, type,
4986 optimize_minmax_comparison
4987 (build2 (EQ_EXPR, type, arg0, comp_const)),
4988 optimize_minmax_comparison
4989 (build2 (GT_EXPR, type, arg0, comp_const))));
4991 case EQ_EXPR:
4992 if (op_code == MAX_EXPR && consts_equal)
4993 /* MAX (X, 0) == 0 -> X <= 0 */
4994 return fold (build2 (LE_EXPR, type, inner, comp_const));
4996 else if (op_code == MAX_EXPR && consts_lt)
4997 /* MAX (X, 0) == 5 -> X == 5 */
4998 return fold (build2 (EQ_EXPR, type, inner, comp_const));
5000 else if (op_code == MAX_EXPR)
5001 /* MAX (X, 0) == -1 -> false */
5002 return omit_one_operand (type, integer_zero_node, inner);
5004 else if (consts_equal)
5005 /* MIN (X, 0) == 0 -> X >= 0 */
5006 return fold (build2 (GE_EXPR, type, inner, comp_const));
5008 else if (consts_lt)
5009 /* MIN (X, 0) == 5 -> false */
5010 return omit_one_operand (type, integer_zero_node, inner);
5012 else
5013 /* MIN (X, 0) == -1 -> X == -1 */
5014 return fold (build2 (EQ_EXPR, type, inner, comp_const));
5016 case GT_EXPR:
5017 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5018 /* MAX (X, 0) > 0 -> X > 0
5019 MAX (X, 0) > 5 -> X > 5 */
5020 return fold (build2 (GT_EXPR, type, inner, comp_const));
5022 else if (op_code == MAX_EXPR)
5023 /* MAX (X, 0) > -1 -> true */
5024 return omit_one_operand (type, integer_one_node, inner);
5026 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5027 /* MIN (X, 0) > 0 -> false
5028 MIN (X, 0) > 5 -> false */
5029 return omit_one_operand (type, integer_zero_node, inner);
5031 else
5032 /* MIN (X, 0) > -1 -> X > -1 */
5033 return fold (build2 (GT_EXPR, type, inner, comp_const));
5035 default:
5036 return t;
5040 /* T is an integer expression that is being multiplied or divided by, or
5041 taken modulo, a constant C (CODE says which operation and what kind of
5042 divide or modulus). See if we can eliminate that operation by folding it with
5043 other operations already in T. WIDE_TYPE, if non-null, is a type that
5044 should be used for the computation if wider than our type.
5046 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5047 (X * 2) + (Y * 4). We must, however, be assured that either the original
5048 expression would not overflow or that overflow is undefined for the type
5049 in the language in question.
5051 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5052 the machine has a multiply-accumulate insn or that this is part of an
5053 addressing calculation.
5055 If we return a non-null expression, it is an equivalent form of the
5056 original computation, but need not be in the original type. */
5058 static tree
5059 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5061 /* To avoid exponential search depth, refuse to allow recursion past
5062 three levels. Beyond that (1) it's highly unlikely that we'll find
5063 something interesting and (2) we've probably processed it before
5064 when we built the inner expression. */
5066 static int depth;
5067 tree ret;
5069 if (depth > 3)
5070 return NULL;
5072 depth++;
5073 ret = extract_muldiv_1 (t, c, code, wide_type);
5074 depth--;
5076 return ret;
5079 static tree
5080 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5082 tree type = TREE_TYPE (t);
5083 enum tree_code tcode = TREE_CODE (t);
5084 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5085 > GET_MODE_SIZE (TYPE_MODE (type)))
5086 ? wide_type : type);
5087 tree t1, t2;
5088 int same_p = tcode == code;
5089 tree op0 = NULL_TREE, op1 = NULL_TREE;
5091 /* Don't deal with constants of zero here; they confuse the code below. */
5092 if (integer_zerop (c))
5093 return NULL_TREE;
5095 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5096 op0 = TREE_OPERAND (t, 0);
5098 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5099 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5101 /* Note that we need not handle conditional operations here since fold
5102 already handles those cases. So just do arithmetic here. */
5103 switch (tcode)
5105 case INTEGER_CST:
5106 /* For a constant, we can always simplify if we are a multiply
5107 or (for divide and modulus) if it is a multiple of our constant. */
5108 if (code == MULT_EXPR
5109 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5110 return const_binop (code, fold_convert (ctype, t),
5111 fold_convert (ctype, c), 0);
5112 break;
5114 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5115 /* If op0 is an expression ... */
5116 if ((COMPARISON_CLASS_P (op0)
5117 || UNARY_CLASS_P (op0)
5118 || BINARY_CLASS_P (op0)
5119 || EXPRESSION_CLASS_P (op0))
5120 /* ... and is unsigned, and its type is smaller than ctype,
5121 then we cannot pass through as widening. */
5122 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5123 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5124 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5125 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5126 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5127 /* ... or this is a truncation (t is narrower than op0),
5128 then we cannot pass through this narrowing. */
5129 || (GET_MODE_SIZE (TYPE_MODE (type))
5130 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5131 /* ... or signedness changes for division or modulus,
5132 then we cannot pass through this conversion. */
5133 || (code != MULT_EXPR
5134 && (TYPE_UNSIGNED (ctype)
5135 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5136 break;
5138 /* Pass the constant down and see if we can make a simplification. If
5139 we can, replace this expression with the inner simplification for
5140 possible later conversion to our or some other type. */
5141 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5142 && TREE_CODE (t2) == INTEGER_CST
5143 && ! TREE_CONSTANT_OVERFLOW (t2)
5144 && (0 != (t1 = extract_muldiv (op0, t2, code,
5145 code == MULT_EXPR
5146 ? ctype : NULL_TREE))))
5147 return t1;
5148 break;
5150 case NEGATE_EXPR: case ABS_EXPR:
5151 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5152 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5153 break;
5155 case MIN_EXPR: case MAX_EXPR:
5156 /* If widening the type changes the signedness, then we can't perform
5157 this optimization as that changes the result. */
5158 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5159 break;
5161 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5162 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5163 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5165 if (tree_int_cst_sgn (c) < 0)
5166 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5168 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5169 fold_convert (ctype, t2)));
5171 break;
5173 case LSHIFT_EXPR: case RSHIFT_EXPR:
5174 /* If the second operand is constant, this is a multiplication
5175 or floor division by a power of two, so we can treat it that
5176 way unless the multiplier or divisor overflows. Signed
5177 left-shift overflow is implementation-defined rather than
5178 undefined in C90, so do not convert signed left shift into
5179 multiplication. */
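/* E.g. for unsigned OP0, OP0 << 3 is rewritten below as OP0 * 8,
   and OP0 >> 2 as the floor division OP0 / 4, before recursing.  */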
5180 if (TREE_CODE (op1) == INTEGER_CST
5181 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5182 /* const_binop may not detect overflow correctly,
5183 so check for it explicitly here. */
5184 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5185 && TREE_INT_CST_HIGH (op1) == 0
5186 && 0 != (t1 = fold_convert (ctype,
5187 const_binop (LSHIFT_EXPR,
5188 size_one_node,
5189 op1, 0)))
5190 && ! TREE_OVERFLOW (t1))
5191 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5192 ? MULT_EXPR : FLOOR_DIV_EXPR,
5193 ctype, fold_convert (ctype, op0), t1),
5194 c, code, wide_type);
5195 break;
5197 case PLUS_EXPR: case MINUS_EXPR:
5198 /* See if we can eliminate the operation on both sides. If we can, we
5199 can return a new PLUS or MINUS. If we can't, the only remaining
5200 cases where we can do anything are if the second operand is a
5201 constant. */
5202 t1 = extract_muldiv (op0, c, code, wide_type);
5203 t2 = extract_muldiv (op1, c, code, wide_type);
5204 if (t1 != 0 && t2 != 0
5205 && (code == MULT_EXPR
5206 /* If not multiplication, we can only do this if both operands
5207 are divisible by c. */
5208 || (multiple_of_p (ctype, op0, c)
5209 && multiple_of_p (ctype, op1, c))))
5210 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5211 fold_convert (ctype, t2)));
5213 /* If this was a subtraction, negate OP1 and set it to be an addition.
5214 This simplifies the logic below. */
5215 if (tcode == MINUS_EXPR)
5216 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5218 if (TREE_CODE (op1) != INTEGER_CST)
5219 break;
5221 /* If either OP1 or C is negative, this optimization is not safe for
5222 some of the division and remainder types, while for others we need
5223 to change the code. */
5224 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5226 if (code == CEIL_DIV_EXPR)
5227 code = FLOOR_DIV_EXPR;
5228 else if (code == FLOOR_DIV_EXPR)
5229 code = CEIL_DIV_EXPR;
5230 else if (code != MULT_EXPR
5231 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5232 break;
5235 /* If it's a multiply or a division/modulus operation of a multiple
5236 of our constant, do the operation and verify it doesn't overflow. */
5237 if (code == MULT_EXPR
5238 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5240 op1 = const_binop (code, fold_convert (ctype, op1),
5241 fold_convert (ctype, c), 0);
5242 /* We allow the constant to overflow with wrapping semantics. */
5243 if (op1 == 0
5244 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5245 break;
5247 else
5248 break;
5250 /* If we have an unsigned type that is not a sizetype, we cannot widen
5251 the operation since it will change the result if the original
5252 computation overflowed. */
5253 if (TYPE_UNSIGNED (ctype)
5254 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5255 && ctype != type)
5256 break;
5258 /* If we were able to eliminate our operation from the first side,
5259 apply our operation to the second side and reform the PLUS. */
5260 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5261 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5263 /* The last case is if we are a multiply. In that case, we can
5264 apply the distributive law to commute the multiply and addition
5265 if the multiplication of the constants doesn't overflow. */
5266 if (code == MULT_EXPR)
5267 return fold (build2 (tcode, ctype,
5268 fold (build2 (code, ctype,
5269 fold_convert (ctype, op0),
5270 fold_convert (ctype, c))),
5271 op1));
5273 break;
5275 case MULT_EXPR:
5276 /* We have a special case here if we are doing something like
5277 (C * 8) % 4 since we know that's zero. */
5278 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5279 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5280 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5281 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5282 return omit_one_operand (type, integer_zero_node, op0);
5284 /* ... fall through ... */
5286 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5287 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5288 /* If we can extract our operation from the LHS, do so and return a
5289 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5290 do something only if the second operand is a constant. */
5291 if (same_p
5292 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5293 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5294 fold_convert (ctype, op1)));
5295 else if (tcode == MULT_EXPR && code == MULT_EXPR
5296 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5297 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5298 fold_convert (ctype, t1)));
5299 else if (TREE_CODE (op1) != INTEGER_CST)
5300 return 0;
5302 /* If these are the same operation types, we can associate them
5303 assuming no overflow. */
5304 if (tcode == code
5305 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5306 fold_convert (ctype, c), 0))
5307 && ! TREE_OVERFLOW (t1))
5308 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5310 /* If these operations "cancel" each other, we have the main
5311 optimizations of this pass, which occur when either constant is a
5312 multiple of the other, in which case we replace this with either an
5313 operation of CODE or TCODE.
5315 If we have an unsigned type that is not a sizetype, we cannot do
5316 this since it will change the result if the original computation
5317 overflowed. */
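/* Under those conditions, (X * 8) / 4 cancels to X * 2 via the
   first branch below, and (X * 4) / 8 to X / 2 via the second.  */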
5318 if ((! TYPE_UNSIGNED (ctype)
5319 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5320 && ! flag_wrapv
5321 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5322 || (tcode == MULT_EXPR
5323 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5324 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5326 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5327 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5328 fold_convert (ctype,
5329 const_binop (TRUNC_DIV_EXPR,
5330 op1, c, 0))));
5331 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5332 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5333 fold_convert (ctype,
5334 const_binop (TRUNC_DIV_EXPR,
5335 c, op1, 0))));
5337 break;
5339 default:
5340 break;
5343 return 0;
5346 /* Return a node which has the indicated constant VALUE (either 0 or
5347 1), and is of the indicated TYPE. */
5349 tree
5350 constant_boolean_node (int value, tree type)
5352 if (type == integer_type_node)
5353 return value ? integer_one_node : integer_zero_node;
5354 else if (type == boolean_type_node)
5355 return value ? boolean_true_node : boolean_false_node;
5356 else if (TREE_CODE (type) == BOOLEAN_TYPE)
5357 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5358 : integer_zero_node);
5359 else
5360 return build_int_cst (type, value);
5363 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5364 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5365 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5366 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5367 COND is the first argument to CODE; otherwise (as in the example
5368 given here), it is the second argument. TYPE is the type of the
5369 original expression. Return NULL_TREE if no simplification is
5370 possible. */
5372 static tree
5373 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
5374 tree cond, tree arg, int cond_first_p)
5376 tree test, true_value, false_value;
5377 tree lhs = NULL_TREE;
5378 tree rhs = NULL_TREE;
5380 /* This transformation is only worthwhile if we don't have to wrap
5381 arg in a SAVE_EXPR, and the operation can be simplified on at least
5382 one of the branches once it's pushed inside the COND_EXPR. */
5383 if (!TREE_CONSTANT (arg))
5384 return NULL_TREE;
5386 if (TREE_CODE (cond) == COND_EXPR)
5388 test = TREE_OPERAND (cond, 0);
5389 true_value = TREE_OPERAND (cond, 1);
5390 false_value = TREE_OPERAND (cond, 2);
5391 /* If this operand has void type (e.g. it is a throw
5392 expression), it does not make sense to try to perform a logical
5393 or arithmetic operation involving it. */
5394 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5395 lhs = true_value;
5396 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5397 rhs = false_value;
5399 else
5401 tree testtype = TREE_TYPE (cond);
5402 test = cond;
5403 true_value = constant_boolean_node (true, testtype);
5404 false_value = constant_boolean_node (false, testtype);
5407 if (lhs == 0)
5408 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5409 : build2 (code, type, arg, true_value));
5410 if (rhs == 0)
5411 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5412 : build2 (code, type, arg, false_value));
5414 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5415 return fold_convert (type, test);
5419 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5421 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5422 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5423 ADDEND is the same as X.
5425 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5426 and finite. The problematic cases are when X is zero, and its mode
5427 has signed zeros. In the case of rounding towards -infinity,
5428 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5429 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5431 static bool
5432 fold_real_zero_addition_p (tree type, tree addend, int negate)
5434 if (!real_zerop (addend))
5435 return false;
5437 /* Don't allow the fold with -fsignaling-nans. */
5438 if (HONOR_SNANS (TYPE_MODE (type)))
5439 return false;
5441 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5442 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5443 return true;
5445 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5446 if (TREE_CODE (addend) == REAL_CST
5447 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5448 negate = !negate;
5450 /* The mode has signed zeros, and we have to honor their sign.
5451 In this situation, there is only one case we can return true for.
5452 X - 0 is the same as X unless rounding towards -infinity is
5453 supported. */
5454 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
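/* Editorial illustration (not part of the original source): when signed
   zeros are honored, `x + 0.0' must not fold to `x', since for x == -0.0
   the sum is +0.0.  `x - 0.0' still folds to `x', unless sign-dependent
   rounding is honored too: rounding towards -infinity makes 0.0 - 0.0
   yield -0.0.  */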
5457 /* Subroutine of fold() that checks comparisons of built-in math
5458 functions against real constants.
5460 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5461 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5462 is the type of the result and ARG0 and ARG1 are the operands of the
5463 comparison. ARG1 must be a TREE_REAL_CST.
5465 The function returns the constant folded tree if a simplification
5466 can be made, and NULL_TREE otherwise. */
5468 static tree
5469 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5470 tree type, tree arg0, tree arg1)
5472 REAL_VALUE_TYPE c;
5474 if (BUILTIN_SQRT_P (fcode))
5476 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5477 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5479 c = TREE_REAL_CST (arg1);
5480 if (REAL_VALUE_NEGATIVE (c))
5482 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are all false if y is negative. */
5483 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5484 return omit_one_operand (type, integer_zero_node, arg);
5486 /* sqrt(x) > y is always true, if y is negative and we
5487 don't care about NaNs, i.e. negative values of x. */
5488 if (code == NE_EXPR || !HONOR_NANS (mode))
5489 return omit_one_operand (type, integer_one_node, arg);
5491 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5492 return fold (build2 (GE_EXPR, type, arg,
5493 build_real (TREE_TYPE (arg), dconst0)));
5495 else if (code == GT_EXPR || code == GE_EXPR)
5497 REAL_VALUE_TYPE c2;
5499 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5500 real_convert (&c2, mode, &c2);
5502 if (REAL_VALUE_ISINF (c2))
5504 /* sqrt(x) > y is x == +Inf, when y is very large. */
5505 if (HONOR_INFINITIES (mode))
5506 return fold (build2 (EQ_EXPR, type, arg,
5507 build_real (TREE_TYPE (arg), c2)));
5509 /* sqrt(x) > y is always false, when y is very large
5510 and we don't care about infinities. */
5511 return omit_one_operand (type, integer_zero_node, arg);
5514 /* sqrt(x) > c is the same as x > c*c. */
5515 return fold (build2 (code, type, arg,
5516 build_real (TREE_TYPE (arg), c2)));
5518 else if (code == LT_EXPR || code == LE_EXPR)
5520 REAL_VALUE_TYPE c2;
5522 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5523 real_convert (&c2, mode, &c2);
5525 if (REAL_VALUE_ISINF (c2))
5527 /* sqrt(x) < y is always true, when y is a very large
5528 value and we don't care about NaNs or Infinities. */
5529 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5530 return omit_one_operand (type, integer_one_node, arg);
5532 /* sqrt(x) < y is x != +Inf when y is very large and we
5533 don't care about NaNs. */
5534 if (! HONOR_NANS (mode))
5535 return fold (build2 (NE_EXPR, type, arg,
5536 build_real (TREE_TYPE (arg), c2)));
5538 /* sqrt(x) < y is x >= 0 when y is very large and we
5539 don't care about Infinities. */
5540 if (! HONOR_INFINITIES (mode))
5541 return fold (build2 (GE_EXPR, type, arg,
5542 build_real (TREE_TYPE (arg), dconst0)));
5544 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5545 if (lang_hooks.decls.global_bindings_p () != 0
5546 || CONTAINS_PLACEHOLDER_P (arg))
5547 return NULL_TREE;
5549 arg = save_expr (arg);
5550 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5551 fold (build2 (GE_EXPR, type, arg,
5552 build_real (TREE_TYPE (arg),
5553 dconst0))),
5554 fold (build2 (NE_EXPR, type, arg,
5555 build_real (TREE_TYPE (arg),
5556 c2)))));
5559 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5560 if (! HONOR_NANS (mode))
5561 return fold (build2 (code, type, arg,
5562 build_real (TREE_TYPE (arg), c2)));
5564 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5565 if (lang_hooks.decls.global_bindings_p () == 0
5566 && ! CONTAINS_PLACEHOLDER_P (arg))
5568 arg = save_expr (arg);
5569 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5570 fold (build2 (GE_EXPR, type, arg,
5571 build_real (TREE_TYPE (arg),
5572 dconst0))),
5573 fold (build2 (code, type, arg,
5574 build_real (TREE_TYPE (arg),
5575 c2)))));
5580 return NULL_TREE;
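/* Editorial illustrations (not part of the original source), assuming
   the usual IEEE modes:

     sqrt(x) > 2.0    =>   x > 4.0
     sqrt(x) < -1.0   =>   0            (sqrt is never negative)
     sqrt(x) > -1.0   =>   x >= 0.0     (when NaNs are honored)

   with 4.0 computed by the REAL_ARITHMETIC squaring above.  */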
5583 /* Subroutine of fold() that optimizes comparisons against Infinities,
5584 either +Inf or -Inf.
5586 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5587 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5588 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5590 The function returns the constant folded tree if a simplification
5591 can be made, and NULL_TREE otherwise. */
5593 static tree
5594 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5596 enum machine_mode mode;
5597 REAL_VALUE_TYPE max;
5598 tree temp;
5599 bool neg;
5601 mode = TYPE_MODE (TREE_TYPE (arg0));
5603 /* For negative infinity swap the sense of the comparison. */
5604 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5605 if (neg)
5606 code = swap_tree_comparison (code);
5608 switch (code)
5610 case GT_EXPR:
5611 /* x > +Inf is always false, if we ignore sNaNs. */
5612 if (HONOR_SNANS (mode))
5613 return NULL_TREE;
5614 return omit_one_operand (type, integer_zero_node, arg0);
5616 case LE_EXPR:
5617 /* x <= +Inf is always true, if we don't care about NaNs. */
5618 if (! HONOR_NANS (mode))
5619 return omit_one_operand (type, integer_one_node, arg0);
5621 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5622 if (lang_hooks.decls.global_bindings_p () == 0
5623 && ! CONTAINS_PLACEHOLDER_P (arg0))
5625 arg0 = save_expr (arg0);
5626 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5628 break;
5630 case EQ_EXPR:
5631 case GE_EXPR:
5632 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5633 real_maxval (&max, neg, mode);
5634 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5635 arg0, build_real (TREE_TYPE (arg0), max)));
5637 case LT_EXPR:
5638 /* x < +Inf is always equal to x <= DBL_MAX. */
5639 real_maxval (&max, neg, mode);
5640 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5641 arg0, build_real (TREE_TYPE (arg0), max)));
5643 case NE_EXPR:
5644 /* x != +Inf is always equal to !(x > DBL_MAX). */
5645 real_maxval (&max, neg, mode);
5646 if (! HONOR_NANS (mode))
5647 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5648 arg0, build_real (TREE_TYPE (arg0), max)));
5650 /* The transformation below creates non-gimple code and thus is
5651 not appropriate if we are in gimple form. */
5652 if (in_gimple_form)
5653 return NULL_TREE;
5655 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5656 arg0, build_real (TREE_TYPE (arg0), max)));
5657 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5659 default:
5660 break;
5663 return NULL_TREE;
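/* Editorial illustrations (not part of the original source): for a
   double x, with DBL_MAX the largest finite value of its mode:

     x > +Inf    =>   0              (unless sNaNs are honored)
     x < +Inf    =>   x <= DBL_MAX
     x >= +Inf   =>   x > DBL_MAX

   Comparisons against -Inf are first mirrored by swap_tree_comparison.  */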
5666 /* Subroutine of fold() that optimizes comparisons of a division by
5667 a nonzero integer constant against an integer constant, i.e.
5668 X/C1 op C2.
5670 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5671 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5672 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5674 The function returns the constant folded tree if a simplification
5675 can be made, and NULL_TREE otherwise. */
5677 static tree
5678 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5680 tree prod, tmp, hi, lo;
5681 tree arg00 = TREE_OPERAND (arg0, 0);
5682 tree arg01 = TREE_OPERAND (arg0, 1);
5683 unsigned HOST_WIDE_INT lpart;
5684 HOST_WIDE_INT hpart;
5685 int overflow;
5687 /* We have to do this the hard way to detect unsigned overflow.
5688 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5689 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5690 TREE_INT_CST_HIGH (arg01),
5691 TREE_INT_CST_LOW (arg1),
5692 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5693 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5694 prod = force_fit_type (prod, -1, overflow, false);
5696 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5698 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5699 lo = prod;
5701 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5702 overflow = add_double (TREE_INT_CST_LOW (prod),
5703 TREE_INT_CST_HIGH (prod),
5704 TREE_INT_CST_LOW (tmp),
5705 TREE_INT_CST_HIGH (tmp),
5706 &lpart, &hpart);
5707 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5708 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5709 TREE_CONSTANT_OVERFLOW (prod));
5711 else if (tree_int_cst_sgn (arg01) >= 0)
5713 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5714 switch (tree_int_cst_sgn (arg1))
5716 case -1:
5717 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5718 hi = prod;
5719 break;
5721 case 0:
5722 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5723 hi = tmp;
5724 break;
5726 case 1:
5727 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5728 lo = prod;
5729 break;
5731 default:
5732 gcc_unreachable ();
5735 else
5737 /* A negative divisor reverses the relational operators. */
5738 code = swap_tree_comparison (code);
5740 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5741 switch (tree_int_cst_sgn (arg1))
5743 case -1:
5744 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5745 lo = prod;
5746 break;
5748 case 0:
5749 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5750 lo = tmp;
5751 break;
5753 case 1:
5754 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5755 hi = prod;
5756 break;
5758 default:
5759 gcc_unreachable ();
5763 switch (code)
5765 case EQ_EXPR:
5766 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5767 return omit_one_operand (type, integer_zero_node, arg00);
5768 if (TREE_OVERFLOW (hi))
5769 return fold (build2 (GE_EXPR, type, arg00, lo));
5770 if (TREE_OVERFLOW (lo))
5771 return fold (build2 (LE_EXPR, type, arg00, hi));
5772 return build_range_check (type, arg00, 1, lo, hi);
5774 case NE_EXPR:
5775 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5776 return omit_one_operand (type, integer_one_node, arg00);
5777 if (TREE_OVERFLOW (hi))
5778 return fold (build2 (LT_EXPR, type, arg00, lo));
5779 if (TREE_OVERFLOW (lo))
5780 return fold (build2 (GT_EXPR, type, arg00, hi));
5781 return build_range_check (type, arg00, 0, lo, hi);
5783 case LT_EXPR:
5784 if (TREE_OVERFLOW (lo))
5785 return omit_one_operand (type, integer_zero_node, arg00);
5786 return fold (build2 (LT_EXPR, type, arg00, lo));
5788 case LE_EXPR:
5789 if (TREE_OVERFLOW (hi))
5790 return omit_one_operand (type, integer_one_node, arg00);
5791 return fold (build2 (LE_EXPR, type, arg00, hi));
5793 case GT_EXPR:
5794 if (TREE_OVERFLOW (hi))
5795 return omit_one_operand (type, integer_zero_node, arg00);
5796 return fold (build2 (GT_EXPR, type, arg00, hi));
5798 case GE_EXPR:
5799 if (TREE_OVERFLOW (lo))
5800 return omit_one_operand (type, integer_one_node, arg00);
5801 return fold (build2 (GE_EXPR, type, arg00, lo));
5803 default:
5804 break;
5807 return NULL_TREE;
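/* Editorial illustration (not part of the original source): for an
   unsigned x,

     x / 4 == 2   =>   a range check for 8 <= x <= 11

   with prod = 4*2 = 8 and hi = prod + (4-1) = 11.  The TREE_OVERFLOW
   tests above catch quotients so large that the bounds would wrap.  */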
5811 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5812 equality/inequality test, then return a simplified form of
5813 the test using shifts and logical operations. Otherwise return
5814 NULL. TYPE is the desired result type. */
5816 tree
5817 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5818 tree result_type)
5820 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5821 operand 0. */
5822 if (code == TRUTH_NOT_EXPR)
5824 code = TREE_CODE (arg0);
5825 if (code != NE_EXPR && code != EQ_EXPR)
5826 return NULL_TREE;
5828 /* Extract the arguments of the EQ/NE. */
5829 arg1 = TREE_OPERAND (arg0, 1);
5830 arg0 = TREE_OPERAND (arg0, 0);
5832 /* This requires us to invert the code. */
5833 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5836 /* If this is testing a single bit, we can optimize the test. */
5837 if ((code == NE_EXPR || code == EQ_EXPR)
5838 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5839 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5841 tree inner = TREE_OPERAND (arg0, 0);
5842 tree type = TREE_TYPE (arg0);
5843 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5844 enum machine_mode operand_mode = TYPE_MODE (type);
5845 int ops_unsigned;
5846 tree signed_type, unsigned_type, intermediate_type;
5847 tree arg00;
5849 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5850 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5851 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5852 if (arg00 != NULL_TREE
5853 /* This is only a win if casting to a signed type is cheap,
5854 i.e. when arg00's type is not a partial mode. */
5855 && TYPE_PRECISION (TREE_TYPE (arg00))
5856 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5858 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5859 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5860 result_type, fold_convert (stype, arg00),
5861 fold_convert (stype, integer_zero_node)));
5864 /* Otherwise we have (A & C) != 0 where C is a single bit,
5865 convert that into ((A >> C2) & 1), where C2 = log2(C).
5866 Similarly for (A & C) == 0. */
5868 /* If INNER is a right shift of a constant and it plus BITNUM does
5869 not overflow, adjust BITNUM and INNER. */
5870 if (TREE_CODE (inner) == RSHIFT_EXPR
5871 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5872 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5873 && bitnum < TYPE_PRECISION (type)
5874 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5875 bitnum - TYPE_PRECISION (type)))
5877 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5878 inner = TREE_OPERAND (inner, 0);
5881 /* If we are going to be able to omit the AND below, we must do our
5882 operations as unsigned. If we must use the AND, we have a choice.
5883 Normally unsigned is faster, but for some machines signed is. */
5884 #ifdef LOAD_EXTEND_OP
5885 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5886 #else
5887 ops_unsigned = 1;
5888 #endif
5890 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5891 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5892 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5893 inner = fold_convert (intermediate_type, inner);
5895 if (bitnum != 0)
5896 inner = build2 (RSHIFT_EXPR, intermediate_type,
5897 inner, size_int (bitnum));
5899 if (code == EQ_EXPR)
5900 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5901 inner, integer_one_node));
5903 /* Put the AND last so it can combine with more things. */
5904 inner = build2 (BIT_AND_EXPR, intermediate_type,
5905 inner, integer_one_node);
5907 /* Make sure to return the proper type. */
5908 inner = fold_convert (result_type, inner);
5910 return inner;
5912 return NULL_TREE;
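/* Editorial illustrations (not part of the original source): for a
   32-bit int x,

     (x & 4) != 0            =>   (x >> 2) & 1
     (x & 4) == 0            =>   ((x >> 2) ^ 1) & 1
     (x & 0x80000000) != 0   =>   x < 0    (the sign-bit special case)

   with the shift and mask performed in the signedness picked above.  */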
5915 /* Check whether we are allowed to reorder operands arg0 and arg1,
5916 such that the evaluation of arg1 occurs before arg0. */
5918 static bool
5919 reorder_operands_p (tree arg0, tree arg1)
5921 if (! flag_evaluation_order)
5922 return true;
5923 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5924 return true;
5925 return ! TREE_SIDE_EFFECTS (arg0)
5926 && ! TREE_SIDE_EFFECTS (arg1);
5929 /* Test whether it is preferable to swap two operands, ARG0 and
5930 ARG1, for example because ARG0 is an integer constant and ARG1
5931 isn't. If REORDER is true, only recommend swapping if we can
5932 evaluate the operands in reverse order. */
5934 bool
5935 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5937 STRIP_SIGN_NOPS (arg0);
5938 STRIP_SIGN_NOPS (arg1);
5940 if (TREE_CODE (arg1) == INTEGER_CST)
5941 return 0;
5942 if (TREE_CODE (arg0) == INTEGER_CST)
5943 return 1;
5945 if (TREE_CODE (arg1) == REAL_CST)
5946 return 0;
5947 if (TREE_CODE (arg0) == REAL_CST)
5948 return 1;
5950 if (TREE_CODE (arg1) == COMPLEX_CST)
5951 return 0;
5952 if (TREE_CODE (arg0) == COMPLEX_CST)
5953 return 1;
5955 if (TREE_CONSTANT (arg1))
5956 return 0;
5957 if (TREE_CONSTANT (arg0))
5958 return 1;
5960 if (optimize_size)
5961 return 0;
5963 if (reorder && flag_evaluation_order
5964 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5965 return 0;
5967 if (DECL_P (arg1))
5968 return 0;
5969 if (DECL_P (arg0))
5970 return 1;
5972 /* It is preferable to swap two SSA_NAME to ensure a canonical form
5973 for commutative and comparison operators. Ensuring a canonical
5974 form allows the optimizers to find additional redundancies without
5975 having to explicitly check for both orderings. */
5976 if (TREE_CODE (arg0) == SSA_NAME
5977 && TREE_CODE (arg1) == SSA_NAME
5978 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5979 return 1;
5981 return 0;
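/* Editorial illustration (not part of the original source): together
   with the commutativity swap in fold(), this canonicalizes, e.g.,
   `5 + x' as `x + 5', and orders two SSA names by version number, so
   later passes need only match one operand order.  */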
5984 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
5985 ARG0 is extended to a wider type. */
5987 static tree
5988 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
5990 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
5991 tree arg1_unw;
5992 tree shorter_type, outer_type;
5993 tree min, max;
5994 bool above, below;
5996 if (arg0_unw == arg0)
5997 return NULL_TREE;
5998 shorter_type = TREE_TYPE (arg0_unw);
6000 arg1_unw = get_unwidened (arg1, shorter_type);
6001 if (!arg1_unw)
6002 return NULL_TREE;
6004 /* If possible, express the comparison in the shorter mode. */
6005 if ((code == EQ_EXPR || code == NE_EXPR
6006 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6007 && (TREE_TYPE (arg1_unw) == shorter_type
6008 || (TREE_CODE (arg1_unw) == INTEGER_CST
6009 && int_fits_type_p (arg1_unw, shorter_type))))
6010 return fold (build (code, type, arg0_unw,
6011 fold_convert (shorter_type, arg1_unw)));
6013 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6014 return NULL_TREE;
6016 /* If we are comparing with an integer that does not fit into the range
6017 of the shorter type, the result is known. */
6018 outer_type = TREE_TYPE (arg1_unw);
6019 min = lower_bound_in_type (outer_type, shorter_type);
6020 max = upper_bound_in_type (outer_type, shorter_type);
6022 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6023 max, arg1_unw));
6024 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6025 arg1_unw, min));
6027 switch (code)
6029 case EQ_EXPR:
6030 if (above || below)
6031 return constant_boolean_node (false, type);
6032 break;
6034 case NE_EXPR:
6035 if (above || below)
6036 return constant_boolean_node (true, type);
6037 break;
6039 case LT_EXPR:
6040 case LE_EXPR:
6041 if (above)
6042 return constant_boolean_node (true, type);
6043 else if (below)
6044 return constant_boolean_node (false, type);
6045 break;
6046 case GT_EXPR:
6047 case GE_EXPR:
6048 if (above)
6049 return constant_boolean_node (false, type);
6050 else if (below)
6051 return constant_boolean_node (true, type);
6052 break;
6053 default:
6054 break;
6057 return NULL_TREE;
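/* Editorial illustrations (not part of the original source): for an
   unsigned char c,

     (int) c == 300   =>   0    (300 lies outside [0, 255])
     (int) c <  300   =>   1
     (int) c == 200   =>   c == 200, done in the narrower type

   depending on whether the constant fits the narrower type's range.  */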
6060 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6061 ARG0 just the signedness is changed. */
6063 static tree
6064 fold_sign_changed_comparison (enum tree_code code, tree type,
6065 tree arg0, tree arg1)
6067 tree arg0_inner, tmp;
6068 tree inner_type, outer_type;
6070 if (TREE_CODE (arg0) != NOP_EXPR)
6071 return NULL_TREE;
6073 outer_type = TREE_TYPE (arg0);
6074 arg0_inner = TREE_OPERAND (arg0, 0);
6075 inner_type = TREE_TYPE (arg0_inner);
6077 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6078 return NULL_TREE;
6080 if (TREE_CODE (arg1) != INTEGER_CST
6081 && !(TREE_CODE (arg1) == NOP_EXPR
6082 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6083 return NULL_TREE;
6085 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6086 && code != NE_EXPR
6087 && code != EQ_EXPR)
6088 return NULL_TREE;
6090 if (TREE_CODE (arg1) == INTEGER_CST)
6092 tmp = build_int_cst_wide (inner_type,
6093 TREE_INT_CST_LOW (arg1),
6094 TREE_INT_CST_HIGH (arg1));
6095 arg1 = force_fit_type (tmp, 0,
6096 TREE_OVERFLOW (arg1),
6097 TREE_CONSTANT_OVERFLOW (arg1));
6099 else
6100 arg1 = fold_convert (inner_type, arg1);
6102 return fold (build (code, type, arg0_inner, arg1));
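/* Editorial illustration (not part of the original source): for an
   int i,

     (unsigned int) i == 5U   =>   i == 5

   since equality does not depend on signedness when the precision is
   unchanged; ordered comparisons are rewritten only when the
   signedness itself stays the same.  */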
6105 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6106 the step of the array. TYPE is the type of the expression. ADDR is the address.
6107 MULT is the multiplicative expression. If the function succeeds, the new
6108 address expression is returned. Otherwise NULL_TREE is returned. */
6110 static tree
6111 try_move_mult_to_index (tree type, enum tree_code code, tree addr, tree mult)
6113 tree s, delta, step;
6114 tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
6115 tree ref = TREE_OPERAND (addr, 0), pref;
6116 tree ret, pos;
6117 tree itype;
6119 STRIP_NOPS (arg0);
6120 STRIP_NOPS (arg1);
6122 if (TREE_CODE (arg0) == INTEGER_CST)
6124 s = arg0;
6125 delta = arg1;
6127 else if (TREE_CODE (arg1) == INTEGER_CST)
6129 s = arg1;
6130 delta = arg0;
6132 else
6133 return NULL_TREE;
6135 for (;; ref = TREE_OPERAND (ref, 0))
6137 if (TREE_CODE (ref) == ARRAY_REF)
6139 step = array_ref_element_size (ref);
6141 if (TREE_CODE (step) != INTEGER_CST)
6142 continue;
6144 itype = TREE_TYPE (step);
6146 /* If the type sizes do not match, we might run into problems
6147 when one of them would overflow. */
6148 if (TYPE_PRECISION (itype) != TYPE_PRECISION (type))
6149 continue;
6151 if (!operand_equal_p (step, fold_convert (itype, s), 0))
6152 continue;
6154 delta = fold_convert (itype, delta);
6155 break;
6158 if (!handled_component_p (ref))
6159 return NULL_TREE;
6162 /* We found a suitable array reference. So copy everything up to it,
6163 and replace the index. */
6165 pref = TREE_OPERAND (addr, 0);
6166 ret = copy_node (pref);
6167 pos = ret;
6169 while (pref != ref)
6171 pref = TREE_OPERAND (pref, 0);
6172 TREE_OPERAND (pos, 0) = copy_node (pref);
6173 pos = TREE_OPERAND (pos, 0);
6176 TREE_OPERAND (pos, 1) = fold (build2 (code, itype,
6177 TREE_OPERAND (pos, 1),
6178 delta));
6180 return build1 (ADDR_EXPR, type, ret);
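/* Editorial illustration (not part of the original source): for an
   array of 4-byte elements, the tree

     &a[i] + 4*j        (a pointer plus a byte offset)

   becomes &a[i + j], by matching the constant factor 4 against the
   element size of the ARRAY_REF found above.  */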
6183 /* Perform constant folding and related simplification of EXPR.
6184 The related simplifications include x*1 => x, x*0 => 0, etc.,
6185 and application of the associative law.
6186 NOP_EXPR conversions may be removed freely (as long as we
6187 are careful not to change the type of the overall expression).
6188 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
6189 but we can constant-fold them if they have constant operands. */
6191 #ifdef ENABLE_FOLD_CHECKING
6192 # define fold(x) fold_1 (x)
6193 static tree fold_1 (tree);
6194 static
6195 #endif
6196 tree
6197 fold (tree expr)
6199 const tree t = expr;
6200 const tree type = TREE_TYPE (expr);
6201 tree t1 = NULL_TREE;
6202 tree tem;
6203 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
6204 enum tree_code code = TREE_CODE (t);
6205 enum tree_code_class kind = TREE_CODE_CLASS (code);
6207 /* WINS will be nonzero when the switch is done
6208 if all operands are constant. */
6209 int wins = 1;
6211 /* Return right away if a constant. */
6212 if (kind == tcc_constant)
6213 return t;
6215 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6217 tree subop;
6219 /* Special case for conversion ops that can have fixed point args. */
6220 arg0 = TREE_OPERAND (t, 0);
6222 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6223 if (arg0 != 0)
6224 STRIP_SIGN_NOPS (arg0);
6226 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
6227 subop = TREE_REALPART (arg0);
6228 else
6229 subop = arg0;
6231 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
6232 && TREE_CODE (subop) != REAL_CST)
6233 /* Note that TREE_CONSTANT isn't enough:
6234 static var addresses are constant but we can't
6235 do arithmetic on them. */
6236 wins = 0;
6238 else if (IS_EXPR_CODE_CLASS (kind))
6240 int len = first_rtl_op (code);
6241 int i;
6242 for (i = 0; i < len; i++)
6244 tree op = TREE_OPERAND (t, i);
6245 tree subop;
6247 if (op == 0)
6248 continue; /* Valid for CALL_EXPR, at least. */
6250 /* Strip any conversions that don't change the mode. This is
6251 safe for every expression, except for a comparison expression
6252 because its signedness is derived from its operands. So, in
6253 the latter case, only strip conversions that don't change the
6254 signedness.
6256 Note that this is done as an internal manipulation within the
6257 constant folder, in order to find the simplest representation
6258 of the arguments so that their form can be studied. In any
6259 cases, the appropriate type conversions should be put back in
6260 the tree that will get out of the constant folder. */
6261 if (kind == tcc_comparison)
6262 STRIP_SIGN_NOPS (op);
6263 else
6264 STRIP_NOPS (op);
6266 if (TREE_CODE (op) == COMPLEX_CST)
6267 subop = TREE_REALPART (op);
6268 else
6269 subop = op;
6271 if (TREE_CODE (subop) != INTEGER_CST
6272 && TREE_CODE (subop) != REAL_CST)
6273 /* Note that TREE_CONSTANT isn't enough:
6274 static var addresses are constant but we can't
6275 do arithmetic on them. */
6276 wins = 0;
6278 if (i == 0)
6279 arg0 = op;
6280 else if (i == 1)
6281 arg1 = op;
6285 /* If this is a commutative operation, and ARG0 is a constant, move it
6286 to ARG1 to reduce the number of tests below. */
6287 if (commutative_tree_code (code)
6288 && tree_swap_operands_p (arg0, arg1, true))
6289 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6290 TREE_OPERAND (t, 0)));
6292 /* Now WINS is set as described above,
6293 ARG0 is the first operand of EXPR,
6294 and ARG1 is the second operand (if it has more than one operand).
6296 First check for cases where an arithmetic operation is applied to a
6297 compound, conditional, or comparison operation. Push the arithmetic
6298 operation inside the compound or conditional to see if any folding
6299 can then be done. Convert comparison to conditional for this purpose.
6300 This also optimizes non-constant cases that used to be done in
6301 expand_expr.
6303 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
6304 where one of the operands is a truth value and the other is a truth
6305 value or a BIT_AND_EXPR with the constant 1. In that case, the
6306 code below would make the expression more complex. Change it to a
6307 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6308 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6310 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6311 || code == EQ_EXPR || code == NE_EXPR)
6312 && ((truth_value_p (TREE_CODE (arg0))
6313 && (truth_value_p (TREE_CODE (arg1))
6314 || (TREE_CODE (arg1) == BIT_AND_EXPR
6315 && integer_onep (TREE_OPERAND (arg1, 1)))))
6316 || (truth_value_p (TREE_CODE (arg1))
6317 && (truth_value_p (TREE_CODE (arg0))
6318 || (TREE_CODE (arg0) == BIT_AND_EXPR
6319 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6321 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6322 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6323 : TRUTH_XOR_EXPR,
6324 type, fold_convert (boolean_type_node, arg0),
6325 fold_convert (boolean_type_node, arg1)));
6327 if (code == EQ_EXPR)
6328 tem = invert_truthvalue (tem);
6330 return tem;
6333 if (TREE_CODE_CLASS (code) == tcc_unary)
6335 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6336 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6337 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6338 else if (TREE_CODE (arg0) == COND_EXPR)
6340 tree arg01 = TREE_OPERAND (arg0, 1);
6341 tree arg02 = TREE_OPERAND (arg0, 2);
6342 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6343 arg01 = fold (build1 (code, type, arg01));
6344 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6345 arg02 = fold (build1 (code, type, arg02));
6346 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6347 arg01, arg02));
6349 /* If this was a conversion, and all we did was to move it
6350 inside the COND_EXPR, bring it back out. But leave it if
6351 it is a conversion from integer to integer and the
6352 result precision is no wider than a word since such a
6353 conversion is cheap and may be optimized away by combine,
6354 while it couldn't if it were outside the COND_EXPR. Then return
6355 so we don't get into an infinite recursion loop taking the
6356 conversion out and then back in. */
6358 if ((code == NOP_EXPR || code == CONVERT_EXPR
6359 || code == NON_LVALUE_EXPR)
6360 && TREE_CODE (tem) == COND_EXPR
6361 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6362 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6363 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
6364 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
6365 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6366 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6367 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6368 && (INTEGRAL_TYPE_P
6369 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6370 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
6371 tem = build1 (code, type,
6372 build3 (COND_EXPR,
6373 TREE_TYPE (TREE_OPERAND
6374 (TREE_OPERAND (tem, 1), 0)),
6375 TREE_OPERAND (tem, 0),
6376 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6377 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6378 return tem;
6380 else if (COMPARISON_CLASS_P (arg0))
6382 if (TREE_CODE (type) == BOOLEAN_TYPE)
6384 arg0 = copy_node (arg0);
6385 TREE_TYPE (arg0) = type;
6386 return arg0;
6388 else if (TREE_CODE (type) != INTEGER_TYPE)
6389 return fold (build3 (COND_EXPR, type, arg0,
6390 fold (build1 (code, type,
6391 integer_one_node)),
6392 fold (build1 (code, type,
6393 integer_zero_node))));
6396 else if (TREE_CODE_CLASS (code) == tcc_comparison
6397 && TREE_CODE (arg0) == COMPOUND_EXPR)
6398 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6399 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6400 else if (TREE_CODE_CLASS (code) == tcc_comparison
6401 && TREE_CODE (arg1) == COMPOUND_EXPR)
6402 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6403 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6404 else if (TREE_CODE_CLASS (code) == tcc_binary
6405 || TREE_CODE_CLASS (code) == tcc_comparison)
6407 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6408 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6409 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6410 arg1)));
6411 if (TREE_CODE (arg1) == COMPOUND_EXPR
6412 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6413 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6414 fold (build2 (code, type,
6415 arg0, TREE_OPERAND (arg1, 1))));
6417 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
6419 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
6420 /*cond_first_p=*/1);
6421 if (tem != NULL_TREE)
6422 return tem;
6425 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
6427 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
6428 /*cond_first_p=*/0);
6429 if (tem != NULL_TREE)
6430 return tem;
6434 switch (code)
6436 case CONST_DECL:
6437 return fold (DECL_INITIAL (t));
6439 case NOP_EXPR:
6440 case FLOAT_EXPR:
6441 case CONVERT_EXPR:
6442 case FIX_TRUNC_EXPR:
6443 case FIX_CEIL_EXPR:
6444 case FIX_FLOOR_EXPR:
6445 case FIX_ROUND_EXPR:
6446 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6447 return TREE_OPERAND (t, 0);
6449 /* Handle cases of two conversions in a row. */
6450 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6451 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6453 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6454 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6455 int inside_int = INTEGRAL_TYPE_P (inside_type);
6456 int inside_ptr = POINTER_TYPE_P (inside_type);
6457 int inside_float = FLOAT_TYPE_P (inside_type);
6458 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6459 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6460 int inter_int = INTEGRAL_TYPE_P (inter_type);
6461 int inter_ptr = POINTER_TYPE_P (inter_type);
6462 int inter_float = FLOAT_TYPE_P (inter_type);
6463 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6464 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6465 int final_int = INTEGRAL_TYPE_P (type);
6466 int final_ptr = POINTER_TYPE_P (type);
6467 int final_float = FLOAT_TYPE_P (type);
6468 unsigned int final_prec = TYPE_PRECISION (type);
6469 int final_unsignedp = TYPE_UNSIGNED (type);
6471 /* In addition to the cases of two conversions in a row
6472 handled below, if we are converting something to its own
6473 type via an object of identical or wider precision, neither
6474 conversion is needed. */
6475 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6476 && ((inter_int && final_int) || (inter_float && final_float))
6477 && inter_prec >= final_prec)
6478 return fold (build1 (code, type,
6479 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6481 /* Likewise, if the intermediate and final types are either both
6482 float or both integer, we don't need the middle conversion if
6483 it is wider than the final type and doesn't change the signedness
6484 (for integers). Avoid this if the final type is a pointer
6485 since then we sometimes need the inner conversion. Likewise if
6486 the outer has a precision not equal to the size of its mode. */
6487 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6488 || (inter_float && inside_float))
6489 && inter_prec >= inside_prec
6490 && (inter_float || inter_unsignedp == inside_unsignedp)
6491 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6492 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6493 && ! final_ptr)
6494 return fold (build1 (code, type,
6495 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6497 /* If we have a sign-extension of a zero-extended value, we can
6498 replace that by a single zero-extension. */
6499 if (inside_int && inter_int && final_int
6500 && inside_prec < inter_prec && inter_prec < final_prec
6501 && inside_unsignedp && !inter_unsignedp)
6502 return fold (build1 (code, type,
6503 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6505 /* Two conversions in a row are not needed unless:
6506 - some conversion is floating-point (overstrict for now), or
6507 - the intermediate type is narrower than both initial and
6508 final, or
6509 - the intermediate type and innermost type differ in signedness,
6510 and the outermost type is wider than the intermediate, or
6511 - the initial type is a pointer type and the precisions of the
6512 intermediate and final types differ, or
6513 - the final type is a pointer type and the precisions of the
6514 initial and intermediate types differ. */
6515 if (! inside_float && ! inter_float && ! final_float
6516 && (inter_prec > inside_prec || inter_prec > final_prec)
6517 && ! (inside_int && inter_int
6518 && inter_unsignedp != inside_unsignedp
6519 && inter_prec < final_prec)
6520 && ((inter_unsignedp && inter_prec > inside_prec)
6521 == (final_unsignedp && final_prec > inter_prec))
6522 && ! (inside_ptr && inter_prec != final_prec)
6523 && ! (final_ptr && inside_prec != inter_prec)
6524 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6525 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6526 && ! final_ptr)
6527 return fold (build1 (code, type,
6528 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
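/* Editorial illustrations (not part of the original source): for an
   int i and a short s,

     (int) (long) i   =>   i          (round trip through a wider type)
     (long) (int) s   =>   (long) s   (the middle widening is redundant)  */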
6531 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6532 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6533 /* Detect assigning a bitfield. */
6534 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6535 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6537 /* Don't leave an assignment inside a conversion
6538 unless assigning a bitfield. */
6539 tree prev = TREE_OPERAND (t, 0);
6540 tem = copy_node (t);
6541 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6542 /* First do the assignment, then return converted constant. */
6543 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6544 TREE_NO_WARNING (tem) = 1;
6545 TREE_USED (tem) = 1;
6546 return tem;
6549 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6550 constant (if x has signed type, the sign bit cannot be set
6551 in c). This folds extension into the BIT_AND_EXPR. */
6552 if (INTEGRAL_TYPE_P (type)
6553 && TREE_CODE (type) != BOOLEAN_TYPE
6554 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6555 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6557 tree and = TREE_OPERAND (t, 0);
6558 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6559 int change = 0;
6561 if (TYPE_UNSIGNED (TREE_TYPE (and))
6562 || (TYPE_PRECISION (type)
6563 <= TYPE_PRECISION (TREE_TYPE (and))))
6564 change = 1;
6565 else if (TYPE_PRECISION (TREE_TYPE (and1))
6566 <= HOST_BITS_PER_WIDE_INT
6567 && host_integerp (and1, 1))
6569 unsigned HOST_WIDE_INT cst;
6571 cst = tree_low_cst (and1, 1);
6572 cst &= (HOST_WIDE_INT) -1
6573 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6574 change = (cst == 0);
6575 #ifdef LOAD_EXTEND_OP
6576 if (change
6577 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6578 == ZERO_EXTEND))
6580 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6581 and0 = fold_convert (uns, and0);
6582 and1 = fold_convert (uns, and1);
6584 #endif
6586 if (change)
6587 return fold (build2 (BIT_AND_EXPR, type,
6588 fold_convert (type, and0),
6589 fold_convert (type, and1)));
6592 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6593 T2 being pointers to types of the same size. */
6594 if (POINTER_TYPE_P (TREE_TYPE (t))
6595 && BINARY_CLASS_P (arg0)
6596 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6597 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6599 tree arg00 = TREE_OPERAND (arg0, 0);
6600 tree t0 = TREE_TYPE (t);
6601 tree t1 = TREE_TYPE (arg00);
6602 tree tt0 = TREE_TYPE (t0);
6603 tree tt1 = TREE_TYPE (t1);
6604 tree s0 = TYPE_SIZE (tt0);
6605 tree s1 = TYPE_SIZE (tt1);
6607 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6608 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6609 TREE_OPERAND (arg0, 1));
6612 tem = fold_convert_const (code, type, arg0);
6613 return tem ? tem : t;
6615 case VIEW_CONVERT_EXPR:
6616 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6617 return build1 (VIEW_CONVERT_EXPR, type,
6618 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6619 return t;
6621 case COMPONENT_REF:
6622 if (TREE_CODE (arg0) == CONSTRUCTOR
6623 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6625 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6626 if (m)
6627 return TREE_VALUE (m);
6629 return t;
6631 case RANGE_EXPR:
6632 if (TREE_CONSTANT (t) != wins)
6634 tem = copy_node (t);
6635 TREE_CONSTANT (tem) = wins;
6636 TREE_INVARIANT (tem) = wins;
6637 return tem;
6639 return t;
6641 case NEGATE_EXPR:
6642 if (negate_expr_p (arg0))
6643 return fold_convert (type, negate_expr (arg0));
6644 return t;
6646 case ABS_EXPR:
6647 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6648 return fold_abs_const (arg0, type);
6649 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6650 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6651 /* Convert fabs((double)float) into (double)fabsf(float). */
6652 else if (TREE_CODE (arg0) == NOP_EXPR
6653 && TREE_CODE (type) == REAL_TYPE)
6655 tree targ0 = strip_float_extensions (arg0);
6656 if (targ0 != arg0)
6657 return fold_convert (type, fold (build1 (ABS_EXPR,
6658 TREE_TYPE (targ0),
6659 targ0)));
6661 else if (tree_expr_nonnegative_p (arg0))
6662 return arg0;
6663 return t;
6665 case CONJ_EXPR:
6666 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6667 return fold_convert (type, arg0);
6668 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6669 return build2 (COMPLEX_EXPR, type,
6670 TREE_OPERAND (arg0, 0),
6671 negate_expr (TREE_OPERAND (arg0, 1)));
6672 else if (TREE_CODE (arg0) == COMPLEX_CST)
6673 return build_complex (type, TREE_REALPART (arg0),
6674 negate_expr (TREE_IMAGPART (arg0)));
6675 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6676 return fold (build2 (TREE_CODE (arg0), type,
6677 fold (build1 (CONJ_EXPR, type,
6678 TREE_OPERAND (arg0, 0))),
6679 fold (build1 (CONJ_EXPR, type,
6680 TREE_OPERAND (arg0, 1)))));
6681 else if (TREE_CODE (arg0) == CONJ_EXPR)
6682 return TREE_OPERAND (arg0, 0);
6683 return t;
6685 case BIT_NOT_EXPR:
6686 if (TREE_CODE (arg0) == INTEGER_CST)
6687 return fold_not_const (arg0, type);
6688 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6689 return TREE_OPERAND (arg0, 0);
6690 return t;
6692 case PLUS_EXPR:
6693 /* A + (-B) -> A - B */
6694 if (TREE_CODE (arg1) == NEGATE_EXPR)
6695 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6696 /* (-A) + B -> B - A */
6697 if (TREE_CODE (arg0) == NEGATE_EXPR
6698 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6699 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6700 if (! FLOAT_TYPE_P (type))
6702 if (integer_zerop (arg1))
6703 return non_lvalue (fold_convert (type, arg0));
6705 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6706 with a constant, and the two constants have no bits in common,
6707 we should treat this as a BIT_IOR_EXPR since this may produce more
6708 simplifications. */
6709 if (TREE_CODE (arg0) == BIT_AND_EXPR
6710 && TREE_CODE (arg1) == BIT_AND_EXPR
6711 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6712 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6713 && integer_zerop (const_binop (BIT_AND_EXPR,
6714 TREE_OPERAND (arg0, 1),
6715 TREE_OPERAND (arg1, 1), 0)))
6717 code = BIT_IOR_EXPR;
6718 goto bit_ior;
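/* Editorial illustration (not part of the original source): since
   3 & 12 == 0, no addition carries can occur and

     (x & 3) + (y & 12)   =>   (x & 3) | (y & 12)

   which exposes the BIT_IOR_EXPR simplifications.  */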
6721 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6722 (plus (plus (mult) (mult)) (foo)) so that we can
6723 take advantage of the factoring cases below. */
6724 if (((TREE_CODE (arg0) == PLUS_EXPR
6725 || TREE_CODE (arg0) == MINUS_EXPR)
6726 && TREE_CODE (arg1) == MULT_EXPR)
6727 || ((TREE_CODE (arg1) == PLUS_EXPR
6728 || TREE_CODE (arg1) == MINUS_EXPR)
6729 && TREE_CODE (arg0) == MULT_EXPR))
6731 tree parg0, parg1, parg, marg;
6732 enum tree_code pcode;
6734 if (TREE_CODE (arg1) == MULT_EXPR)
6735 parg = arg0, marg = arg1;
6736 else
6737 parg = arg1, marg = arg0;
6738 pcode = TREE_CODE (parg);
6739 parg0 = TREE_OPERAND (parg, 0);
6740 parg1 = TREE_OPERAND (parg, 1);
6741 STRIP_NOPS (parg0);
6742 STRIP_NOPS (parg1);
6744 if (TREE_CODE (parg0) == MULT_EXPR
6745 && TREE_CODE (parg1) != MULT_EXPR)
6746 return fold (build2 (pcode, type,
6747 fold (build2 (PLUS_EXPR, type,
6748 fold_convert (type, parg0),
6749 fold_convert (type, marg))),
6750 fold_convert (type, parg1)));
6751 if (TREE_CODE (parg0) != MULT_EXPR
6752 && TREE_CODE (parg1) == MULT_EXPR)
6753 return fold (build2 (PLUS_EXPR, type,
6754 fold_convert (type, parg0),
6755 fold (build2 (pcode, type,
6756 fold_convert (type, marg),
6757 fold_convert (type,
6758 parg1)))));
6761 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6763 tree arg00, arg01, arg10, arg11;
6764 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6766 /* (A * C) + (B * C) -> (A+B) * C.
6767 We are most concerned about the case where C is a constant,
6768 but other combinations show up during loop reduction. Since
6769 it is not difficult, try all four possibilities. */
6771 arg00 = TREE_OPERAND (arg0, 0);
6772 arg01 = TREE_OPERAND (arg0, 1);
6773 arg10 = TREE_OPERAND (arg1, 0);
6774 arg11 = TREE_OPERAND (arg1, 1);
6775 same = NULL_TREE;
6777 if (operand_equal_p (arg01, arg11, 0))
6778 same = arg01, alt0 = arg00, alt1 = arg10;
6779 else if (operand_equal_p (arg00, arg10, 0))
6780 same = arg00, alt0 = arg01, alt1 = arg11;
6781 else if (operand_equal_p (arg00, arg11, 0))
6782 same = arg00, alt0 = arg01, alt1 = arg10;
6783 else if (operand_equal_p (arg01, arg10, 0))
6784 same = arg01, alt0 = arg00, alt1 = arg11;
6786 /* No identical multiplicands; see if we can find a common
6787 power-of-two factor in non-power-of-two multiplies. This
6788 can help in multi-dimensional array access. */
6789 else if (TREE_CODE (arg01) == INTEGER_CST
6790 && TREE_CODE (arg11) == INTEGER_CST
6791 && TREE_INT_CST_HIGH (arg01) == 0
6792 && TREE_INT_CST_HIGH (arg11) == 0)
6794 HOST_WIDE_INT int01, int11, tmp;
6795 int01 = TREE_INT_CST_LOW (arg01);
6796 int11 = TREE_INT_CST_LOW (arg11);
6798 /* Move min of absolute values to int11. */
6799 if ((int01 >= 0 ? int01 : -int01)
6800 < (int11 >= 0 ? int11 : -int11))
6802 tmp = int01, int01 = int11, int11 = tmp;
6803 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6804 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6807 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6809 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6810 build_int_cst (NULL_TREE,
6811 int01 / int11)));
6812 alt1 = arg10;
6813 same = arg11;
6817 if (same)
6818 return fold (build2 (MULT_EXPR, type,
6819 fold (build2 (PLUS_EXPR, type,
6820 fold_convert (type, alt0),
6821 fold_convert (type, alt1))),
6822 same));
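/* Editorial illustrations (not part of the original source):

     a*c + b*c    =>   (a + b) * c
     a*12 + b*4   =>   (a*3 + b) * 4

   the second through the common power-of-two factor found above.  */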
6825 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
6826 of the array. The loop optimizer sometimes produces this type
6827 of expression. */
6828 if (TREE_CODE (arg0) == ADDR_EXPR
6829 && TREE_CODE (arg1) == MULT_EXPR)
6831 tem = try_move_mult_to_index (type, PLUS_EXPR, arg0, arg1);
6832 if (tem)
6833 return fold (tem);
6835 else if (TREE_CODE (arg1) == ADDR_EXPR
6836 && TREE_CODE (arg0) == MULT_EXPR)
6838 tem = try_move_mult_to_index (type, PLUS_EXPR, arg1, arg0);
6839 if (tem)
6840 return fold (tem);
6843 else
6845 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6846 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6847 return non_lvalue (fold_convert (type, arg0));
6849 /* Likewise if the operands are reversed. */
6850 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6851 return non_lvalue (fold_convert (type, arg1));
6853 /* Convert X + -C into X - C. */
6854 if (TREE_CODE (arg1) == REAL_CST
6855 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
6857 tem = fold_negate_const (arg1, type);
6858 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
6859 return fold (build2 (MINUS_EXPR, type,
6860 fold_convert (type, arg0),
6861 fold_convert (type, tem)));
6864 /* Convert x+x into x*2.0. */
6865 if (operand_equal_p (arg0, arg1, 0)
6866 && SCALAR_FLOAT_TYPE_P (type))
6867 return fold (build2 (MULT_EXPR, type, arg0,
6868 build_real (type, dconst2)));
6870 /* Convert x*c+x into x*(c+1). */
6871 if (flag_unsafe_math_optimizations
6872 && TREE_CODE (arg0) == MULT_EXPR
6873 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6874 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6875 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6877 REAL_VALUE_TYPE c;
6879 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6880 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6881 return fold (build2 (MULT_EXPR, type, arg1,
6882 build_real (type, c)));
6885 /* Convert x+x*c into x*(c+1). */
6886 if (flag_unsafe_math_optimizations
6887 && TREE_CODE (arg1) == MULT_EXPR
6888 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6889 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6890 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6892 REAL_VALUE_TYPE c;
6894 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6895 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6896 return fold (build2 (MULT_EXPR, type, arg0,
6897 build_real (type, c)));
6900 /* Convert x*c1+x*c2 into x*(c1+c2). */
6901 if (flag_unsafe_math_optimizations
6902 && TREE_CODE (arg0) == MULT_EXPR
6903 && TREE_CODE (arg1) == MULT_EXPR
6904 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6905 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6906 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6907 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6908 && operand_equal_p (TREE_OPERAND (arg0, 0),
6909 TREE_OPERAND (arg1, 0), 0))
6911 REAL_VALUE_TYPE c1, c2;
6913 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6914 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6915 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6916 return fold (build2 (MULT_EXPR, type,
6917 TREE_OPERAND (arg0, 0),
6918 build_real (type, c1)));
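/* Editorial illustrations (not part of the original source): with
   -funsafe-math-optimizations (the first fold needs no flag),

     x + x           =>   x * 2.0
     x*3.0 + x       =>   x * 4.0
     x*2.0 + x*3.0   =>   x * 5.0  */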
6920 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
6921 if (flag_unsafe_math_optimizations
6922 && TREE_CODE (arg1) == PLUS_EXPR
6923 && TREE_CODE (arg0) != MULT_EXPR)
6925 tree tree10 = TREE_OPERAND (arg1, 0);
6926 tree tree11 = TREE_OPERAND (arg1, 1);
6927 if (TREE_CODE (tree11) == MULT_EXPR
6928 && TREE_CODE (tree10) == MULT_EXPR)
6930 tree tree0;
6931 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6932 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6935 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
6936 if (flag_unsafe_math_optimizations
6937 && TREE_CODE (arg0) == PLUS_EXPR
6938 && TREE_CODE (arg1) != MULT_EXPR)
6940 tree tree00 = TREE_OPERAND (arg0, 0);
6941 tree tree01 = TREE_OPERAND (arg0, 1);
6942 if (TREE_CODE (tree01) == MULT_EXPR
6943 && TREE_CODE (tree00) == MULT_EXPR)
6945 tree tree0;
6946 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6947 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6952 bit_rotate:
6953 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6954 is a rotate of A by C1 bits. */
6955 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6956 is a rotate of A by B bits. */
6958 enum tree_code code0, code1;
6959 code0 = TREE_CODE (arg0);
6960 code1 = TREE_CODE (arg1);
6961 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6962 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6963 && operand_equal_p (TREE_OPERAND (arg0, 0),
6964 TREE_OPERAND (arg1, 0), 0)
6965 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6967 tree tree01, tree11;
6968 enum tree_code code01, code11;
6970 tree01 = TREE_OPERAND (arg0, 1);
6971 tree11 = TREE_OPERAND (arg1, 1);
6972 STRIP_NOPS (tree01);
6973 STRIP_NOPS (tree11);
6974 code01 = TREE_CODE (tree01);
6975 code11 = TREE_CODE (tree11);
6976 if (code01 == INTEGER_CST
6977 && code11 == INTEGER_CST
6978 && TREE_INT_CST_HIGH (tree01) == 0
6979 && TREE_INT_CST_HIGH (tree11) == 0
6980 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6981 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6982 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6983 code0 == LSHIFT_EXPR ? tree01 : tree11);
6984 else if (code11 == MINUS_EXPR)
6986 tree tree110, tree111;
6987 tree110 = TREE_OPERAND (tree11, 0);
6988 tree111 = TREE_OPERAND (tree11, 1);
6989 STRIP_NOPS (tree110);
6990 STRIP_NOPS (tree111);
6991 if (TREE_CODE (tree110) == INTEGER_CST
6992 && 0 == compare_tree_int (tree110,
6993 TYPE_PRECISION
6994 (TREE_TYPE (TREE_OPERAND
6995 (arg0, 0))))
6996 && operand_equal_p (tree01, tree111, 0))
6997 return build2 ((code0 == LSHIFT_EXPR
6998 ? LROTATE_EXPR
6999 : RROTATE_EXPR),
7000 type, TREE_OPERAND (arg0, 0), tree01);
7002 else if (code01 == MINUS_EXPR)
7004 tree tree010, tree011;
7005 tree010 = TREE_OPERAND (tree01, 0);
7006 tree011 = TREE_OPERAND (tree01, 1);
7007 STRIP_NOPS (tree010);
7008 STRIP_NOPS (tree011);
7009 if (TREE_CODE (tree010) == INTEGER_CST
7010 && 0 == compare_tree_int (tree010,
7011 TYPE_PRECISION
7012 (TREE_TYPE (TREE_OPERAND
7013 (arg0, 0))))
7014 && operand_equal_p (tree11, tree011, 0))
7015 return build2 ((code0 != LSHIFT_EXPR
7016 ? LROTATE_EXPR
7017 : RROTATE_EXPR),
7018 type, TREE_OPERAND (arg0, 0), tree11);
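/* Editorial illustration (not part of the original source): for a
   32-bit unsigned x,

     (x << 3) + (x >> 29)         =>   x rotated left by 3
     (x << n) + (x >> (32 - n))   =>   x rotated left by n

   since the shift counts sum to the precision of x's type.  */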
7023 associate:
7024 /* In most languages, we can't associate operations on floats through
7025 parentheses. Rather than remember where the parentheses were, we
7026 don't associate floats at all, unless the user has specified
7027 -funsafe-math-optimizations. */
7029 if (! wins
7030 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7032 tree var0, con0, lit0, minus_lit0;
7033 tree var1, con1, lit1, minus_lit1;
7035 /* Split both trees into variables, constants, and literals. Then
7036 associate each group together, the constants with literals,
7037 then the result with variables. This increases the chances of
7038 literals being recombined later and of generating relocatable
7039 expressions for the sum of a constant and literal. */
7040 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7041 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7042 code == MINUS_EXPR);
7044 /* Only do something if we found more than two objects. Otherwise,
7045 nothing has changed and we risk infinite recursion. */
7046 if (2 < ((var0 != 0) + (var1 != 0)
7047 + (con0 != 0) + (con1 != 0)
7048 + (lit0 != 0) + (lit1 != 0)
7049 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7051 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7052 if (code == MINUS_EXPR)
7053 code = PLUS_EXPR;
7055 var0 = associate_trees (var0, var1, code, type);
7056 con0 = associate_trees (con0, con1, code, type);
7057 lit0 = associate_trees (lit0, lit1, code, type);
7058 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7060 /* Preserve the MINUS_EXPR if the negative part of the literal is
7061 greater than the positive part. Otherwise, the multiplicative
7062 folding code (i.e. extract_muldiv) may be fooled when
7063 unsigned constants are subtracted, like in the following
7064 example: ((X*2 + 4) - 8U)/2. */
7065 if (minus_lit0 && lit0)
7067 if (TREE_CODE (lit0) == INTEGER_CST
7068 && TREE_CODE (minus_lit0) == INTEGER_CST
7069 && tree_int_cst_lt (lit0, minus_lit0))
7071 minus_lit0 = associate_trees (minus_lit0, lit0,
7072 MINUS_EXPR, type);
7073 lit0 = 0;
7075 else
7077 lit0 = associate_trees (lit0, minus_lit0,
7078 MINUS_EXPR, type);
7079 minus_lit0 = 0;
7082 if (minus_lit0)
7084 if (con0 == 0)
7085 return fold_convert (type,
7086 associate_trees (var0, minus_lit0,
7087 MINUS_EXPR, type));
7088 else
7090 con0 = associate_trees (con0, minus_lit0,
7091 MINUS_EXPR, type);
7092 return fold_convert (type,
7093 associate_trees (var0, con0,
7094 PLUS_EXPR, type));
7098 con0 = associate_trees (con0, lit0, code, type);
7099 return fold_convert (type, associate_trees (var0, con0,
7100 code, type));
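/* Editorial illustration (not part of the original source): splitting
   each operand into variables, constants and literals lets

     (x + 1) + (y + 2)   =>   (x + y) + 3

   while the MINUS_EXPR bookkeeping above keeps unsigned cases such as
   ((X*2 + 4) - 8U)/2 from misleading extract_muldiv.  */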
7104 binary:
7105 if (wins)
7106 t1 = const_binop (code, arg0, arg1, 0);
7107 if (t1 != NULL_TREE)
7109 /* The return value should always have
7110 the same type as the original expression. */
7111 if (TREE_TYPE (t1) != type)
7112 t1 = fold_convert (type, t1);
7114 return t1;
7116 return t;
7118 case MINUS_EXPR:
7119 /* A - (-B) -> A + B */
7120 if (TREE_CODE (arg1) == NEGATE_EXPR)
7121 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
7122 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7123 if (TREE_CODE (arg0) == NEGATE_EXPR
7124 && (FLOAT_TYPE_P (type)
7125 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7126 && negate_expr_p (arg1)
7127 && reorder_operands_p (arg0, arg1))
7128 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
7129 TREE_OPERAND (arg0, 0)));
7131 if (! FLOAT_TYPE_P (type))
7133 if (! wins && integer_zerop (arg0))
7134 return negate_expr (fold_convert (type, arg1));
7135 if (integer_zerop (arg1))
7136 return non_lvalue (fold_convert (type, arg0));
7138 /* Fold A - (A & B) into ~B & A. */
7139 if (!TREE_SIDE_EFFECTS (arg0)
7140 && TREE_CODE (arg1) == BIT_AND_EXPR)
7142 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7143 return fold (build2 (BIT_AND_EXPR, type,
7144 fold (build1 (BIT_NOT_EXPR, type,
7145 TREE_OPERAND (arg1, 0))),
7146 arg0));
7147 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7148 return fold (build2 (BIT_AND_EXPR, type,
7149 fold (build1 (BIT_NOT_EXPR, type,
7150 TREE_OPERAND (arg1, 1))),
7151 arg0));
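/* Editorial illustration (not part of the original source):

     a - (a & b)   =>   ~b & a

   valid because the set bits of (a & b) are a subset of those of a,
   so the subtraction never borrows.  */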
7154 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7155 any power of 2 minus 1. */
7156 if (TREE_CODE (arg0) == BIT_AND_EXPR
7157 && TREE_CODE (arg1) == BIT_AND_EXPR
7158 && operand_equal_p (TREE_OPERAND (arg0, 0),
7159 TREE_OPERAND (arg1, 0), 0))
7161 tree mask0 = TREE_OPERAND (arg0, 1);
7162 tree mask1 = TREE_OPERAND (arg1, 1);
7163 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
7165 if (operand_equal_p (tem, mask1, 0))
7167 tem = fold (build2 (BIT_XOR_EXPR, type,
7168 TREE_OPERAND (arg0, 0), mask1));
7169 return fold (build2 (MINUS_EXPR, type, tem, mask1));
7174 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7175 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7176 return non_lvalue (fold_convert (type, arg0));
7178 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7179 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7180 (-ARG1 + ARG0) reduces to -ARG1. */
7181 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7182 return negate_expr (fold_convert (type, arg1));
7184 /* Fold &x - &x. This can happen from &x.foo - &x.
7185 This is unsafe for certain floats even in non-IEEE formats.
7186 In IEEE, it is unsafe because it gives the wrong result for NaNs.
7187 Also note that operand_equal_p is always false if an operand
7188 is volatile. */
7190 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7191 && operand_equal_p (arg0, arg1, 0))
7192 return fold_convert (type, integer_zero_node);
7194 /* A - B -> A + (-B) if B is easily negatable. */
7195 if (!wins && negate_expr_p (arg1)
7196 && ((FLOAT_TYPE_P (type)
7197 /* Avoid this transformation if B is a positive REAL_CST. */
7198 && (TREE_CODE (arg1) != REAL_CST
7199 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7200 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7201 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
7203 /* Try folding difference of addresses. */
7205 HOST_WIDE_INT diff;
7207 if ((TREE_CODE (arg0) == ADDR_EXPR
7208 || TREE_CODE (arg1) == ADDR_EXPR)
7209 && ptr_difference_const (arg0, arg1, &diff))
7210 return build_int_cst_type (type, diff);
7213 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7214 of the array. The loop optimizer sometimes produces this type of
7215 expression. */
7216 if (TREE_CODE (arg0) == ADDR_EXPR
7217 && TREE_CODE (arg1) == MULT_EXPR)
7219 tem = try_move_mult_to_index (type, MINUS_EXPR, arg0, arg1);
7220 if (tem)
7221 return fold (tem);
7224 if (TREE_CODE (arg0) == MULT_EXPR
7225 && TREE_CODE (arg1) == MULT_EXPR
7226 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7228 /* (A * C) - (B * C) -> (A-B) * C. */
7229 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7230 TREE_OPERAND (arg1, 1), 0))
7231 return fold (build2 (MULT_EXPR, type,
7232 fold (build2 (MINUS_EXPR, type,
7233 TREE_OPERAND (arg0, 0),
7234 TREE_OPERAND (arg1, 0))),
7235 TREE_OPERAND (arg0, 1)));
7236 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7237 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7238 TREE_OPERAND (arg1, 0), 0))
7239 return fold (build2 (MULT_EXPR, type,
7240 TREE_OPERAND (arg0, 0),
7241 fold (build2 (MINUS_EXPR, type,
7242 TREE_OPERAND (arg0, 1),
7243 TREE_OPERAND (arg1, 1)))));
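/* Annotation, not part of the original source: both rewrites above are the
   distributive law, e.g. x*3 - y*3 -> (x - y)*3.  For floats they are
   restricted to -funsafe-math-optimizations because rounding (and possible
   intermediate overflow) can make the two forms differ.  */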
7246 goto associate;
7248 case MULT_EXPR:
7249 /* (-A) * (-B) -> A * B */
7250 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7251 return fold (build2 (MULT_EXPR, type,
7252 TREE_OPERAND (arg0, 0),
7253 negate_expr (arg1)));
7254 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7255 return fold (build2 (MULT_EXPR, type,
7256 negate_expr (arg0),
7257 TREE_OPERAND (arg1, 0)));
7259 if (! FLOAT_TYPE_P (type))
7261 if (integer_zerop (arg1))
7262 return omit_one_operand (type, arg1, arg0);
7263 if (integer_onep (arg1))
7264 return non_lvalue (fold_convert (type, arg0));
7266 /* (a * (1 << b)) is (a << b) */
7267 if (TREE_CODE (arg1) == LSHIFT_EXPR
7268 && integer_onep (TREE_OPERAND (arg1, 0)))
7269 return fold (build2 (LSHIFT_EXPR, type, arg0,
7270 TREE_OPERAND (arg1, 1)));
7271 if (TREE_CODE (arg0) == LSHIFT_EXPR
7272 && integer_onep (TREE_OPERAND (arg0, 0)))
7273 return fold (build2 (LSHIFT_EXPR, type, arg1,
7274 TREE_OPERAND (arg0, 1)));
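/* Annotation, not part of the original source: e.g. a * (1 << 3) becomes
   a << 3, i.e. multiplication by 8 without a multiply instruction; the two
   tests handle the shift appearing as either operand of the multiply.  */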
7276 if (TREE_CODE (arg1) == INTEGER_CST
7277 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
7278 fold_convert (type, arg1),
7279 code, NULL_TREE)))
7280 return fold_convert (type, tem);
7283 else
7285 /* Maybe fold x * 0 to 0. The expressions aren't the same
7286 when x is NaN, since x * 0 is also NaN. Nor are they the
7287 same in modes with signed zeros, since multiplying a
7288 negative value by 0 gives -0, not +0. */
7289 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7290 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7291 && real_zerop (arg1))
7292 return omit_one_operand (type, arg1, arg0);
7293 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7294 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7295 && real_onep (arg1))
7296 return non_lvalue (fold_convert (type, arg0));
7298 /* Transform x * -1.0 into -x. */
7299 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7300 && real_minus_onep (arg1))
7301 return fold_convert (type, negate_expr (arg0));
7303 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7304 if (flag_unsafe_math_optimizations
7305 && TREE_CODE (arg0) == RDIV_EXPR
7306 && TREE_CODE (arg1) == REAL_CST
7307 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7309 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7310 arg1, 0);
7311 if (tem)
7312 return fold (build2 (RDIV_EXPR, type, tem,
7313 TREE_OPERAND (arg0, 1)));
7316 if (flag_unsafe_math_optimizations)
7318 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7319 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7321 /* Optimizations of root(...)*root(...). */
7322 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7324 tree rootfn, arg, arglist;
7325 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7326 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7328 /* Optimize sqrt(x)*sqrt(x) as x. */
7329 if (BUILTIN_SQRT_P (fcode0)
7330 && operand_equal_p (arg00, arg10, 0)
7331 && ! HONOR_SNANS (TYPE_MODE (type)))
7332 return arg00;
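/* Annotation, not part of the original source: sqrt(x)*sqrt(x) -> x is only
   valid under -funsafe-math-optimizations (which guards this whole block),
   since for x < 0 the product is NaN while x itself is negative; the
   HONOR_SNANS test additionally keeps signaling NaNs intact.  */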
7334 /* Optimize root(x)*root(y) as root(x*y). */
7335 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7336 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7337 arglist = build_tree_list (NULL_TREE, arg);
7338 return build_function_call_expr (rootfn, arglist);
7341 /* Optimize expN(x)*expN(y) as expN(x+y). */
7342 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7344 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7345 tree arg = build2 (PLUS_EXPR, type,
7346 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7347 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7348 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7349 return build_function_call_expr (expfn, arglist);
7352 /* Optimizations of pow(...)*pow(...). */
7353 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7354 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7355 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7357 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7358 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7359 1)));
7360 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7361 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7362 1)));
7364 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7365 if (operand_equal_p (arg01, arg11, 0))
7367 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7368 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7369 tree arglist = tree_cons (NULL_TREE, fold (arg),
7370 build_tree_list (NULL_TREE,
7371 arg01));
7372 return build_function_call_expr (powfn, arglist);
7375 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7376 if (operand_equal_p (arg00, arg10, 0))
7378 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7379 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7380 tree arglist = tree_cons (NULL_TREE, arg00,
7381 build_tree_list (NULL_TREE,
7382 arg));
7383 return build_function_call_expr (powfn, arglist);
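/* Annotation, not part of the original source: these are the exact real
   identities x**y * z**y = (x*z)**y and x**y * x**z = x**(y+z), e.g.
   pow(x,2.0)*pow(x,3.0) -> pow(x,5.0).  Floating-point rounding can differ
   between the two forms, hence the -funsafe-math-optimizations guard.  */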
7387 /* Optimize tan(x)*cos(x) as sin(x). */
7388 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7389 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7390 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7391 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7392 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7393 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7394 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7395 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7397 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7399 if (sinfn != NULL_TREE)
7400 return build_function_call_expr (sinfn,
7401 TREE_OPERAND (arg0, 1));
7404 /* Optimize x*pow(x,c) as pow(x,c+1). */
7405 if (fcode1 == BUILT_IN_POW
7406 || fcode1 == BUILT_IN_POWF
7407 || fcode1 == BUILT_IN_POWL)
7409 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7410 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7411 1)));
7412 if (TREE_CODE (arg11) == REAL_CST
7413 && ! TREE_CONSTANT_OVERFLOW (arg11)
7414 && operand_equal_p (arg0, arg10, 0))
7416 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7417 REAL_VALUE_TYPE c;
7418 tree arg, arglist;
7420 c = TREE_REAL_CST (arg11);
7421 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7422 arg = build_real (type, c);
7423 arglist = build_tree_list (NULL_TREE, arg);
7424 arglist = tree_cons (NULL_TREE, arg0, arglist);
7425 return build_function_call_expr (powfn, arglist);
7429 /* Optimize pow(x,c)*x as pow(x,c+1). */
7430 if (fcode0 == BUILT_IN_POW
7431 || fcode0 == BUILT_IN_POWF
7432 || fcode0 == BUILT_IN_POWL)
7434 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7435 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7436 1)));
7437 if (TREE_CODE (arg01) == REAL_CST
7438 && ! TREE_CONSTANT_OVERFLOW (arg01)
7439 && operand_equal_p (arg1, arg00, 0))
7441 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7442 REAL_VALUE_TYPE c;
7443 tree arg, arglist;
7445 c = TREE_REAL_CST (arg01);
7446 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7447 arg = build_real (type, c);
7448 arglist = build_tree_list (NULL_TREE, arg);
7449 arglist = tree_cons (NULL_TREE, arg1, arglist);
7450 return build_function_call_expr (powfn, arglist);
7454 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7455 if (! optimize_size
7456 && operand_equal_p (arg0, arg1, 0))
7458 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7460 if (powfn)
7462 tree arg = build_real (type, dconst2);
7463 tree arglist = build_tree_list (NULL_TREE, arg);
7464 arglist = tree_cons (NULL_TREE, arg0, arglist);
7465 return build_function_call_expr (powfn, arglist);
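/* Annotation, not part of the original source: rewriting x*x as pow(x,2.0)
   canonicalizes it so the pow folds above can compose; as the comment notes,
   the builtin expander turns pow(x,2.0) back into x*x, so no libcall should
   result.  The !optimize_size guard presumably avoids any risk of an actual
   pow call when optimizing for size.  */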
7470 goto associate;
7472 case BIT_IOR_EXPR:
7473 bit_ior:
7474 if (integer_all_onesp (arg1))
7475 return omit_one_operand (type, arg1, arg0);
7476 if (integer_zerop (arg1))
7477 return non_lvalue (fold_convert (type, arg0));
7478 if (operand_equal_p (arg0, arg1, 0))
7479 return non_lvalue (fold_convert (type, arg0));
7481 /* ~X | X is -1. */
7482 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7483 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7485 t1 = build_int_cst (type, -1);
7486 t1 = force_fit_type (t1, 0, false, false);
7487 return omit_one_operand (type, t1, arg1);
7490 /* X | ~X is -1. */
7491 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7492 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7494 t1 = build_int_cst (type, -1);
7495 t1 = force_fit_type (t1, 0, false, false);
7496 return omit_one_operand (type, t1, arg0);
7499 t1 = distribute_bit_expr (code, type, arg0, arg1);
7500 if (t1 != NULL_TREE)
7501 return t1;
7503 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7505 This results in more efficient code for machines without a NAND
7506 instruction. Combine will canonicalize to the first form
7507 which will allow use of NAND instructions provided by the
7508 backend if they exist. */
7509 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7510 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7512 return fold (build1 (BIT_NOT_EXPR, type,
7513 build2 (BIT_AND_EXPR, type,
7514 TREE_OPERAND (arg0, 0),
7515 TREE_OPERAND (arg1, 0))));
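/* Annotation, not part of the original source: this is De Morgan's law,
   ~a | ~b == ~(a & b), replacing three operations (two NOTs and an IOR)
   with two (an AND and a NOT), or a single NAND where one exists.  */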
7518 /* See if this can be simplified into a rotate first. If that
7519 is unsuccessful continue in the association code. */
7520 goto bit_rotate;
7522 case BIT_XOR_EXPR:
7523 if (integer_zerop (arg1))
7524 return non_lvalue (fold_convert (type, arg0));
7525 if (integer_all_onesp (arg1))
7526 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7527 if (operand_equal_p (arg0, arg1, 0))
7528 return omit_one_operand (type, integer_zero_node, arg0);
7530 /* ~X ^ X is -1. */
7531 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7532 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7534 t1 = build_int_cst (type, -1);
7535 t1 = force_fit_type (t1, 0, false, false);
7536 return omit_one_operand (type, t1, arg1);
7539 /* X ^ ~X is -1. */
7540 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7541 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7543 t1 = build_int_cst (type, -1);
7544 t1 = force_fit_type (t1, 0, false, false);
7545 return omit_one_operand (type, t1, arg0);
7548 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7549 with a constant, and the two constants have no bits in common,
7550 we should treat this as a BIT_IOR_EXPR since this may produce more
7551 simplifications. */
7552 if (TREE_CODE (arg0) == BIT_AND_EXPR
7553 && TREE_CODE (arg1) == BIT_AND_EXPR
7554 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7555 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7556 && integer_zerop (const_binop (BIT_AND_EXPR,
7557 TREE_OPERAND (arg0, 1),
7558 TREE_OPERAND (arg1, 1), 0)))
7560 code = BIT_IOR_EXPR;
7561 goto bit_ior;
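/* Annotation, not part of the original source: when the two AND masks share
   no bits, no bit position can be set on both sides, so XOR and IOR agree,
   e.g. (a & 0x0f) ^ (b & 0xf0) == (a & 0x0f) | (b & 0xf0).  */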
7564 /* See if this can be simplified into a rotate first. If that
7565 is unsuccessful continue in the association code. */
7566 goto bit_rotate;
7568 case BIT_AND_EXPR:
7569 if (integer_all_onesp (arg1))
7570 return non_lvalue (fold_convert (type, arg0));
7571 if (integer_zerop (arg1))
7572 return omit_one_operand (type, arg1, arg0);
7573 if (operand_equal_p (arg0, arg1, 0))
7574 return non_lvalue (fold_convert (type, arg0));
7576 /* ~X & X is always zero. */
7577 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7578 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7579 return omit_one_operand (type, integer_zero_node, arg1);
7581 /* X & ~X is always zero. */
7582 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7583 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7584 return omit_one_operand (type, integer_zero_node, arg0);
7586 t1 = distribute_bit_expr (code, type, arg0, arg1);
7587 if (t1 != NULL_TREE)
7588 return t1;
7589 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7590 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7591 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7593 unsigned int prec
7594 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7596 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7597 && (~TREE_INT_CST_LOW (arg1)
7598 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7599 return fold_convert (type, TREE_OPERAND (arg0, 0));
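/* Annotation, not part of the original source: the widened value has zeros
   above the original precision, so an AND whose mask covers all PREC low
   bits is an identity; for unsigned char, prec == 8 and 0377 == 0xff covers
   every bit, hence ((int)c & 0377) is just (int)c.  */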
7602 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7604 This results in more efficient code for machines without a NOR
7605 instruction. Combine will canonicalize to the first form
7606 which will allow use of NOR instructions provided by the
7607 backend if they exist. */
7608 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7609 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7611 return fold (build1 (BIT_NOT_EXPR, type,
7612 build2 (BIT_IOR_EXPR, type,
7613 TREE_OPERAND (arg0, 0),
7614 TREE_OPERAND (arg1, 0))));
7617 goto associate;
7619 case RDIV_EXPR:
7620 /* Don't touch a floating-point divide by zero unless the mode
7621 of the constant can represent infinity. */
7622 if (TREE_CODE (arg1) == REAL_CST
7623 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7624 && real_zerop (arg1))
7625 return t;
7627 /* (-A) / (-B) -> A / B */
7628 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7629 return fold (build2 (RDIV_EXPR, type,
7630 TREE_OPERAND (arg0, 0),
7631 negate_expr (arg1)));
7632 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7633 return fold (build2 (RDIV_EXPR, type,
7634 negate_expr (arg0),
7635 TREE_OPERAND (arg1, 0)));
7637 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7638 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7639 && real_onep (arg1))
7640 return non_lvalue (fold_convert (type, arg0));
7642 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7643 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7644 && real_minus_onep (arg1))
7645 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7647 /* If ARG1 is a constant, we can convert this to a multiply by the
7648 reciprocal. This does not have the same rounding properties,
7649 so only do this if -funsafe-math-optimizations. We can actually
7650 always safely do it if ARG1 is a power of two, but it's hard to
7651 tell if it is or not in a portable manner. */
7652 if (TREE_CODE (arg1) == REAL_CST)
7654 if (flag_unsafe_math_optimizations
7655 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7656 arg1, 0)))
7657 return fold (build2 (MULT_EXPR, type, arg0, tem));
7658 /* Find the reciprocal if optimizing and the result is exact. */
7659 if (optimize)
7661 REAL_VALUE_TYPE r;
7662 r = TREE_REAL_CST (arg1);
7663 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
7665 tem = build_real (type, r);
7666 return fold (build2 (MULT_EXPR, type, arg0, tem));
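/* Annotation, not part of the original source: the first branch rewrites
   e.g. x / 3.0 as x * (1.0/3.0), which rounds differently and so needs
   -funsafe-math-optimizations; the exact_real_inverse branch fires only
   when the reciprocal is exactly representable (e.g. C a power of two),
   so x / 2.0 -> x * 0.5 is done whenever optimizing.  */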
7670 /* Convert A/B/C to A/(B*C). */
7671 if (flag_unsafe_math_optimizations
7672 && TREE_CODE (arg0) == RDIV_EXPR)
7673 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7674 fold (build2 (MULT_EXPR, type,
7675 TREE_OPERAND (arg0, 1), arg1))));
7677 /* Convert A/(B/C) to (A/B)*C. */
7678 if (flag_unsafe_math_optimizations
7679 && TREE_CODE (arg1) == RDIV_EXPR)
7680 return fold (build2 (MULT_EXPR, type,
7681 fold (build2 (RDIV_EXPR, type, arg0,
7682 TREE_OPERAND (arg1, 0))),
7683 TREE_OPERAND (arg1, 1)));
7685 /* Convert C1/(X*C2) into (C1/C2)/X. */
7686 if (flag_unsafe_math_optimizations
7687 && TREE_CODE (arg1) == MULT_EXPR
7688 && TREE_CODE (arg0) == REAL_CST
7689 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7691 tree tem = const_binop (RDIV_EXPR, arg0,
7692 TREE_OPERAND (arg1, 1), 0);
7693 if (tem)
7694 return fold (build2 (RDIV_EXPR, type, tem,
7695 TREE_OPERAND (arg1, 0)));
7698 if (flag_unsafe_math_optimizations)
7700 enum built_in_function fcode = builtin_mathfn_code (arg1);
7701 /* Optimize x/expN(y) into x*expN(-y). */
7702 if (BUILTIN_EXPONENT_P (fcode))
7704 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7705 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7706 tree arglist = build_tree_list (NULL_TREE,
7707 fold_convert (type, arg));
7708 arg1 = build_function_call_expr (expfn, arglist);
7709 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7712 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7713 if (fcode == BUILT_IN_POW
7714 || fcode == BUILT_IN_POWF
7715 || fcode == BUILT_IN_POWL)
7717 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7718 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7719 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7720 tree neg11 = fold_convert (type, negate_expr (arg11));
7721 tree arglist = tree_cons(NULL_TREE, arg10,
7722 build_tree_list (NULL_TREE, neg11));
7723 arg1 = build_function_call_expr (powfn, arglist);
7724 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7728 if (flag_unsafe_math_optimizations)
7730 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7731 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7733 /* Optimize sin(x)/cos(x) as tan(x). */
7734 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7735 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7736 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7737 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7738 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7740 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7742 if (tanfn != NULL_TREE)
7743 return build_function_call_expr (tanfn,
7744 TREE_OPERAND (arg0, 1));
7747 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7748 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7749 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7750 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7751 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7752 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7754 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7756 if (tanfn != NULL_TREE)
7758 tree tmp = TREE_OPERAND (arg0, 1);
7759 tmp = build_function_call_expr (tanfn, tmp);
7760 return fold (build2 (RDIV_EXPR, type,
7761 build_real (type, dconst1), tmp));
7765 /* Optimize pow(x,c)/x as pow(x,c-1). */
7766 if (fcode0 == BUILT_IN_POW
7767 || fcode0 == BUILT_IN_POWF
7768 || fcode0 == BUILT_IN_POWL)
7770 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7771 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7772 if (TREE_CODE (arg01) == REAL_CST
7773 && ! TREE_CONSTANT_OVERFLOW (arg01)
7774 && operand_equal_p (arg1, arg00, 0))
7776 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7777 REAL_VALUE_TYPE c;
7778 tree arg, arglist;
7780 c = TREE_REAL_CST (arg01);
7781 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7782 arg = build_real (type, c);
7783 arglist = build_tree_list (NULL_TREE, arg);
7784 arglist = tree_cons (NULL_TREE, arg1, arglist);
7785 return build_function_call_expr (powfn, arglist);
7789 goto binary;
7791 case TRUNC_DIV_EXPR:
7792 case ROUND_DIV_EXPR:
7793 case FLOOR_DIV_EXPR:
7794 case CEIL_DIV_EXPR:
7795 case EXACT_DIV_EXPR:
7796 if (integer_onep (arg1))
7797 return non_lvalue (fold_convert (type, arg0));
7798 if (integer_zerop (arg1))
7799 return t;
7800 /* X / -1 is -X. */
7801 if (!TYPE_UNSIGNED (type)
7802 && TREE_CODE (arg1) == INTEGER_CST
7803 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7804 && TREE_INT_CST_HIGH (arg1) == -1)
7805 return fold_convert (type, negate_expr (arg0));
7807 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7808 operation, EXACT_DIV_EXPR.
7810 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7811 At one time others generated faster code, but it's not clear if they do
7812 after the last round of changes to the DIV code in expmed.c. */
7813 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7814 && multiple_of_p (type, arg0, arg1))
7815 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7817 if (TREE_CODE (arg1) == INTEGER_CST
7818 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7819 code, NULL_TREE)))
7820 return fold_convert (type, tem);
7822 goto binary;
7824 case CEIL_MOD_EXPR:
7825 case FLOOR_MOD_EXPR:
7826 case ROUND_MOD_EXPR:
7827 case TRUNC_MOD_EXPR:
7828 if (integer_onep (arg1))
7829 return omit_one_operand (type, integer_zero_node, arg0);
7830 if (integer_zerop (arg1))
7831 return t;
7833 /* X % -1 is zero. */
7834 if (!TYPE_UNSIGNED (type)
7835 && TREE_CODE (arg1) == INTEGER_CST
7836 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7837 && TREE_INT_CST_HIGH (arg1) == -1)
7838 return omit_one_operand (type, integer_zero_node, arg0);
7840 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
7841 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
7842 if (code == TRUNC_MOD_EXPR
7843 && TYPE_UNSIGNED (type)
7844 && integer_pow2p (arg1))
7846 unsigned HOST_WIDE_INT high, low;
7847 tree mask;
7848 int l;
7850 l = tree_log2 (arg1);
7851 if (l >= HOST_BITS_PER_WIDE_INT)
7853 high = ((unsigned HOST_WIDE_INT) 1
7854 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
7855 low = -1;
7857 else
7859 high = 0;
7860 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
7863 mask = build_int_cst_wide (type, low, high);
7864 return fold (build2 (BIT_AND_EXPR, type,
7865 fold_convert (type, arg0), mask));
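/* Annotation, not part of the original source: for unsigned X and C = 2**l,
   X % C keeps exactly the low l bits, e.g. X % 8 -> X & 7 (l == 3,
   low == (1 << 3) - 1).  The high/low split builds masks wider than one
   HOST_WIDE_INT when l >= HOST_BITS_PER_WIDE_INT.  */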
7868 /* X % -C is the same as X % C. */
7869 if (code == TRUNC_MOD_EXPR
7870 && !TYPE_UNSIGNED (type)
7871 && TREE_CODE (arg1) == INTEGER_CST
7872 && TREE_INT_CST_HIGH (arg1) < 0
7873 && !flag_trapv
7874 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
7875 && !sign_bit_p (arg1, arg1))
7876 return fold (build2 (code, type, fold_convert (type, arg0),
7877 fold_convert (type, negate_expr (arg1))));
7879 /* X % -Y is the same as X % Y. */
7880 if (code == TRUNC_MOD_EXPR
7881 && !TYPE_UNSIGNED (type)
7882 && TREE_CODE (arg1) == NEGATE_EXPR
7883 && !flag_trapv)
7884 return fold (build2 (code, type, fold_convert (type, arg0),
7885 fold_convert (type, TREE_OPERAND (arg1, 0))));
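/* Annotation, not part of the original source: the sign of a TRUNC_MOD
   result follows the dividend only, so negating the divisor never changes
   the value.  INT_MIN is excluded above because -INT_MIN overflows (the
   sign_bit_p test), and -ftrapv is excluded because the introduced
   negation could trap.  */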
7887 if (TREE_CODE (arg1) == INTEGER_CST
7888 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7889 code, NULL_TREE)))
7890 return fold_convert (type, tem);
7892 goto binary;
7894 case LROTATE_EXPR:
7895 case RROTATE_EXPR:
7896 if (integer_all_onesp (arg0))
7897 return omit_one_operand (type, arg0, arg1);
7898 goto shift;
7900 case RSHIFT_EXPR:
7901 /* Optimize -1 >> x for arithmetic right shifts. */
7902 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7903 return omit_one_operand (type, arg0, arg1);
7904 /* ... fall through ... */
7906 case LSHIFT_EXPR:
7907 shift:
7908 if (integer_zerop (arg1))
7909 return non_lvalue (fold_convert (type, arg0));
7910 if (integer_zerop (arg0))
7911 return omit_one_operand (type, arg0, arg1);
7913 /* Since a negative shift count is not well-defined,
7914 don't try to compute it in the compiler. */
7915 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7916 return t;
7917 /* Rewrite an LROTATE_EXPR by a constant into an
7918 RROTATE_EXPR by a new constant. */
7919 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7921 tree tem = build_int_cst (NULL_TREE,
7922 GET_MODE_BITSIZE (TYPE_MODE (type)));
7923 tem = fold_convert (TREE_TYPE (arg1), tem);
7924 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7925 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
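/* Annotation, not part of the original source: a left rotate by N equals a
   right rotate by WIDTH - N, e.g. on a 32-bit type a rotate-left by 5
   becomes a rotate-right by 27; this canonicalizes all constant rotates
   to RROTATE_EXPR.  */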
7928 /* If we have a rotate of a bit operation with the rotate count and
7929 the second operand of the bit operation both constant,
7930 permute the two operations. */
7931 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7932 && (TREE_CODE (arg0) == BIT_AND_EXPR
7933 || TREE_CODE (arg0) == BIT_IOR_EXPR
7934 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7935 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7936 return fold (build2 (TREE_CODE (arg0), type,
7937 fold (build2 (code, type,
7938 TREE_OPERAND (arg0, 0), arg1)),
7939 fold (build2 (code, type,
7940 TREE_OPERAND (arg0, 1), arg1))));
7942 /* Two consecutive rotates adding up to the width of the mode can
7943 be ignored. */
7944 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7945 && TREE_CODE (arg0) == RROTATE_EXPR
7946 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7947 && TREE_INT_CST_HIGH (arg1) == 0
7948 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7949 && ((TREE_INT_CST_LOW (arg1)
7950 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7951 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7952 return TREE_OPERAND (arg0, 0);
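/* Annotation, not part of the original source: e.g. on a 32-bit type,
   rotating right by 10 and then by 22 rotates by 32 in total, which is
   the identity, so the innermost operand is returned unchanged.  */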
7954 goto binary;
7956 case MIN_EXPR:
7957 if (operand_equal_p (arg0, arg1, 0))
7958 return omit_one_operand (type, arg0, arg1);
7959 if (INTEGRAL_TYPE_P (type)
7960 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7961 return omit_one_operand (type, arg1, arg0);
7962 goto associate;
7964 case MAX_EXPR:
7965 if (operand_equal_p (arg0, arg1, 0))
7966 return omit_one_operand (type, arg0, arg1);
7967 if (INTEGRAL_TYPE_P (type)
7968 && TYPE_MAX_VALUE (type)
7969 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7970 return omit_one_operand (type, arg1, arg0);
7971 goto associate;
7973 case TRUTH_NOT_EXPR:
7974 /* The argument to invert_truthvalue must have Boolean type. */
7975 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7976 arg0 = fold_convert (boolean_type_node, arg0);
7978 /* Note that the operand of this must be an int
7979 and its values must be 0 or 1.
7980 ("true" is a fixed value perhaps depending on the language,
7981 but we don't handle values other than 1 correctly yet.) */
7982 tem = invert_truthvalue (arg0);
7983 /* Avoid infinite recursion. */
7984 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7986 tem = fold_single_bit_test (code, arg0, arg1, type);
7987 if (tem)
7988 return tem;
7989 return t;
7991 return fold_convert (type, tem);
7993 case TRUTH_ANDIF_EXPR:
7994 /* Note that the operands of this must be ints
7995 and their values must be 0 or 1.
7996 ("true" is a fixed value perhaps depending on the language.) */
7997 /* If first arg is constant zero, return it. */
7998 if (integer_zerop (arg0))
7999 return fold_convert (type, arg0);
8000 case TRUTH_AND_EXPR:
8001 /* If either arg is constant true, drop it. */
8002 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8003 return non_lvalue (fold_convert (type, arg1));
8004 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8005 /* Preserve sequence points. */
8006 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8007 return non_lvalue (fold_convert (type, arg0));
8008 /* If second arg is constant zero, result is zero, but first arg
8009 must be evaluated. */
8010 if (integer_zerop (arg1))
8011 return omit_one_operand (type, arg1, arg0);
8012 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8013 case will be handled here. */
8014 if (integer_zerop (arg0))
8015 return omit_one_operand (type, arg0, arg1);
8017 /* !X && X is always false. */
8018 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8019 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8020 return omit_one_operand (type, integer_zero_node, arg1);
8021 /* X && !X is always false. */
8022 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8023 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8024 return omit_one_operand (type, integer_zero_node, arg0);
8026 truth_andor:
8027 /* We only do these simplifications if we are optimizing. */
8028 if (!optimize)
8029 return t;
8031 /* Check for things like (A || B) && (A || C). We can convert this
8032 to A || (B && C). Note that either operator can be any of the four
8033 truth and/or operations and the transformation will still be
8034 valid. Also note that we only care about order for the
8035 ANDIF and ORIF operators. If B contains side effects, this
8036 might change the truth-value of A. */
8037 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8038 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8039 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8040 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8041 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8042 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8044 tree a00 = TREE_OPERAND (arg0, 0);
8045 tree a01 = TREE_OPERAND (arg0, 1);
8046 tree a10 = TREE_OPERAND (arg1, 0);
8047 tree a11 = TREE_OPERAND (arg1, 1);
8048 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8049 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8050 && (code == TRUTH_AND_EXPR
8051 || code == TRUTH_OR_EXPR));
8053 if (operand_equal_p (a00, a10, 0))
8054 return fold (build2 (TREE_CODE (arg0), type, a00,
8055 fold (build2 (code, type, a01, a11))));
8056 else if (commutative && operand_equal_p (a00, a11, 0))
8057 return fold (build2 (TREE_CODE (arg0), type, a00,
8058 fold (build2 (code, type, a01, a10))));
8059 else if (commutative && operand_equal_p (a01, a10, 0))
8060 return fold (build2 (TREE_CODE (arg0), type, a01,
8061 fold (build2 (code, type, a00, a11))));
8063 /* This case is tricky because we must either have commutative
8064 operators or else A10 must not have side-effects. */
8066 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8067 && operand_equal_p (a01, a11, 0))
8068 return fold (build2 (TREE_CODE (arg0), type,
8069 fold (build2 (code, type, a00, a10)),
8070 a01));
8073 /* See if we can build a range comparison. */
8074 if (0 != (tem = fold_range_test (t)))
8075 return tem;
8077 /* Check for the possibility of merging component references. If our
8078 lhs is another similar operation, try to merge its rhs with our
8079 rhs. Then try to merge our lhs and rhs. */
8080 if (TREE_CODE (arg0) == code
8081 && 0 != (tem = fold_truthop (code, type,
8082 TREE_OPERAND (arg0, 1), arg1)))
8083 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8085 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8086 return tem;
8088 return t;
8090 case TRUTH_ORIF_EXPR:
8091 /* Note that the operands of this must be ints
8092 and their values must be 0 or 1.
8093 ("true" is a fixed value perhaps depending on the language.) */
8094 /* If first arg is constant true, return it. */
8095 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8096 return fold_convert (type, arg0);
8097 case TRUTH_OR_EXPR:
8098 /* If either arg is constant zero, drop it. */
8099 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8100 return non_lvalue (fold_convert (type, arg1));
8101 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8102 /* Preserve sequence points. */
8103 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8104 return non_lvalue (fold_convert (type, arg0));
8105 /* If second arg is constant true, result is true, but we must
8106 evaluate first arg. */
8107 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8108 return omit_one_operand (type, arg1, arg0);
8109 /* Likewise for first arg, but note this only occurs here for
8110 TRUTH_OR_EXPR. */
8111 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8112 return omit_one_operand (type, arg0, arg1);
8114 /* !X || X is always true. */
8115 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8116 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8117 return omit_one_operand (type, integer_one_node, arg1);
8118 /* X || !X is always true. */
8119 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8120 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8121 return omit_one_operand (type, integer_one_node, arg0);
8123 goto truth_andor;
8125 case TRUTH_XOR_EXPR:
8126 /* If the second arg is constant zero, drop it. */
8127 if (integer_zerop (arg1))
8128 return non_lvalue (fold_convert (type, arg0));
8129 /* If the second arg is constant true, this is a logical inversion. */
8130 if (integer_onep (arg1))
8131 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
8132 /* Identical arguments cancel to zero. */
8133 if (operand_equal_p (arg0, arg1, 0))
8134 return omit_one_operand (type, integer_zero_node, arg0);
8136 /* !X ^ X is always true. */
8137 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8138 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8139 return omit_one_operand (type, integer_one_node, arg1);
8141 /* X ^ !X is always true. */
8142 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8143 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8144 return omit_one_operand (type, integer_one_node, arg0);
8146 return t;
8148 case EQ_EXPR:
8149 case NE_EXPR:
8150 case LT_EXPR:
8151 case GT_EXPR:
8152 case LE_EXPR:
8153 case GE_EXPR:
8154 /* If one arg is a real or integer constant, put it last. */
8155 if (tree_swap_operands_p (arg0, arg1, true))
8156 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
8158 /* If this is an equality comparison of the address of a non-weak
8159 object against zero, then we know the result. */
8160 if ((code == EQ_EXPR || code == NE_EXPR)
8161 && TREE_CODE (arg0) == ADDR_EXPR
8162 && DECL_P (TREE_OPERAND (arg0, 0))
8163 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8164 && integer_zerop (arg1))
8165 return constant_boolean_node (code != EQ_EXPR, type);
8167 /* If this is an equality comparison of the address of two non-weak,
8168 unaliased symbols neither of which are extern (since we do not
8169 have access to attributes for externs), then we know the result. */
8170 if ((code == EQ_EXPR || code == NE_EXPR)
8171 && TREE_CODE (arg0) == ADDR_EXPR
8172 && DECL_P (TREE_OPERAND (arg0, 0))
8173 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8174 && ! lookup_attribute ("alias",
8175 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8176 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8177 && TREE_CODE (arg1) == ADDR_EXPR
8178 && DECL_P (TREE_OPERAND (arg1, 0))
8179 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8180 && ! lookup_attribute ("alias",
8181 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8182 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8183 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
8184 ? code == EQ_EXPR : code != EQ_EXPR,
8185 type);
8187 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8189 tree targ0 = strip_float_extensions (arg0);
8190 tree targ1 = strip_float_extensions (arg1);
8191 tree newtype = TREE_TYPE (targ0);
8193 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8194 newtype = TREE_TYPE (targ1);
8196 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8197 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8198 return fold (build2 (code, type, fold_convert (newtype, targ0),
8199 fold_convert (newtype, targ1)));
8201 /* (-a) CMP (-b) -> b CMP a */
8202 if (TREE_CODE (arg0) == NEGATE_EXPR
8203 && TREE_CODE (arg1) == NEGATE_EXPR)
8204 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
8205 TREE_OPERAND (arg0, 0)));
8207 if (TREE_CODE (arg1) == REAL_CST)
8209 REAL_VALUE_TYPE cst;
8210 cst = TREE_REAL_CST (arg1);
8212 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8213 if (TREE_CODE (arg0) == NEGATE_EXPR)
8214 return
8215 fold (build2 (swap_tree_comparison (code), type,
8216 TREE_OPERAND (arg0, 0),
8217 build_real (TREE_TYPE (arg1),
8218 REAL_VALUE_NEGATE (cst))));
8220 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8221 /* a CMP (-0) -> a CMP 0 */
8222 if (REAL_VALUE_MINUS_ZERO (cst))
8223 return fold (build2 (code, type, arg0,
8224 build_real (TREE_TYPE (arg1), dconst0)));
8226 /* x != NaN is always true, other ops are always false. */
8227 if (REAL_VALUE_ISNAN (cst)
8228 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8230 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8231 return omit_one_operand (type, tem, arg0);
8234 /* Fold comparisons against infinity. */
8235 if (REAL_VALUE_ISINF (cst))
8237 tem = fold_inf_compare (code, type, arg0, arg1);
8238 if (tem != NULL_TREE)
8239 return tem;
8243 /* If this is a comparison of a real constant with a PLUS_EXPR
8244 or a MINUS_EXPR of a real constant, we can convert it into a
8245 comparison with a revised real constant as long as no overflow
8246 occurs when unsafe_math_optimizations are enabled. */
8247 if (flag_unsafe_math_optimizations
8248 && TREE_CODE (arg1) == REAL_CST
8249 && (TREE_CODE (arg0) == PLUS_EXPR
8250 || TREE_CODE (arg0) == MINUS_EXPR)
8251 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8252 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8253 ? MINUS_EXPR : PLUS_EXPR,
8254 arg1, TREE_OPERAND (arg0, 1), 0))
8255 && ! TREE_CONSTANT_OVERFLOW (tem))
8256 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8258 /* Likewise, we can simplify a comparison of a real constant with
8259 a MINUS_EXPR whose first operand is also a real constant, i.e.
8260 (c1 - x) < c2 becomes x > c1-c2. */
8261 if (flag_unsafe_math_optimizations
8262 && TREE_CODE (arg1) == REAL_CST
8263 && TREE_CODE (arg0) == MINUS_EXPR
8264 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8265 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8266 arg1, 0))
8267 && ! TREE_CONSTANT_OVERFLOW (tem))
8268 return fold (build2 (swap_tree_comparison (code), type,
8269 TREE_OPERAND (arg0, 1), tem));
8271 /* Fold comparisons against built-in math functions. */
8272 if (TREE_CODE (arg1) == REAL_CST
8273 && flag_unsafe_math_optimizations
8274 && ! flag_errno_math)
8276 enum built_in_function fcode = builtin_mathfn_code (arg0);
8278 if (fcode != END_BUILTINS)
8280 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8281 if (tem != NULL_TREE)
8282 return tem;
8287 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8288 if (TREE_CONSTANT (arg1)
8289 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8290 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8291 /* This optimization is invalid for ordered comparisons
8292 if CONST+INCR overflows or if foo+incr might overflow.
8293 This optimization is invalid for floating point due to rounding.
8294 For pointer types we assume overflow doesn't happen. */
8295 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8296 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8297 && (code == EQ_EXPR || code == NE_EXPR))))
8299 tree varop, newconst;
8301 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8303 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
8304 arg1, TREE_OPERAND (arg0, 1)));
8305 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8306 TREE_OPERAND (arg0, 0),
8307 TREE_OPERAND (arg0, 1));
8309 else
8311 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
8312 arg1, TREE_OPERAND (arg0, 1)));
8313 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8314 TREE_OPERAND (arg0, 0),
8315 TREE_OPERAND (arg0, 1));
8319 /* If VAROP is a reference to a bitfield, we must mask
8320 the constant by the width of the field. */
8321 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8322 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8323 && host_integerp (DECL_SIZE (TREE_OPERAND
8324 (TREE_OPERAND (varop, 0), 1)), 1))
8326 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8327 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8328 tree folded_compare, shift;
8330 /* First check whether the comparison would come out
8331 always the same. If we don't do that we would
8332 change the meaning with the masking. */
8333 folded_compare = fold (build2 (code, type,
8334 TREE_OPERAND (varop, 0), arg1));
8335 if (integer_zerop (folded_compare)
8336 || integer_onep (folded_compare))
8337 return omit_one_operand (type, folded_compare, varop);
8339 shift = build_int_cst (NULL_TREE,
8340 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8341 shift = fold_convert (TREE_TYPE (varop), shift);
8342 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8343 newconst, shift));
8344 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8345 newconst, shift));
8348 return fold (build2 (code, type, varop, newconst));
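/* Annotation, not part of the original source: e.g. (i++ == 5) becomes
   (++i == 6), which is true in exactly the same executions; the guard above
   restricts this to EQ/NE on integers, since overflow could change an
   ordered comparison.  The left/right shift pair truncates the new constant
   to the bitfield's width so the comparison stays representable.  */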
8351 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8352 This transformation affects the cases which are handled in later
8353 optimizations involving comparisons with non-negative constants. */
8354 if (TREE_CODE (arg1) == INTEGER_CST
8355 && TREE_CODE (arg0) != INTEGER_CST
8356 && tree_int_cst_sgn (arg1) > 0)
8358 switch (code)
8360 case GE_EXPR:
8361 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8362 return fold (build2 (GT_EXPR, type, arg0, arg1));
8364 case LT_EXPR:
8365 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8366 return fold (build2 (LE_EXPR, type, arg0, arg1));
8368 default:
8369 break;
8373 /* Comparisons with the highest or lowest possible integer of
8374 the specified size will have known values.
8376 This is quite similar to fold_relational_hi_lo; however, my
8377 attempts to share the code have been nothing but trouble.
8378 I give up for now. */
8380 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8382 if (TREE_CODE (arg1) == INTEGER_CST
8383 && ! TREE_CONSTANT_OVERFLOW (arg1)
8384 && width <= HOST_BITS_PER_WIDE_INT
8385 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8386 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8388 unsigned HOST_WIDE_INT signed_max;
8389 unsigned HOST_WIDE_INT max, min;
8391 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
8393 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8395 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8396 min = 0;
8398 else
8400 max = signed_max;
8401 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8404 if (TREE_INT_CST_HIGH (arg1) == 0
8405 && TREE_INT_CST_LOW (arg1) == max)
8406 switch (code)
8408 case GT_EXPR:
8409 return omit_one_operand (type, integer_zero_node, arg0);
8411 case GE_EXPR:
8412 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8414 case LE_EXPR:
8415 return omit_one_operand (type, integer_one_node, arg0);
8417 case LT_EXPR:
8418 return fold (build2 (NE_EXPR, type, arg0, arg1));
8420 /* The GE_EXPR and LT_EXPR cases above are not normally
8421 reached because of previous transformations. */
8423 default:
8424 break;
8426 else if (TREE_INT_CST_HIGH (arg1) == 0
8427 && TREE_INT_CST_LOW (arg1) == max - 1)
8428 switch (code)
8430 case GT_EXPR:
8431 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8432 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8433 case LE_EXPR:
8434 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8435 return fold (build2 (NE_EXPR, type, arg0, arg1));
8436 default:
8437 break;
8439 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8440 && TREE_INT_CST_LOW (arg1) == min)
8441 switch (code)
8443 case LT_EXPR:
8444 return omit_one_operand (type, integer_zero_node, arg0);
8446 case LE_EXPR:
8447 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8449 case GE_EXPR:
8450 return omit_one_operand (type, integer_one_node, arg0);
8452 case GT_EXPR:
8453 return fold (build2 (NE_EXPR, type, arg0, arg1));
8455 default:
8456 break;
8458 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8459 && TREE_INT_CST_LOW (arg1) == min + 1)
8460 switch (code)
8462 case GE_EXPR:
8463 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8464 return fold (build2 (NE_EXPR, type, arg0, arg1));
8465 case LT_EXPR:
8466 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8467 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8468 default:
8469 break;
8472 else if (!in_gimple_form
8473 && TREE_INT_CST_HIGH (arg1) == 0
8474 && TREE_INT_CST_LOW (arg1) == signed_max
8475 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8476 /* signed_type does not work on pointer types. */
8477 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8479 /* The following case also applies to X < signed_max+1
8480 and X >= signed_max+1 because of previous transformations. */
8481 if (code == LE_EXPR || code == GT_EXPR)
8483 tree st0, st1;
8484 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8485 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8486 return fold
8487 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
8488 type, fold_convert (st0, arg0),
8489 fold_convert (st1, integer_zero_node)));
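/* Annotation, not part of the original source: for an unsigned 32-bit x,
   x <= 0x7fffffff holds iff the sign bit is clear, i.e. (int) x >= 0, and
   x > 0x7fffffff iff (int) x < 0; a comparison against zero in the signed
   type is usually cheaper.  */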
8495 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8496 a MINUS_EXPR of a constant, we can convert it into a comparison with
8497 a revised constant as long as no overflow occurs. */
8498 if ((code == EQ_EXPR || code == NE_EXPR)
8499 && TREE_CODE (arg1) == INTEGER_CST
8500 && (TREE_CODE (arg0) == PLUS_EXPR
8501 || TREE_CODE (arg0) == MINUS_EXPR)
8502 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8503 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8504 ? MINUS_EXPR : PLUS_EXPR,
8505 arg1, TREE_OPERAND (arg0, 1), 0))
8506 && ! TREE_CONSTANT_OVERFLOW (tem))
8507 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8509 /* Similarly for a NEGATE_EXPR. */
8510 else if ((code == EQ_EXPR || code == NE_EXPR)
8511 && TREE_CODE (arg0) == NEGATE_EXPR
8512 && TREE_CODE (arg1) == INTEGER_CST
8513 && 0 != (tem = negate_expr (arg1))
8514 && TREE_CODE (tem) == INTEGER_CST
8515 && ! TREE_CONSTANT_OVERFLOW (tem))
8516 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8518 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8519 for !=. Don't do this for ordered comparisons due to overflow. */
8520 else if ((code == NE_EXPR || code == EQ_EXPR)
8521 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8522 return fold (build2 (code, type,
8523 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8525 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8526 && TREE_CODE (arg0) == NOP_EXPR)
8528 /* If we are widening one operand of an integer comparison,
8529 see if the other operand is similarly being widened. Perhaps we
8530 can do the comparison in the narrower type. */
8531 tem = fold_widened_comparison (code, type, arg0, arg1);
8532 if (tem)
8533 return tem;
8535 /* Or if we are changing signedness. */
8536 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8537 if (tem)
8538 return tem;
8541 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8542 constant, we can simplify it. */
8543 else if (TREE_CODE (arg1) == INTEGER_CST
8544 && (TREE_CODE (arg0) == MIN_EXPR
8545 || TREE_CODE (arg0) == MAX_EXPR)
8546 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8547 return optimize_minmax_comparison (t);
8549 /* If we are comparing an ABS_EXPR with a constant, we can
8550 convert all the cases into explicit comparisons, but they may
8551 well not be faster than doing the ABS and one comparison.
8552 But ABS (X) <= C is a range comparison, which becomes a subtraction
8553 and a comparison, and is probably faster. */
8554 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8555 && TREE_CODE (arg0) == ABS_EXPR
8556 && ! TREE_SIDE_EFFECTS (arg0)
8557 && (0 != (tem = negate_expr (arg1)))
8558 && TREE_CODE (tem) == INTEGER_CST
8559 && ! TREE_CONSTANT_OVERFLOW (tem))
8560 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8561 build2 (GE_EXPR, type,
8562 TREE_OPERAND (arg0, 0), tem),
8563 build2 (LE_EXPR, type,
8564 TREE_OPERAND (arg0, 0), arg1)));
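/* Annotation, not part of the original source: e.g. abs(x) <= 5 becomes
   x >= -5 && x <= 5, a range test that avoids computing the ABS;
   TREE_SIDE_EFFECTS is checked above because the operand of the ABS is
   used twice in the result.  */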
8566 /* If this is an EQ or NE comparison with zero and ARG0 is
8567 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8568 two operations, but the latter can be done in one less insn
8569 on machines that have only two-operand insns or on which a
8570 constant cannot be the first operand. */
8571 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8572 && TREE_CODE (arg0) == BIT_AND_EXPR)
8574 tree arg00 = TREE_OPERAND (arg0, 0);
8575 tree arg01 = TREE_OPERAND (arg0, 1);
8576 if (TREE_CODE (arg00) == LSHIFT_EXPR
8577 && integer_onep (TREE_OPERAND (arg00, 0)))
8578 return
8579 fold (build2 (code, type,
8580 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8581 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8582 arg01, TREE_OPERAND (arg00, 1)),
8583 fold_convert (TREE_TYPE (arg0),
8584 integer_one_node)),
8585 arg1));
8586 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8587 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8588 return
8589 fold (build2 (code, type,
8590 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8591 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8592 arg00, TREE_OPERAND (arg01, 1)),
8593 fold_convert (TREE_TYPE (arg0),
8594 integer_one_node)),
8595 arg1));
8598 /* If this is an NE or EQ comparison of zero against the result of a
8599 signed MOD operation whose second operand is a power of 2, make
8600 the MOD operation unsigned since it is simpler and equivalent. */
8601 if ((code == NE_EXPR || code == EQ_EXPR)
8602 && integer_zerop (arg1)
8603 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8604 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8605 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8606 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8607 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8608 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8610 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8611 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
8612 fold_convert (newtype,
8613 TREE_OPERAND (arg0, 0)),
8614 fold_convert (newtype,
8615 TREE_OPERAND (arg0, 1))));
8617 return fold (build2 (code, type, newmod,
8618 fold_convert (newtype, arg1)));
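/* Annotation, not part of the original source: whether x % 2**k is zero
   depends only on the low k bits, which two's complement makes independent
   of sign, e.g. x % 4 == 0 iff ((unsigned) x) % 4 == 0; the unsigned MOD
   then folds to a simple mask by the power-of-two rule above.  */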
8621 /* If this is an NE comparison of zero with an AND of one, remove the
8622 comparison since the AND will give the correct value. */
8623 if (code == NE_EXPR && integer_zerop (arg1)
8624 && TREE_CODE (arg0) == BIT_AND_EXPR
8625 && integer_onep (TREE_OPERAND (arg0, 1)))
8626 return fold_convert (type, arg0);
8628 /* If we have (A & C) == C where C is a power of 2, convert this into
8629 (A & C) != 0. Similarly for NE_EXPR. */
8630 if ((code == EQ_EXPR || code == NE_EXPR)
8631 && TREE_CODE (arg0) == BIT_AND_EXPR
8632 && integer_pow2p (TREE_OPERAND (arg0, 1))
8633 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8634 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8635 arg0, fold_convert (TREE_TYPE (arg0),
8636 integer_zero_node)));
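/* Annotation, not part of the original source: with C a power of two,
   A & C is either 0 or C, so (A & C) == C iff (A & C) != 0; e.g.
   (x & 8) == 8 becomes (x & 8) != 0, which often maps to a bit test.  */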
8638 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8639 2, then fold the expression into shifts and logical operations. */
8640 tem = fold_single_bit_test (code, arg0, arg1, type);
8641 if (tem)
8642 return tem;
8644 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8645 Similarly for NE_EXPR. */
8646 if ((code == EQ_EXPR || code == NE_EXPR)
8647 && TREE_CODE (arg0) == BIT_AND_EXPR
8648 && TREE_CODE (arg1) == INTEGER_CST
8649 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8651 tree notc = fold (build1 (BIT_NOT_EXPR,
8652 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8653 TREE_OPERAND (arg0, 1)));
8654 tree dandnotc = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8655 arg1, notc));
8656 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8657 if (integer_nonzerop (dandnotc))
8658 return omit_one_operand (type, rslt, arg0);
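/* Annotation, not part of the original source: any bit of D outside the
   mask C can never survive A & C, so the equality is statically known,
   e.g. (x & 6) == 1 is always false (and (x & 6) != 1 always true).  */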
8661 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8662 Similarly for NE_EXPR. */
8663 if ((code == EQ_EXPR || code == NE_EXPR)
8664 && TREE_CODE (arg0) == BIT_IOR_EXPR
8665 && TREE_CODE (arg1) == INTEGER_CST
8666 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8668 tree notd = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8669 tree candnotd = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8670 TREE_OPERAND (arg0, 1), notd));
8671 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8672 if (integer_nonzerop (candnotd))
8673 return omit_one_operand (type, rslt, arg0);
8676 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8677 and similarly for >= into !=. */
8678 if ((code == LT_EXPR || code == GE_EXPR)
8679 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8680 && TREE_CODE (arg1) == LSHIFT_EXPR
8681 && integer_onep (TREE_OPERAND (arg1, 0)))
8682 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8683 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8684 TREE_OPERAND (arg1, 1)),
8685 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8687 else if ((code == LT_EXPR || code == GE_EXPR)
8688 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8689 && (TREE_CODE (arg1) == NOP_EXPR
8690 || TREE_CODE (arg1) == CONVERT_EXPR)
8691 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8692 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8693 return
8694 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8695 fold_convert (TREE_TYPE (arg0),
8696 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8697 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8698 1))),
8699 fold_convert (TREE_TYPE (arg0), integer_zero_node));
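/* Annotation, not part of the original source: for unsigned X,
   X < 2**Y iff no bit at position >= Y is set, i.e. (X >> Y) == 0, and
   X >= 2**Y iff (X >> Y) != 0; the second form above also handles the
   shift hidden under a NOP_EXPR/CONVERT_EXPR.  */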
8701 /* Simplify comparison of something with itself. (For IEEE
8702 floating-point, we can only do some of these simplifications.) */
8703 if (operand_equal_p (arg0, arg1, 0))
8705 switch (code)
8707 case EQ_EXPR:
8708 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8709 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8710 return constant_boolean_node (1, type);
8711 break;
8713 case GE_EXPR:
8714 case LE_EXPR:
8715 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8716 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8717 return constant_boolean_node (1, type);
8718 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8720 case NE_EXPR:
8721 /* For NE, we can only do this simplification if the type is integer
8722 or we don't honor IEEE floating point NaNs. */
8723 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8724 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8725 break;
8726 /* ... fall through ... */
8727 case GT_EXPR:
8728 case LT_EXPR:
8729 return constant_boolean_node (0, type);
8730 default:
8731 gcc_unreachable ();
8735 /* If we are comparing an expression that just has comparisons
8736 of two integer values, arithmetic expressions of those comparisons,
8737 and constants, we can simplify it. There are only three cases
8738 to check: the two values can either be equal, the first can be
8739 greater, or the second can be greater. Fold the expression for
8740 those three values. Since each value must be 0 or 1, we have
8741 eight possibilities, each of which corresponds to the constant 0
8742 or 1 or one of the six possible comparisons.
8744 This handles common cases like (a > b) == 0 but also handles
8745 expressions like ((x > y) - (y > x)) > 0, which supposedly
8746 occur in macroized code. */
8748 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8750 tree cval1 = 0, cval2 = 0;
8751 int save_p = 0;
8753 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8754 /* Don't handle degenerate cases here; they should already
8755 have been handled anyway. */
8756 && cval1 != 0 && cval2 != 0
8757 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8758 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8759 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8760 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8761 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8762 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8763 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8765 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8766 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8768 /* We can't just pass T to eval_subst in case cval1 or cval2
8769 was the same as ARG1. */
8771 tree high_result
8772 = fold (build2 (code, type,
8773 eval_subst (arg0, cval1, maxval,
8774 cval2, minval),
8775 arg1));
8776 tree equal_result
8777 = fold (build2 (code, type,
8778 eval_subst (arg0, cval1, maxval,
8779 cval2, maxval),
8780 arg1));
8781 tree low_result
8782 = fold (build2 (code, type,
8783 eval_subst (arg0, cval1, minval,
8784 cval2, maxval),
8785 arg1));
8787 /* All three of these results should be 0 or 1. Confirm they
8788 are. Then use those values to select the proper code
8789 to use. */
8791 if ((integer_zerop (high_result)
8792 || integer_onep (high_result))
8793 && (integer_zerop (equal_result)
8794 || integer_onep (equal_result))
8795 && (integer_zerop (low_result)
8796 || integer_onep (low_result)))
8798 /* Make a 3-bit mask with the high-order bit being the
8799 value for `>', the next for `=', and the low for `<'. */
8800 switch ((integer_onep (high_result) * 4)
8801 + (integer_onep (equal_result) * 2)
8802 + integer_onep (low_result))
8804 case 0:
8805 /* Always false. */
8806 return omit_one_operand (type, integer_zero_node, arg0);
8807 case 1:
8808 code = LT_EXPR;
8809 break;
8810 case 2:
8811 code = EQ_EXPR;
8812 break;
8813 case 3:
8814 code = LE_EXPR;
8815 break;
8816 case 4:
8817 code = GT_EXPR;
8818 break;
8819 case 5:
8820 code = NE_EXPR;
8821 break;
8822 case 6:
8823 code = GE_EXPR;
8824 break;
8825 case 7:
8826 /* Always true. */
8827 return omit_one_operand (type, integer_one_node, arg0);
8830 tem = build2 (code, type, cval1, cval2);
8831 if (save_p)
8832 return save_expr (tem);
8833 else
8834 return fold (tem);
8839 /* If this is a comparison of a field, we may be able to simplify it. */
8840 if (((TREE_CODE (arg0) == COMPONENT_REF
8841 && lang_hooks.can_use_bit_fields_p ())
8842 || TREE_CODE (arg0) == BIT_FIELD_REF)
8843 && (code == EQ_EXPR || code == NE_EXPR)
8844 /* Handle the constant case even without -O
8845 to make sure the warnings are given. */
8846 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8848 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8849 if (t1)
8850 return t1;
8853 /* If this is a comparison of complex values and either or both sides
8854 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8855 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8856 This may prevent needless evaluations. */
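/* Illustrative example (editorial addition): for two _Complex double
   values, "a == b" is split into
       REALPART (a) == REALPART (b) && IMAGPART (a) == IMAGPART (b)
   and "a != b" joins the two part comparisons with || instead, so a
   mismatch in the real parts can skip the imaginary comparison.  */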
8857 if ((code == EQ_EXPR || code == NE_EXPR)
8858 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8859 && (TREE_CODE (arg0) == COMPLEX_EXPR
8860 || TREE_CODE (arg1) == COMPLEX_EXPR
8861 || TREE_CODE (arg0) == COMPLEX_CST
8862 || TREE_CODE (arg1) == COMPLEX_CST))
8864 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8865 tree real0, imag0, real1, imag1;
8867 arg0 = save_expr (arg0);
8868 arg1 = save_expr (arg1);
8869 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8870 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8871 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8872 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8874 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8875 : TRUTH_ORIF_EXPR),
8876 type,
8877 fold (build2 (code, type, real0, real1)),
8878 fold (build2 (code, type, imag0, imag1))));
8881 /* Optimize comparisons of strlen vs zero to a compare of the
8882 first character of the string vs zero. To wit,
8883 strlen(ptr) == 0 => *ptr == 0
8884 strlen(ptr) != 0 => *ptr != 0
8885 Other cases should reduce to one of these two (or a constant)
8886 due to the return value of strlen being unsigned. */
8887 if ((code == EQ_EXPR || code == NE_EXPR)
8888 && integer_zerop (arg1)
8889 && TREE_CODE (arg0) == CALL_EXPR)
8891 tree fndecl = get_callee_fndecl (arg0);
8892 tree arglist;
8894 if (fndecl
8895 && DECL_BUILT_IN (fndecl)
8896 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8897 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8898 && (arglist = TREE_OPERAND (arg0, 1))
8899 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8900 && ! TREE_CHAIN (arglist))
8901 return fold (build2 (code, type,
8902 build1 (INDIRECT_REF, char_type_node,
8903 TREE_VALUE (arglist)),
8904 fold_convert (char_type_node,
8905 integer_zero_node)));
8908 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8909 into a single range test. */
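/* Illustrative sketch (editorial addition): for "unsigned int x",
       x / 10 == 3
   holds exactly when 30 <= x && x <= 39, so fold_div_compare can
   replace the division with that single range test.  */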
8910 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8911 && TREE_CODE (arg1) == INTEGER_CST
8912 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8913 && !integer_zerop (TREE_OPERAND (arg0, 1))
8914 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8915 && !TREE_OVERFLOW (arg1))
8917 t1 = fold_div_compare (code, type, arg0, arg1);
8918 if (t1 != NULL_TREE)
8919 return t1;
8922 if ((code == EQ_EXPR || code == NE_EXPR)
8923 && !TREE_SIDE_EFFECTS (arg0)
8924 && integer_zerop (arg1)
8925 && tree_expr_nonzero_p (arg0))
8926 return constant_boolean_node (code == NE_EXPR, type);
8928 t1 = fold_relational_const (code, type, arg0, arg1);
8929 return t1 == NULL_TREE ? t : t1;
8931 case UNORDERED_EXPR:
8932 case ORDERED_EXPR:
8933 case UNLT_EXPR:
8934 case UNLE_EXPR:
8935 case UNGT_EXPR:
8936 case UNGE_EXPR:
8937 case UNEQ_EXPR:
8938 case LTGT_EXPR:
8939 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8941 t1 = fold_relational_const (code, type, arg0, arg1);
8942 if (t1 != NULL_TREE)
8943 return t1;
8946 /* If the first operand is NaN, the result is constant. */
8947 if (TREE_CODE (arg0) == REAL_CST
8948 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8949 && (code != LTGT_EXPR || ! flag_trapping_math))
8951 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8952 ? integer_zero_node
8953 : integer_one_node;
8954 return omit_one_operand (type, t1, arg1);
8957 /* If the second operand is NaN, the result is constant. */
8958 if (TREE_CODE (arg1) == REAL_CST
8959 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
8960 && (code != LTGT_EXPR || ! flag_trapping_math))
8962 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8963 ? integer_zero_node
8964 : integer_one_node;
8965 return omit_one_operand (type, t1, arg0);
8968 /* Simplify unordered comparison of something with itself. */
8969 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
8970 && operand_equal_p (arg0, arg1, 0))
8971 return constant_boolean_node (1, type);
8973 if (code == LTGT_EXPR
8974 && !flag_trapping_math
8975 && operand_equal_p (arg0, arg1, 0))
8976 return constant_boolean_node (0, type);
8978 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8980 tree targ0 = strip_float_extensions (arg0);
8981 tree targ1 = strip_float_extensions (arg1);
8982 tree newtype = TREE_TYPE (targ0);
8984 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8985 newtype = TREE_TYPE (targ1);
8987 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8988 return fold (build2 (code, type, fold_convert (newtype, targ0),
8989 fold_convert (newtype, targ1)));
8992 return t;
8994 case COND_EXPR:
8995 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8996 so all simple results must be passed through pedantic_non_lvalue. */
8997 if (TREE_CODE (arg0) == INTEGER_CST)
8999 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
9000 /* Only optimize constant conditions when the selected branch
9001 has the same type as the COND_EXPR. This avoids optimizing
9002 away "c ? x : throw", where the throw has a void type. */
9003 if (! VOID_TYPE_P (TREE_TYPE (tem))
9004 || VOID_TYPE_P (type))
9005 return pedantic_non_lvalue (tem);
9006 return t;
9008 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
9009 return pedantic_omit_one_operand (type, arg1, arg0);
9011 /* If we have A op B ? A : C, we may be able to convert this to a
9012 simpler expression, depending on the operation and the values
9013 of B and C. Signed zeros prevent all of these transformations,
9014 for reasons given above each one.
9016 Also try swapping the arguments and inverting the conditional. */
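/* Illustrative examples (editorial addition): when signed zeros do not
   matter, "x > y ? x : y" can become MAX_EXPR (x, y) and
   "x < 0 ? -x : x" can become ABS_EXPR (x); transformations of this
   shape are what fold_cond_expr_with_comparison attempts below.  */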
9017 if (COMPARISON_CLASS_P (arg0)
9018 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9019 arg1, TREE_OPERAND (arg0, 1))
9020 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9022 tem = fold_cond_expr_with_comparison (type, arg0,
9023 TREE_OPERAND (t, 1),
9024 TREE_OPERAND (t, 2));
9025 if (tem)
9026 return tem;
9029 if (COMPARISON_CLASS_P (arg0)
9030 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9031 TREE_OPERAND (t, 2),
9032 TREE_OPERAND (arg0, 1))
9033 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
9035 tem = invert_truthvalue (arg0);
9036 if (COMPARISON_CLASS_P (tem))
9038 tem = fold_cond_expr_with_comparison (type, tem,
9039 TREE_OPERAND (t, 2),
9040 TREE_OPERAND (t, 1));
9041 if (tem)
9042 return tem;
9046 /* If the second operand is simpler than the third, swap them
9047 since that produces better jump optimization results. */
9048 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
9049 TREE_OPERAND (t, 2), false))
9051 /* See if this can be inverted. If it can't, possibly because
9052 it was a floating-point inequality comparison, don't do
9053 anything. */
9054 tem = invert_truthvalue (arg0);
9056 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9057 return fold (build3 (code, type, tem,
9058 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
9061 /* Convert A ? 1 : 0 to simply A. */
9062 if (integer_onep (TREE_OPERAND (t, 1))
9063 && integer_zerop (TREE_OPERAND (t, 2))
9064 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
9065 call to fold will try to move the conversion inside
9066 a COND, which will recurse. In that case, the COND_EXPR
9067 is probably the best choice, so leave it alone. */
9068 && type == TREE_TYPE (arg0))
9069 return pedantic_non_lvalue (arg0);
9071 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
9072 over COND_EXPR in cases such as floating point comparisons. */
9073 if (integer_zerop (TREE_OPERAND (t, 1))
9074 && integer_onep (TREE_OPERAND (t, 2))
9075 && truth_value_p (TREE_CODE (arg0)))
9076 return pedantic_non_lvalue (fold_convert (type,
9077 invert_truthvalue (arg0)));
9079 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
9080 if (TREE_CODE (arg0) == LT_EXPR
9081 && integer_zerop (TREE_OPERAND (arg0, 1))
9082 && integer_zerop (TREE_OPERAND (t, 2))
9083 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
9084 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
9085 TREE_TYPE (tem), tem, arg1)));
9087 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
9088 already handled above. */
9089 if (TREE_CODE (arg0) == BIT_AND_EXPR
9090 && integer_onep (TREE_OPERAND (arg0, 1))
9091 && integer_zerop (TREE_OPERAND (t, 2))
9092 && integer_pow2p (arg1))
9094 tree tem = TREE_OPERAND (arg0, 0);
9095 STRIP_NOPS (tem);
9096 if (TREE_CODE (tem) == RSHIFT_EXPR
9097 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9098 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
9099 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
9100 return fold (build2 (BIT_AND_EXPR, type,
9101 TREE_OPERAND (tem, 0), arg1));
9104 /* A & N ? N : 0 is simply A & N if N is a power of two. This
9105 is probably obsolete because the first operand should be a
9106 truth value (that's why we have the two cases above), but let's
9107 leave it in until we can confirm this for all front-ends. */
9108 if (integer_zerop (TREE_OPERAND (t, 2))
9109 && TREE_CODE (arg0) == NE_EXPR
9110 && integer_zerop (TREE_OPERAND (arg0, 1))
9111 && integer_pow2p (arg1)
9112 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
9113 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
9114 arg1, OEP_ONLY_CONST))
9115 return pedantic_non_lvalue (fold_convert (type,
9116 TREE_OPERAND (arg0, 0)));
9118 /* Convert A ? B : 0 into A && B if A and B are truth values. */
9119 if (integer_zerop (TREE_OPERAND (t, 2))
9120 && truth_value_p (TREE_CODE (arg0))
9121 && truth_value_p (TREE_CODE (arg1)))
9122 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
9124 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
9125 if (integer_onep (TREE_OPERAND (t, 2))
9126 && truth_value_p (TREE_CODE (arg0))
9127 && truth_value_p (TREE_CODE (arg1)))
9129 /* Only perform transformation if ARG0 is easily inverted. */
9130 tem = invert_truthvalue (arg0);
9131 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9132 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
9135 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
9136 if (integer_zerop (arg1)
9137 && truth_value_p (TREE_CODE (arg0))
9138 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9140 /* Only perform transformation if ARG0 is easily inverted. */
9141 tem = invert_truthvalue (arg0);
9142 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9143 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
9144 TREE_OPERAND (t, 2)));
9147 /* Convert A ? 1 : B into A || B if A and B are truth values. */
9148 if (integer_onep (arg1)
9149 && truth_value_p (TREE_CODE (arg0))
9150 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9151 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
9152 TREE_OPERAND (t, 2)));
9154 return t;
9156 case COMPOUND_EXPR:
9157 /* When pedantic, a compound expression can be neither an lvalue
9158 nor an integer constant expression. */
9159 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9160 return t;
9161 /* Don't let (0, 0) be a null pointer constant. */
9162 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9163 : fold_convert (type, arg1);
9164 return pedantic_non_lvalue (tem);
9166 case COMPLEX_EXPR:
9167 if (wins)
9168 return build_complex (type, arg0, arg1);
9169 return t;
9171 case REALPART_EXPR:
9172 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9173 return t;
9174 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9175 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
9176 TREE_OPERAND (arg0, 1));
9177 else if (TREE_CODE (arg0) == COMPLEX_CST)
9178 return TREE_REALPART (arg0);
9179 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9180 return fold (build2 (TREE_CODE (arg0), type,
9181 fold (build1 (REALPART_EXPR, type,
9182 TREE_OPERAND (arg0, 0))),
9183 fold (build1 (REALPART_EXPR, type,
9184 TREE_OPERAND (arg0, 1)))));
9185 return t;
9187 case IMAGPART_EXPR:
9188 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9189 return fold_convert (type, integer_zero_node);
9190 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9191 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
9192 TREE_OPERAND (arg0, 0));
9193 else if (TREE_CODE (arg0) == COMPLEX_CST)
9194 return TREE_IMAGPART (arg0);
9195 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9196 return fold (build2 (TREE_CODE (arg0), type,
9197 fold (build1 (IMAGPART_EXPR, type,
9198 TREE_OPERAND (arg0, 0))),
9199 fold (build1 (IMAGPART_EXPR, type,
9200 TREE_OPERAND (arg0, 1)))));
9201 return t;
9203 case CALL_EXPR:
9204 /* Check for a built-in function. */
9205 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
9206 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
9207 == FUNCTION_DECL)
9208 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
9210 tree tmp = fold_builtin (t, false);
9211 if (tmp)
9212 return tmp;
9214 return t;
9216 default:
9217 return t;
9218 } /* switch (code) */
9221 #ifdef ENABLE_FOLD_CHECKING
9222 #undef fold
9224 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
9225 static void fold_check_failed (tree, tree);
9226 void print_fold_checksum (tree);
9228 /* When --enable-checking=fold is in effect, compute a digest of EXPR
9229 before and after the actual fold call to verify that fold did not
9230 accidentally change the original expr. */
9232 tree
9233 fold (tree expr)
9235 tree ret;
9236 struct md5_ctx ctx;
9237 unsigned char checksum_before[16], checksum_after[16];
9238 htab_t ht;
9240 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9241 md5_init_ctx (&ctx);
9242 fold_checksum_tree (expr, &ctx, ht);
9243 md5_finish_ctx (&ctx, checksum_before);
9244 htab_empty (ht);
9246 ret = fold_1 (expr);
9248 md5_init_ctx (&ctx);
9249 fold_checksum_tree (expr, &ctx, ht);
9250 md5_finish_ctx (&ctx, checksum_after);
9251 htab_delete (ht);
9253 if (memcmp (checksum_before, checksum_after, 16))
9254 fold_check_failed (expr, ret);
9256 return ret;
9259 void
9260 print_fold_checksum (tree expr)
9262 struct md5_ctx ctx;
9263 unsigned char checksum[16], cnt;
9264 htab_t ht;
9266 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9267 md5_init_ctx (&ctx);
9268 fold_checksum_tree (expr, &ctx, ht);
9269 md5_finish_ctx (&ctx, checksum);
9270 htab_delete (ht);
9271 for (cnt = 0; cnt < 16; ++cnt)
9272 fprintf (stderr, "%02x", checksum[cnt]);
9273 putc ('\n', stderr);
9276 static void
9277 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
9279 internal_error ("fold check: original tree changed by fold");
9282 static void
9283 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
9285 void **slot;
9286 enum tree_code code;
9287 char buf[sizeof (struct tree_decl)];
9288 int i, len;
9290 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
9291 <= sizeof (struct tree_decl))
9292 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
9293 if (expr == NULL)
9294 return;
9295 slot = htab_find_slot (ht, expr, INSERT);
9296 if (*slot != NULL)
9297 return;
9298 *slot = expr;
9299 code = TREE_CODE (expr);
9300 if (TREE_CODE_CLASS (code) == tcc_declaration
9301 && DECL_ASSEMBLER_NAME_SET_P (expr))
9303 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9304 memcpy (buf, expr, tree_size (expr));
9305 expr = (tree) buf;
9306 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9308 else if (TREE_CODE_CLASS (code) == tcc_type
9309 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
9310 || TYPE_CACHED_VALUES_P (expr)))
9312 /* Allow these fields to be modified. */
9313 memcpy (buf, expr, tree_size (expr));
9314 expr = (tree) buf;
9315 TYPE_POINTER_TO (expr) = NULL;
9316 TYPE_REFERENCE_TO (expr) = NULL;
9317 TYPE_CACHED_VALUES_P (expr) = 0;
9318 TYPE_CACHED_VALUES (expr) = NULL;
9320 md5_process_bytes (expr, tree_size (expr), ctx);
9321 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
9322 if (TREE_CODE_CLASS (code) != tcc_type
9323 && TREE_CODE_CLASS (code) != tcc_declaration)
9324 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
9325 switch (TREE_CODE_CLASS (code))
9327 case tcc_constant:
9328 switch (code)
9330 case STRING_CST:
9331 md5_process_bytes (TREE_STRING_POINTER (expr),
9332 TREE_STRING_LENGTH (expr), ctx);
9333 break;
9334 case COMPLEX_CST:
9335 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
9336 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
9337 break;
9338 case VECTOR_CST:
9339 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
9340 break;
9341 default:
9342 break;
9344 break;
9345 case tcc_exceptional:
9346 switch (code)
9348 case TREE_LIST:
9349 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
9350 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
9351 break;
9352 case TREE_VEC:
9353 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
9354 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
9355 break;
9356 default:
9357 break;
9359 break;
9360 case tcc_expression:
9361 case tcc_reference:
9362 case tcc_comparison:
9363 case tcc_unary:
9364 case tcc_binary:
9365 case tcc_statement:
9366 len = first_rtl_op (code);
9367 for (i = 0; i < len; ++i)
9368 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
9369 break;
9370 case tcc_declaration:
9371 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
9372 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
9373 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
9374 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
9375 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
9376 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
9377 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
9378 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
9379 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
9380 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
9381 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
9382 break;
9383 case tcc_type:
9384 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9385 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9386 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9387 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9388 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9389 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
9390 if (INTEGRAL_TYPE_P (expr)
9391 || SCALAR_FLOAT_TYPE_P (expr))
9393 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9394 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9396 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9397 if (TREE_CODE (expr) == RECORD_TYPE
9398 || TREE_CODE (expr) == UNION_TYPE
9399 || TREE_CODE (expr) == QUAL_UNION_TYPE)
9400 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9401 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9402 break;
9403 default:
9404 break;
9408 #endif
9410 /* Perform constant folding and related simplification of initializer
9411 expression EXPR. This behaves identically to "fold" but ignores
9412 potential run-time traps and exceptions that fold must preserve. */
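/* Illustrative example (editorial addition): a static initializer such
   as "double d = 1.0 / 0.0;" must be evaluated at compile time, to
   +Inf, even though at run time the division could trap; clearing the
   trapping flags below permits that folding.  */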
9414 tree
9415 fold_initializer (tree expr)
9417 int saved_signaling_nans = flag_signaling_nans;
9418 int saved_trapping_math = flag_trapping_math;
9419 int saved_trapv = flag_trapv;
9420 tree result;
9422 flag_signaling_nans = 0;
9423 flag_trapping_math = 0;
9424 flag_trapv = 0;
9426 result = fold (expr);
9428 flag_signaling_nans = saved_signaling_nans;
9429 flag_trapping_math = saved_trapping_math;
9430 flag_trapv = saved_trapv;
9432 return result;
9435 /* Determine whether the first argument is a multiple of the second. Return 0
9436 if it is not, or if we cannot easily determine it to be.
9438 An example of the sort of thing we care about (at this point; this routine
9439 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9440 fold cases do now) is discovering that
9442 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9444 is a multiple of
9446 SAVE_EXPR (J * 8)
9448 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9450 This code also handles discovering that
9452 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9454 is a multiple of 8 so we don't have to worry about dealing with a
9455 possible remainder.
9457 Note that we *look* inside a SAVE_EXPR only to determine how it was
9458 calculated; it is not safe for fold to do much of anything else with the
9459 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9460 at run time. For example, the latter example above *cannot* be implemented
9461 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9462 evaluation time of the original SAVE_EXPR is not necessarily the same at
9463 the time the new expression is evaluated. The only optimization of this
9464 sort that would be valid is changing
9466 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9468 divided by 8 to
9470 SAVE_EXPR (I) * SAVE_EXPR (J)
9472 (where the same SAVE_EXPR (J) is used in the original and the
9473 transformed version). */
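/* Hypothetical usage sketch (editorial addition):
       tree eight = build_int_cst (TREE_TYPE (top), 8);
       multiple_of_p (TREE_TYPE (top), top, eight)
   returns 1 when TOP is I * SAVE_EXPR (J * 8), because the MULT_EXPR
   case succeeds as soon as either factor is a multiple of BOTTOM.  */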
9475 static int
9476 multiple_of_p (tree type, tree top, tree bottom)
9478 if (operand_equal_p (top, bottom, 0))
9479 return 1;
9481 if (TREE_CODE (type) != INTEGER_TYPE)
9482 return 0;
9484 switch (TREE_CODE (top))
9486 case MULT_EXPR:
9487 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9488 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9490 case PLUS_EXPR:
9491 case MINUS_EXPR:
9492 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9493 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9495 case LSHIFT_EXPR:
9496 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
9498 tree op1, t1;
9500 op1 = TREE_OPERAND (top, 1);
9501 /* const_binop may not detect overflow correctly,
9502 so check for it explicitly here. */
9503 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9504 > TREE_INT_CST_LOW (op1)
9505 && TREE_INT_CST_HIGH (op1) == 0
9506 && 0 != (t1 = fold_convert (type,
9507 const_binop (LSHIFT_EXPR,
9508 size_one_node,
9509 op1, 0)))
9510 && ! TREE_OVERFLOW (t1))
9511 return multiple_of_p (type, t1, bottom);
9513 return 0;
9515 case NOP_EXPR:
9516 /* Can't handle conversions from non-integral or wider integral type. */
9517 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9518 || (TYPE_PRECISION (type)
9519 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9520 return 0;
9522 /* ... fall through ... */
9524 case SAVE_EXPR:
9525 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
9527 case INTEGER_CST:
9528 if (TREE_CODE (bottom) != INTEGER_CST
9529 || (TYPE_UNSIGNED (type)
9530 && (tree_int_cst_sgn (top) < 0
9531 || tree_int_cst_sgn (bottom) < 0)))
9532 return 0;
9533 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9534 top, bottom, 0));
9536 default:
9537 return 0;
9541 /* Return true if `t' is known to be non-negative. */
9544 tree_expr_nonnegative_p (tree t)
9546 switch (TREE_CODE (t))
9548 case ABS_EXPR:
9549 return 1;
9551 case INTEGER_CST:
9552 return tree_int_cst_sgn (t) >= 0;
9554 case REAL_CST:
9555 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
9557 case PLUS_EXPR:
9558 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9559 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9560 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9562 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9563 both unsigned and at least 2 bits shorter than the result. */
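/* Worked example (editorial addition): two "unsigned char" operands
   promoted to a 32-bit int need at most 8 + 1 = 9 bits for their sum
   (255 + 255 = 510), and 9 < 32, so the result cannot wrap negative.  */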
9564 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9565 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9566 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9568 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9569 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9570 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9571 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9573 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9574 TYPE_PRECISION (inner2)) + 1;
9575 return prec < TYPE_PRECISION (TREE_TYPE (t));
9578 break;
9580 case MULT_EXPR:
9581 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9583 /* x * x for floating point x is always non-negative. */
9584 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9585 return 1;
9586 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9587 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9590 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9591 both unsigned and the sum of their widths is less than the result's. */
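/* Worked example (editorial addition): two "unsigned char" operands
   multiplied in a 32-bit int need at most 8 + 8 = 16 bits
   (255 * 255 = 65025 < 65536), and 16 < 32, so the product stays
   non-negative.  */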
9592 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9593 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9594 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9596 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9597 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9598 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9599 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9600 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9601 < TYPE_PRECISION (TREE_TYPE (t));
9603 return 0;
9605 case TRUNC_DIV_EXPR:
9606 case CEIL_DIV_EXPR:
9607 case FLOOR_DIV_EXPR:
9608 case ROUND_DIV_EXPR:
9609 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9610 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9612 case TRUNC_MOD_EXPR:
9613 case CEIL_MOD_EXPR:
9614 case FLOOR_MOD_EXPR:
9615 case ROUND_MOD_EXPR:
9616 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9618 case RDIV_EXPR:
9619 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9620 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9622 case BIT_AND_EXPR:
9623 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9624 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9625 case BIT_IOR_EXPR:
9626 case BIT_XOR_EXPR:
9627 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9628 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9630 case NOP_EXPR:
9632 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9633 tree outer_type = TREE_TYPE (t);
9635 if (TREE_CODE (outer_type) == REAL_TYPE)
9637 if (TREE_CODE (inner_type) == REAL_TYPE)
9638 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9639 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9641 if (TYPE_UNSIGNED (inner_type))
9642 return 1;
9643 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9646 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9648 if (TREE_CODE (inner_type) == REAL_TYPE)
9649 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9650 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9651 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9652 && TYPE_UNSIGNED (inner_type);
9655 break;
9657 case COND_EXPR:
9658 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9659 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9660 case COMPOUND_EXPR:
9661 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9662 case MIN_EXPR:
9663 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9664 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9665 case MAX_EXPR:
9666 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9667 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9668 case MODIFY_EXPR:
9669 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9670 case BIND_EXPR:
9671 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9672 case SAVE_EXPR:
9673 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9674 case NON_LVALUE_EXPR:
9675 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9676 case FLOAT_EXPR:
9677 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9679 case TARGET_EXPR:
9681 tree temp = TARGET_EXPR_SLOT (t);
9682 t = TARGET_EXPR_INITIAL (t);
9684 /* If the initializer is non-void, then it's a normal expression
9685 that will be assigned to the slot. */
9686 if (!VOID_TYPE_P (t))
9687 return tree_expr_nonnegative_p (t);
9689 /* Otherwise, the initializer sets the slot in some way. One common
9690 way is an assignment statement at the end of the initializer. */
9691 while (1)
9693 if (TREE_CODE (t) == BIND_EXPR)
9694 t = expr_last (BIND_EXPR_BODY (t));
9695 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9696 || TREE_CODE (t) == TRY_CATCH_EXPR)
9697 t = expr_last (TREE_OPERAND (t, 0));
9698 else if (TREE_CODE (t) == STATEMENT_LIST)
9699 t = expr_last (t);
9700 else
9701 break;
9703 if (TREE_CODE (t) == MODIFY_EXPR
9704 && TREE_OPERAND (t, 0) == temp)
9705 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9707 return 0;
9710 case CALL_EXPR:
9712 tree fndecl = get_callee_fndecl (t);
9713 tree arglist = TREE_OPERAND (t, 1);
9714 if (fndecl
9715 && DECL_BUILT_IN (fndecl)
9716 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9717 switch (DECL_FUNCTION_CODE (fndecl))
9719 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9720 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9721 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9722 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9724 CASE_BUILTIN_F (BUILT_IN_ACOS)
9725 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9726 CASE_BUILTIN_F (BUILT_IN_CABS)
9727 CASE_BUILTIN_F (BUILT_IN_COSH)
9728 CASE_BUILTIN_F (BUILT_IN_ERFC)
9729 CASE_BUILTIN_F (BUILT_IN_EXP)
9730 CASE_BUILTIN_F (BUILT_IN_EXP10)
9731 CASE_BUILTIN_F (BUILT_IN_EXP2)
9732 CASE_BUILTIN_F (BUILT_IN_FABS)
9733 CASE_BUILTIN_F (BUILT_IN_FDIM)
9734 CASE_BUILTIN_F (BUILT_IN_FREXP)
9735 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9736 CASE_BUILTIN_F (BUILT_IN_POW10)
9737 CASE_BUILTIN_I (BUILT_IN_FFS)
9738 CASE_BUILTIN_I (BUILT_IN_PARITY)
9739 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9740 /* Always true. */
9741 return 1;
9743 CASE_BUILTIN_F (BUILT_IN_SQRT)
9744 /* sqrt(-0.0) is -0.0. */
9745 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9746 return 1;
9747 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9749 CASE_BUILTIN_F (BUILT_IN_ASINH)
9750 CASE_BUILTIN_F (BUILT_IN_ATAN)
9751 CASE_BUILTIN_F (BUILT_IN_ATANH)
9752 CASE_BUILTIN_F (BUILT_IN_CBRT)
9753 CASE_BUILTIN_F (BUILT_IN_CEIL)
9754 CASE_BUILTIN_F (BUILT_IN_ERF)
9755 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9756 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9757 CASE_BUILTIN_F (BUILT_IN_FMOD)
9758 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9759 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9760 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9761 CASE_BUILTIN_F (BUILT_IN_LRINT)
9762 CASE_BUILTIN_F (BUILT_IN_LROUND)
9763 CASE_BUILTIN_F (BUILT_IN_MODF)
9764 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9765 CASE_BUILTIN_F (BUILT_IN_POW)
9766 CASE_BUILTIN_F (BUILT_IN_RINT)
9767 CASE_BUILTIN_F (BUILT_IN_ROUND)
9768 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9769 CASE_BUILTIN_F (BUILT_IN_SINH)
9770 CASE_BUILTIN_F (BUILT_IN_TANH)
9771 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9772 /* True if the 1st argument is nonnegative. */
9773 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9775 CASE_BUILTIN_F (BUILT_IN_FMAX)
9776 /* True if the 1st OR 2nd arguments are nonnegative. */
9777 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9778 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9780 CASE_BUILTIN_F (BUILT_IN_FMIN)
9781 /* True if the 1st AND 2nd arguments are nonnegative. */
9782 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9783 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9785 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9786 /* True if the 2nd argument is nonnegative. */
9787 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9789 default:
9790 break;
9791 #undef CASE_BUILTIN_F
9792 #undef CASE_BUILTIN_I
9796 /* ... fall through ... */
9798 default:
9799 if (truth_value_p (TREE_CODE (t)))
9800 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9801 return 1;
9804 /* We don't know sign of `t', so be conservative and return false. */
9805 return 0;
9808 /* Return true when T is an address and is known to be nonzero.
9809 For floating point we further ensure that T is not denormal.
9810 Similar logic is present in nonzero_address in rtlanal.h. */
9812 static bool
9813 tree_expr_nonzero_p (tree t)
9815 tree type = TREE_TYPE (t);
9817 /* Doing something useful for floating point would need more work. */
9818 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9819 return false;
9821 switch (TREE_CODE (t))
9823 case ABS_EXPR:
9824 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9825 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* Do not fall through to the INTEGER_CST case below. */
break;
9827 case INTEGER_CST:
9828 /* We used to test for !integer_zerop here. This does not work correctly
9829 if TREE_CONSTANT_OVERFLOW (t). */
9830 return (TREE_INT_CST_LOW (t) != 0
9831 || TREE_INT_CST_HIGH (t) != 0);
9833 case PLUS_EXPR:
9834 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9836 /* In the presence of negative values it is hard
9837 to say anything definite. */
9838 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9839 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9840 return false;
9841 /* One of the operands must be positive and the other non-negative. */
9842 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9843 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9845 break;
9847 case MULT_EXPR:
9848 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9850 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9851 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9853 break;
9855 case NOP_EXPR:
9857 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9858 tree outer_type = TREE_TYPE (t);
9860 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9861 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
9863 break;
9865 case ADDR_EXPR:
9867 tree base = get_base_address (TREE_OPERAND (t, 0));
9869 if (!base)
9870 return false;
9872 /* Weak declarations may link to NULL. */
9873 if (DECL_P (base))
9874 return !DECL_WEAK (base);
9876 /* Constants are never weak. */
9877 if (CONSTANT_CLASS_P (base))
9878 return true;
9880 return false;
9883 case COND_EXPR:
9884 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9885 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9887 case MIN_EXPR:
9888 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9889 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9891 case MAX_EXPR:
9892 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9894 /* When both operands are nonzero, then MAX must be too. */
9895 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9896 return true;
9898 /* MAX where operand 0 is positive is positive. */
9899 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9901 /* MAX where operand 1 is positive is positive. */
9902 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9903 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9904 return true;
9905 break;
9907 case COMPOUND_EXPR:
9908 case MODIFY_EXPR:
9909 case BIND_EXPR:
9910 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9912 case SAVE_EXPR:
9913 case NON_LVALUE_EXPR:
9914 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9916 case BIT_IOR_EXPR:
9917 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9918 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9920 default:
9921 break;
9923 return false;
9926 /* See if we are applying CODE, a relational operator, to the highest or
9927 lowest possible integer of TYPE. If so, the result is a
9928 compile-time constant. */
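/* Illustrative examples (editorial addition): when OP1 has type
   "unsigned char", "x <= 255" is always true and "x > 255" is always
   false, while "x >= 255" becomes "x == 255" and "x < 255" becomes
   "x != 255".  */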
9930 static tree
9931 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9932 tree *op1_p)
9934 tree op0 = *op0_p;
9935 tree op1 = *op1_p;
9936 enum tree_code code = *code_p;
9937 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
9939 if (TREE_CODE (op1) == INTEGER_CST
9940 && ! TREE_CONSTANT_OVERFLOW (op1)
9941 && width <= HOST_BITS_PER_WIDE_INT
9942 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
9943 || POINTER_TYPE_P (TREE_TYPE (op1))))
9945 unsigned HOST_WIDE_INT signed_max;
9946 unsigned HOST_WIDE_INT max, min;
9948 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
9950 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
9952 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9953 min = 0;
9955 else
9957 max = signed_max;
9958 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9961 if (TREE_INT_CST_HIGH (op1) == 0
9962 && TREE_INT_CST_LOW (op1) == max)
9963 switch (code)
9965 case GT_EXPR:
9966 return omit_one_operand (type, integer_zero_node, op0);
9968 case GE_EXPR:
9969 *code_p = EQ_EXPR;
9970 break;
9971 case LE_EXPR:
9972 return omit_one_operand (type, integer_one_node, op0);
9974 case LT_EXPR:
9975 *code_p = NE_EXPR;
9976 break;
9978 /* The GE_EXPR and LT_EXPR cases above are not normally
9979 reached because of previous transformations. */
9981 default:
9982 break;
9984 else if (TREE_INT_CST_HIGH (op1) == 0
9985 && TREE_INT_CST_LOW (op1) == max - 1)
9986 switch (code)
9988 case GT_EXPR:
9989 *code_p = EQ_EXPR;
9990 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9991 break;
9992 case LE_EXPR:
9993 *code_p = NE_EXPR;
9994 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9995 break;
9996 default:
9997 break;
9999 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10000 && TREE_INT_CST_LOW (op1) == min)
10001 switch (code)
10003 case LT_EXPR:
10004 return omit_one_operand (type, integer_zero_node, op0);
10006 case LE_EXPR:
10007 *code_p = EQ_EXPR;
10008 break;
10010 case GE_EXPR:
10011 return omit_one_operand (type, integer_one_node, op0);
10013 case GT_EXPR:
10014 *code_p = NE_EXPR;
10015 break;
10017 default:
10018 break;
10020 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10021 && TREE_INT_CST_LOW (op1) == min + 1)
10022 switch (code)
10024 case GE_EXPR:
10025 *code_p = NE_EXPR;
10026 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10027 break;
10028 case LT_EXPR:
10029 *code_p = EQ_EXPR;
10030 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10031 break;
10032 default:
10033 break;
10036 else if (TREE_INT_CST_HIGH (op1) == 0
10037 && TREE_INT_CST_LOW (op1) == signed_max
10038 && TYPE_UNSIGNED (TREE_TYPE (op1))
10039 /* signed_type does not work on pointer types. */
10040 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
10042 /* The following case also applies to X < signed_max+1
10043 and X >= signed_max+1 because of previous transformations. */
10044 if (code == LE_EXPR || code == GT_EXPR)
10046 tree st0, st1, exp, retval;
10047 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
10048 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
10050 exp = build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
10051 type,
10052 fold_convert (st0, op0),
10053 fold_convert (st1, integer_zero_node));
10055 retval
10056 = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
10057 TREE_TYPE (exp),
10058 TREE_OPERAND (exp, 0),
10059 TREE_OPERAND (exp, 1));
10061 /* If we are in gimple form, then returning EXP would create
10062 non-gimple expressions. Clearing it is safe and ensures
10063 we do not allow a non-gimple expression to escape. */
10064 if (in_gimple_form)
10065 exp = NULL;
10067 return (retval ? retval : exp);
10072 return NULL_TREE;
10076 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
10077 attempt to fold the expression to a constant without modifying TYPE,
10078 OP0 or OP1.
10080 If the expression could be simplified to a constant, then return
10081 the constant. If the expression would not be simplified to a
10082 constant, then return NULL_TREE.
10084 Note this is primarily designed to be called after gimplification
10085 of the tree structures and when at least one operand is a constant.
10086 As a result of those simplifying assumptions this routine is far
10087 simpler than the generic fold routine. */
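/* Hypothetical usage sketch (editorial addition):
       tree c1 = build_int_cst (integer_type_node, 6);
       tree c2 = build_int_cst (integer_type_node, 7);
       tree t = nondestructive_fold_binary_to_constant (MULT_EXPR,
                                                        integer_type_node,
                                                        c1, c2);
   yields the INTEGER_CST 42; with a non-constant operand the routine
   simply returns NULL_TREE instead of building a new expression.  */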
10089 tree
10090 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
10091 tree op0, tree op1)
10093 int wins = 1;
10094 tree subop0;
10095 tree subop1;
10096 tree tem;
10098 /* If this is a commutative operation, and OP0 is a constant, move it
10099 to OP1 to reduce the number of tests below. */
10100 if (commutative_tree_code (code)
10101 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
10103 tem = op0;
10104 op0 = op1;
10105 op1 = tem;
10108 /* If either operand is a complex type, extract its real component. */
10109 if (TREE_CODE (op0) == COMPLEX_CST)
10110 subop0 = TREE_REALPART (op0);
10111 else
10112 subop0 = op0;
10114 if (TREE_CODE (op1) == COMPLEX_CST)
10115 subop1 = TREE_REALPART (op1);
10116 else
10117 subop1 = op1;
10119 /* Note if either argument is not a real or integer constant.
10120 With a few exceptions, simplification is limited to cases
10121 where both arguments are constants. */
10122 if ((TREE_CODE (subop0) != INTEGER_CST
10123 && TREE_CODE (subop0) != REAL_CST)
10124 || (TREE_CODE (subop1) != INTEGER_CST
10125 && TREE_CODE (subop1) != REAL_CST))
10126 wins = 0;
10128 switch (code)
10130 case PLUS_EXPR:
10131 /* (plus (address) (const_int)) is a constant. */
10132 if (TREE_CODE (op0) == PLUS_EXPR
10133 && TREE_CODE (op1) == INTEGER_CST
10134 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
10135 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
10136 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
10137 == ADDR_EXPR)))
10138 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
10140 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
10141 const_binop (PLUS_EXPR, op1,
10142 TREE_OPERAND (op0, 1), 0));
10144 case BIT_XOR_EXPR:
10146 binary:
10147 if (!wins)
10148 return NULL_TREE;
10150 /* Both arguments are constants. Simplify. */
10151 tem = const_binop (code, op0, op1, 0);
10152 if (tem != NULL_TREE)
10154 /* The return value should always have the same type as
10155 the original expression. */
10156 if (TREE_TYPE (tem) != type)
10157 tem = fold_convert (type, tem);
10159 return tem;
10161 return NULL_TREE;
10163 case MINUS_EXPR:
10164 /* Fold &x - &x. This can happen from &x.foo - &x.
10165 This is unsafe for certain floats even in non-IEEE formats.
10166 In IEEE, it is unsafe because it does wrong for NaNs.
10167 Also note that operand_equal_p is always false if an
10168 operand is volatile. */
10169 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
10170 return fold_convert (type, integer_zero_node);
10172 goto binary;
10174 case MULT_EXPR:
10175 case BIT_AND_EXPR:
10176 /* Special case multiplication or bitwise AND where one argument
10177 is zero. */
10178 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
10179 return omit_one_operand (type, op1, op0);
10180 else
10181 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
10182 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
10183 && real_zerop (op1))
10184 return omit_one_operand (type, op1, op0);
10186 goto binary;
10188 case BIT_IOR_EXPR:
10189 /* Special case when we know the result will be all ones. */
10190 if (integer_all_onesp (op1))
10191 return omit_one_operand (type, op1, op0);
10193 goto binary;
10195 case TRUNC_DIV_EXPR:
10196 case ROUND_DIV_EXPR:
10197 case FLOOR_DIV_EXPR:
10198 case CEIL_DIV_EXPR:
10199 case EXACT_DIV_EXPR:
10200 case TRUNC_MOD_EXPR:
10201 case ROUND_MOD_EXPR:
10202 case FLOOR_MOD_EXPR:
10203 case CEIL_MOD_EXPR:
10204 case RDIV_EXPR:
10205 /* Division by zero is undefined. */
10206 if (integer_zerop (op1))
10207 return NULL_TREE;
10209 if (TREE_CODE (op1) == REAL_CST
10210 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
10211 && real_zerop (op1))
10212 return NULL_TREE;
10214 goto binary;
10216 case MIN_EXPR:
10217 if (INTEGRAL_TYPE_P (type)
10218 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10219 return omit_one_operand (type, op1, op0);
10221 goto binary;
10223 case MAX_EXPR:
10224 if (INTEGRAL_TYPE_P (type)
10225 && TYPE_MAX_VALUE (type)
10226 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10227 return omit_one_operand (type, op1, op0);
10229 goto binary;
10231 case RSHIFT_EXPR:
10232 /* Optimize -1 >> x for arithmetic right shifts. */
10233 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
10234 return omit_one_operand (type, op0, op1);
10235 /* ... fall through ... */
10237 case LSHIFT_EXPR:
10238 if (integer_zerop (op0))
10239 return omit_one_operand (type, op0, op1);
10241 /* Since a negative shift count is not well-defined, don't
10242 try to compute it in the compiler. */
10243 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
10244 return NULL_TREE;
10246 goto binary;
10248 case LROTATE_EXPR:
10249 case RROTATE_EXPR:
10250 /* -1 rotated either direction by any amount is still -1. */
10251 if (integer_all_onesp (op0))
10252 return omit_one_operand (type, op0, op1);
10254 /* 0 rotated either direction by any amount is still zero. */
10255 if (integer_zerop (op0))
10256 return omit_one_operand (type, op0, op1);
10258 goto binary;
10260 case COMPLEX_EXPR:
10261 if (wins)
10262 return build_complex (type, op0, op1);
10263 return NULL_TREE;
10265 case LT_EXPR:
10266 case LE_EXPR:
10267 case GT_EXPR:
10268 case GE_EXPR:
10269 case EQ_EXPR:
10270 case NE_EXPR:
10271 /* If one arg is a real or integer constant, put it last. */
10272 if ((TREE_CODE (op0) == INTEGER_CST
10273 && TREE_CODE (op1) != INTEGER_CST)
10274 || (TREE_CODE (op0) == REAL_CST
10275 && TREE_CODE (op1) != REAL_CST))
10277 tree temp;
10279 temp = op0;
10280 op0 = op1;
10281 op1 = temp;
10282 code = swap_tree_comparison (code);
10285 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10286 This transformation affects the cases which are handled in later
10287 optimizations involving comparisons with non-negative constants. */
10288 if (TREE_CODE (op1) == INTEGER_CST
10289 && TREE_CODE (op0) != INTEGER_CST
10290 && tree_int_cst_sgn (op1) > 0)
10292 switch (code)
10294 case GE_EXPR:
10295 code = GT_EXPR;
10296 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10297 break;
10299 case LT_EXPR:
10300 code = LE_EXPR;
10301 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10302 break;
10304 default:
10305 break;
10309 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10310 if (tem)
10311 return tem;
10313 /* Fall through. */
10315 case ORDERED_EXPR:
10316 case UNORDERED_EXPR:
10317 case UNLT_EXPR:
10318 case UNLE_EXPR:
10319 case UNGT_EXPR:
10320 case UNGE_EXPR:
10321 case UNEQ_EXPR:
10322 case LTGT_EXPR:
10323 if (!wins)
10324 return NULL_TREE;
10326 return fold_relational_const (code, type, op0, op1);
10328 case RANGE_EXPR:
10329 /* This could probably be handled. */
10330 return NULL_TREE;
10332 case TRUTH_AND_EXPR:
10333 /* If second arg is constant zero, result is zero, but first arg
10334 must be evaluated. */
10335 if (integer_zerop (op1))
10336 return omit_one_operand (type, op1, op0);
10337 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10338 case will be handled here. */
10339 if (integer_zerop (op0))
10340 return omit_one_operand (type, op0, op1);
10341 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10342 return constant_boolean_node (true, type);
10343 return NULL_TREE;
10345 case TRUTH_OR_EXPR:
10346 /* If second arg is constant true, result is true, but we must
10347 evaluate first arg. */
10348 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10349 return omit_one_operand (type, op1, op0);
10350 /* Likewise for first arg, but note this only occurs here for
10351 TRUTH_OR_EXPR. */
10352 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10353 return omit_one_operand (type, op0, op1);
10354 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10355 return constant_boolean_node (false, type);
10356 return NULL_TREE;
10358 case TRUTH_XOR_EXPR:
10359 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10361 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10362 return constant_boolean_node (x, type);
10364 return NULL_TREE;
10366 default:
10367 return NULL_TREE;
10371 /* Given the components of a unary expression CODE, TYPE and OP0,
10372 attempt to fold the expression to a constant without modifying
10373 TYPE or OP0.
10375 If the expression could be simplified to a constant, then return
10376 the constant. If the expression would not be simplified to a
10377 constant, then return NULL_TREE.
10379 Note this is primarily designed to be called after gimplification
10380 of the tree structures and when op0 is a constant. As a result
10381 of those simplifying assumptions this routine is far simpler than
10382 the generic fold routine. */
10384 tree
10385 nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
10386 tree op0)
10388 /* Make sure we have a suitable constant argument. */
10389 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
10391 tree subop;
10393 if (TREE_CODE (op0) == COMPLEX_CST)
10394 subop = TREE_REALPART (op0);
10395 else
10396 subop = op0;
10398 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
10399 return NULL_TREE;
10402 switch (code)
10404 case NOP_EXPR:
10405 case FLOAT_EXPR:
10406 case CONVERT_EXPR:
10407 case FIX_TRUNC_EXPR:
10408 case FIX_FLOOR_EXPR:
10409 case FIX_CEIL_EXPR:
10410 return fold_convert_const (code, type, op0);
10412 case NEGATE_EXPR:
10413 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10414 return fold_negate_const (op0, type);
10415 else
10416 return NULL_TREE;
10418 case ABS_EXPR:
10419 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10420 return fold_abs_const (op0, type);
10421 else
10422 return NULL_TREE;
10424 case BIT_NOT_EXPR:
10425 if (TREE_CODE (op0) == INTEGER_CST)
10426 return fold_not_const (op0, type);
10427 else
10428 return NULL_TREE;
10430 case REALPART_EXPR:
10431 if (TREE_CODE (op0) == COMPLEX_CST)
10432 return TREE_REALPART (op0);
10433 else
10434 return NULL_TREE;
10436 case IMAGPART_EXPR:
10437 if (TREE_CODE (op0) == COMPLEX_CST)
10438 return TREE_IMAGPART (op0);
10439 else
10440 return NULL_TREE;
10442 case CONJ_EXPR:
10443 if (TREE_CODE (op0) == COMPLEX_CST
10444 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10445 return build_complex (type, TREE_REALPART (op0),
10446 negate_expr (TREE_IMAGPART (op0)));
10447 return NULL_TREE;
10449 default:
10450 return NULL_TREE;
10454 /* If EXP represents referencing an element in a constant string
10455 (either via pointer arithmetic or array indexing), return the
10456 tree representing the value accessed, otherwise return NULL. */
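/* Illustrative example (editorial addition): an ARRAY_REF of the
   STRING_CST "abc" with constant index 1 folds to the character
   constant 'b', provided the element type is a single-byte integer
   mode as checked below.  */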
10458 tree
10459 fold_read_from_constant_string (tree exp)
10461 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10463 tree exp1 = TREE_OPERAND (exp, 0);
10464 tree index;
10465 tree string;
10467 if (TREE_CODE (exp) == INDIRECT_REF)
10468 string = string_constant (exp1, &index);
10469 else
10471 tree low_bound = array_ref_low_bound (exp);
10472 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10474 /* Optimize the special case of a zero lower bound.
10476 We convert the low_bound to sizetype to avoid some problems
10477 with constant folding. (E.g. suppose the lower bound is 1,
10478 and its mode is QI. Without the conversion, (ARRAY
10479 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10480 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
10481 if (! integer_zerop (low_bound))
10482 index = size_diffop (index, fold_convert (sizetype, low_bound));
10484 string = exp1;
10487 if (string
10488 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10489 && TREE_CODE (string) == STRING_CST
10490 && TREE_CODE (index) == INTEGER_CST
10491 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10492 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10493 == MODE_INT)
10494 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10495 return fold_convert (TREE_TYPE (exp),
10496 build_int_cst (NULL_TREE,
10497 (TREE_STRING_POINTER (string)
10498 [TREE_INT_CST_LOW (index)])));
10500 return NULL;
10503 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10504 an integer constant or real constant.
10506 TYPE is the type of the result. */
10508 static tree
10509 fold_negate_const (tree arg0, tree type)
10511 tree t = NULL_TREE;
10513 switch (TREE_CODE (arg0))
10515 case INTEGER_CST:
10517 unsigned HOST_WIDE_INT low;
10518 HOST_WIDE_INT high;
10519 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10520 TREE_INT_CST_HIGH (arg0),
10521 &low, &high);
10522 t = build_int_cst_wide (type, low, high);
10523 t = force_fit_type (t, 1,
10524 (overflow | TREE_OVERFLOW (arg0))
10525 && !TYPE_UNSIGNED (type),
10526 TREE_CONSTANT_OVERFLOW (arg0));
10527 break;
10530 case REAL_CST:
10531 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10532 break;
10534 default:
10535 gcc_unreachable ();
10538 return t;
10541 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10542 an integer constant or real constant.
10544 TYPE is the type of the result. */
10546 tree
10547 fold_abs_const (tree arg0, tree type)
10549 tree t = NULL_TREE;
10551 switch (TREE_CODE (arg0))
10553 case INTEGER_CST:
10554 /* If the value is unsigned, then the absolute value is
10555 the same as the ordinary value. */
10556 if (TYPE_UNSIGNED (type))
10557 t = arg0;
10558 /* Similarly, if the value is non-negative. */
10559 else if (INT_CST_LT (integer_minus_one_node, arg0))
10560 t = arg0;
10561 /* If the value is negative, then the absolute value is
10562 its negation. */
10563 else
10565 unsigned HOST_WIDE_INT low;
10566 HOST_WIDE_INT high;
10567 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10568 TREE_INT_CST_HIGH (arg0),
10569 &low, &high);
10570 t = build_int_cst_wide (type, low, high);
10571 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
10572 TREE_CONSTANT_OVERFLOW (arg0));
10574 break;
10576 case REAL_CST:
10577 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10578 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10579 else
10580 t = arg0;
10581 break;
10583 default:
10584 gcc_unreachable ();
10587 return t;
10590 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10591 constant. TYPE is the type of the result. */
10593 static tree
10594 fold_not_const (tree arg0, tree type)
10596 tree t = NULL_TREE;
10598 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
10600 t = build_int_cst_wide (type,
10601 ~ TREE_INT_CST_LOW (arg0),
10602 ~ TREE_INT_CST_HIGH (arg0));
10603 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
10604 TREE_CONSTANT_OVERFLOW (arg0));
10606 return t;
/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
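/* Editor's illustration (hypothetical helper, not in the original
   source): the reduction to LT and EQ above rests on these identities,
   written out with plain host values.  */
#if 0
static void
compare_sketch (HOST_WIDE_INT a, HOST_WIDE_INT b,
                int *gt, int *ge, int *le, int *ne)
{
  *gt = (b < a);       /* GT: swap the arguments and do LT.  */
  *ge = !(a < b);      /* GE: do LT and invert the result.  */
  *le = !(b < a);      /* LE: swap the arguments, do LT, invert.  */
  *ne = !(a == b);     /* NE: do EQ and invert the result.  */
}
#endif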
/* Build a cleanup point expression containing EXPR, with type TYPE.
   Don't build a cleanup point expression for an EXPR which doesn't
   have side effects.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
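/* Editor's sketch (hypothetical caller, not from the original file):
   a cleanup point bounds the lifetime of temporaries requiring
   cleanups, so wrapping is pointless when EXPR cannot create any.  */
#if 0
static tree
wrap_stmt_sketch (tree stmt)
{
  /* Returns STMT unchanged when it has no side effects; otherwise
     STMT wrapped in a CLEANUP_POINT_EXPR of the same type.  */
  return fold_build_cleanup_point_expr (TREE_TYPE (stmt), stmt);
}
#endif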
/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF.  */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base)
             || TREE_CODE (base) == REALPART_EXPR
             || TREE_CODE (base) == IMAGPART_EXPR)
        base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}
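/* Editor's illustration at the C source level (hypothetical snippet):
   taking the address of an indirection is the identity, so the tree
   for "&*p" is folded straight back to "p" instead of building
   ADDR_EXPR around an INDIRECT_REF.  */
#if 0
static int *
addr_of_deref (int *p)
{
  return &*p;   /* Folded to plain "p"; no dereference is generated.  */
}
#endif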
tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (lang_hooks.types_compatible_p (type, optype))
        return op;
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
        return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  subtype = TREE_TYPE (sub);
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      sub = build_fold_indirect_ref (sub);
      return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
    }

  return build1 (INDIRECT_REF, type, t);
}
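/* Editor's illustration of the three folds above at the C source
   level (hypothetical snippets, not from the original file).  */
#if 0
static void
indirect_examples (void)
{
  int x, arr[4], (*parr)[4] = &arr;
  int a = *&x;              /* *&p => p: folds to plain "x".  */
  int b = *(int *) &arr;    /* *(foo *)&fooarray => fooarray[0].  */
  int c = *(int *) parr;    /* *(foo *)fooarrptr => (*fooarrptr)[0].  */
}
#endif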
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
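/* Editor's illustration (hypothetical C statements): for a statement
   whose value is discarded, the walk above keeps only the pieces that
   can have side effects.  */
#if 0
extern int f (void);

static void
ignored_examples (int x)
{
  (void) (x + 1);     /* No side effects at all: folds to literal 0.  */
  (void) (f () + x);  /* Binary node: only the call to f survives.  */
}
#endif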
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
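/* Editor's sketch of the power-of-two branch above in plain host
   arithmetic (round_up_hwi is a hypothetical name): rounding V up to
   a power of two D is (V + D - 1) & -D.  E.g. with D == 8,
   (13 + 7) & -8 == 20 & ~7 == 16.  */
#if 0
static unsigned HOST_WIDE_INT
round_up_hwi (unsigned HOST_WIDE_INT v, unsigned HOST_WIDE_INT d)
{
  /* Assumes D is a power of two, so -D is a mask clearing the low
     bits; adding D - 1 first makes the truncation round upward.  */
  return (v + d - 1) & -d;
}
#endif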
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
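/* Editor's sketch, as above but for rounding down (round_down_hwi is
   a hypothetical name): for a power of two D, V & -D simply clears
   the low bits, e.g. 13 & -8 == 8.  */
#if 0
static unsigned HOST_WIDE_INT
round_down_hwi (unsigned HOST_WIDE_INT v, unsigned HOST_WIDE_INT d)
{
  return v & -d;   /* Assumes D is a power of two.  */
}
#endif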
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep);

      if (TREE_CODE (core) == INDIRECT_REF)
        core = TREE_OPERAND (core, 0);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
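/* Editor's sketch (hypothetical caller): splitting the tree for an
   address like "&x.b" yields the decl for "x" as core, the field's
   bit offset in *pbitpos, and any variable part (e.g. an array index)
   in *poffset; a non-ADDR_EXPR pointer comes back as its own core
   with a zero offset.  */
#if 0
static tree
core_of_sketch (tree addr)
{
  HOST_WIDE_INT bitpos;
  tree offset;

  return split_address_to_core_and_offset (addr, &bitpos, &offset);
}
#endif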
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
      if (!host_integerp (tdiff, 0))
        return false;

      *diff = tree_low_cst (tdiff, 0);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
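/* Editor's illustration (hypothetical, C-level view of the trees
   involved): two addresses differ by a constant only when they share
   the same core object and their offsets fold to a constant.  */
#if 0
struct ptrdiff_sketch { int a; int b; };
static struct ptrdiff_sketch pd_x;
static int pd_y;
/* For the ADDR_EXPR trees of &pd_x.b and &pd_x.a, both cores are
   "pd_x", so ptr_difference_const returns true with *diff equal to
   the byte offset of b.  For &pd_x.a and &pd_y the cores differ,
   so it returns false.  */
#endif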