PR middle-end/19583
[official-gcc.git] / gcc / fold-const.c
blob 294f94cbf1b67cdda2713b6f61962ed8118f751a
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (tree, enum tree_code,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_relational_hi_lo (enum tree_code *, const tree,
                                   tree *, tree *);
static bool tree_expr_nonzero_p (tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

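/* For example, with 32-bit HOST_WIDE_INTs, 0x7fffffff + 1 wraps to
   0x80000000: ~(a ^ b) = ~0x7ffffffe has its sign bit set (the addends
   agree in sign) and a ^ sum = 0xffffffff has its sign bit set (the
   sum's sign differs), so the AND above is negative and overflow is
   reported.  */
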
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

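/* For instance, with a 32-bit HOST_WIDE_INT, BASE is 0x10000 and the
   word 0x12345678 splits into LOWPART 0x5678 and HIGHPART 0x1234;
   LOWPART (x) + HIGHPART (x) * BASE always reconstructs x.  */
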
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}

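/* decode is the exact inverse of encode: each of the four half-words
   is non-negative and less than BASE, so the multiply-and-add above
   loses no information.  */
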
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if,
        CONST_OVERFLOWED is nonzero
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half?  */
      if (high & ((unsigned HOST_WIDE_INT) 1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half?  */
      if (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}

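/* As an illustration: forcing the value 0x1ff into an 8-bit unsigned
   type masks it down to 0xff; with OVERFLOWABLE < 0 the new node would
   additionally be marked with TREE_OVERFLOW, while with OVERFLOWABLE > 0
   an unsigned (non-sizetype) result is left unflagged.  */
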
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}

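/* The carry computation above exploits wraparound: the low-word sum is
   reduced mod 2**HOST_BITS_PER_WIDE_INT, so it is smaller than L1
   exactly when a carry out of the low word occurred, making (l < l1)
   the carry bit added into the high word.  */
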
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}

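/* The nested loops above are schoolbook multiplication in base
   2**(HOST_BITS_PER_WIDE_INT / 2): digit I of ARG1 times digit J of
   ARG2 accumulates into digit I+J of the 8-digit product.  The final
   test reports signed overflow unless the discarded top half is a
   pure sign extension of the retained low half.  */
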
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

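/* The split shift `l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1'
   above is deliberate: for COUNT == 0 a single shift by the full word
   width would be undefined behavior in C, whereas shifting by one less
   and then by 1 more is always well defined.  */
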
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

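/* Both rotate routines first reduce COUNT into [0, PREC) and then OR
   together two complementary logical shifts, so that bits shifted out
   of one end re-enter at the other; e.g. a left rotate by N is
   (x << N) | (x >> (PREC - N)) within the PREC-bit value.  */
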
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order divisor digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;	/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}

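/* To illustrate the rounding codes: dividing -7 by 4 gives a quotient
   of -1 for TRUNC_DIV_EXPR, -2 for FLOOR_DIV_EXPR, -1 for
   CEIL_DIV_EXPR, and -2 for ROUND_DIV_EXPR, since the remainder's
   magnitude (3) exceeds half the divisor.  */
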
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

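/* In two's complement, the most negative value is the only one whose
   negation overflows: e.g. for a 32-bit int there is no representable
   -(-2147483648), so the test above compares against the lone bit
   pattern 1 << (prec - 1).  */
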
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

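/* The RSHIFT_EXPR case works because (int) x >> 31 is either 0 or -1
   (an arithmetic shift replicates the sign bit), so its negation is 0
   or 1, which is exactly what the logical shift (unsigned) x >> 31
   produces.  */
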
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 0)));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 1)));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                           TREE_OPERAND (t, 1),
                                           TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               TREE_OPERAND (t, 0),
                                               negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               negate_expr (tem),
                                               TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

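/* For example, splitting `x + 3' with CODE == PLUS_EXPR returns the
   variable part `x' and stores 3 in *LITP, while splitting `x - 3'
   returns `x' and stores 3 in *MINUS_LITP.  */
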
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold (build2 (code, type, fold_convert (type, t1),
                       fold_convert (type, t2)));
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}

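/* For example, folding 255 + 1 in an 8-bit unsigned type builds the
   two-word value 256, which force_fit_type then truncates to 0 without
   setting TREE_OVERFLOW, since unsigned arithmetic wraps.  */
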
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}

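/* The complex cases follow the textbook formulas: (a+bi)(c+di) =
   (ac-bd) + (ad+bc)i, and (a+bi)/(c+di) = ((ac+bd) + (bc-ad)i)
   / (c*c + d*d), with MAGSQUARED holding c*c + d*d.  */
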
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a sizetype.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build2 (code, type, arg0, arg1));
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}

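/* For instance, converting the REAL_CST 1e30 to a 32-bit signed
   integer type saturates to the type's maximum 2147483647 and marks
   the result with TREE_OVERFLOW; a NaN converts to zero, likewise
   flagged.  */
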
/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }

  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

1880 /* Convert expression ARG to type TYPE. Used by the middle-end for
1881 simple conversions in preference to calling the front-end's convert. */
1883 tree
1884 fold_convert (tree type, tree arg)
1886 tree orig = TREE_TYPE (arg);
1887 tree tem;
1889 if (type == orig)
1890 return arg;
1892 if (TREE_CODE (arg) == ERROR_MARK
1893 || TREE_CODE (type) == ERROR_MARK
1894 || TREE_CODE (orig) == ERROR_MARK)
1895 return error_mark_node;
1897 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1898 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1899 TYPE_MAIN_VARIANT (orig)))
1900 return fold (build1 (NOP_EXPR, type, arg));
1902 switch (TREE_CODE (type))
1904 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1905 case POINTER_TYPE: case REFERENCE_TYPE:
1906 case OFFSET_TYPE:
1907 if (TREE_CODE (arg) == INTEGER_CST)
1909 tem = fold_convert_const (NOP_EXPR, type, arg);
1910 if (tem != NULL_TREE)
1911 return tem;
1913 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1914 || TREE_CODE (orig) == OFFSET_TYPE)
1915 return fold (build1 (NOP_EXPR, type, arg));
1916 if (TREE_CODE (orig) == COMPLEX_TYPE)
1918 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1919 return fold_convert (type, tem);
1921 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1922 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1923 return fold (build1 (NOP_EXPR, type, arg));
1925 case REAL_TYPE:
1926 if (TREE_CODE (arg) == INTEGER_CST)
1928 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1929 if (tem != NULL_TREE)
1930 return tem;
1932 else if (TREE_CODE (arg) == REAL_CST)
1934 tem = fold_convert_const (NOP_EXPR, type, arg);
1935 if (tem != NULL_TREE)
1936 return tem;
1939 switch (TREE_CODE (orig))
1941 case INTEGER_TYPE: case CHAR_TYPE:
1942 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1943 case POINTER_TYPE: case REFERENCE_TYPE:
1944 return fold (build1 (FLOAT_EXPR, type, arg));
1946 case REAL_TYPE:
1947 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1948 type, arg));
1950 case COMPLEX_TYPE:
1951 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1952 return fold_convert (type, tem);
1954 default:
1955 gcc_unreachable ();
1958 case COMPLEX_TYPE:
1959 switch (TREE_CODE (orig))
1961 case INTEGER_TYPE: case CHAR_TYPE:
1962 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1963 case POINTER_TYPE: case REFERENCE_TYPE:
1964 case REAL_TYPE:
1965 return build2 (COMPLEX_EXPR, type,
1966 fold_convert (TREE_TYPE (type), arg),
1967 fold_convert (TREE_TYPE (type), integer_zero_node));
1968 case COMPLEX_TYPE:
1970 tree rpart, ipart;
1972 if (TREE_CODE (arg) == COMPLEX_EXPR)
1974 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1975 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1976 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1979 arg = save_expr (arg);
1980 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1981 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1982 rpart = fold_convert (TREE_TYPE (type), rpart);
1983 ipart = fold_convert (TREE_TYPE (type), ipart);
1984 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1987 default:
1988 gcc_unreachable ();
1991 case VECTOR_TYPE:
1992 if (integer_zerop (arg))
1993 return build_zero_vector (type);
1994 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1995 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1996 || TREE_CODE (orig) == VECTOR_TYPE);
1997 return fold (build1 (NOP_EXPR, type, arg));
1999 case VOID_TYPE:
2000 return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
2002 default:
2003 gcc_unreachable ();
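/* Illustrative example (editor's sketch, not from the original source):
the usual middle-end idiom is to call fold_convert rather than the
front-end's convert, e.g. to widen an index to sizetype. Constants
fold immediately; other trees get a folded NOP_EXPR. The helper name
is hypothetical. */
static tree
example_widen_index (tree index)
{
return fold_convert (sizetype, index);
}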
2007 /* Return an expr equal to X but certainly not valid as an lvalue. */
2009 tree
2010 non_lvalue (tree x)
2012 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2013 us. */
2014 if (in_gimple_form)
2015 return x;
2017 /* We only need to wrap lvalue tree codes. */
2018 switch (TREE_CODE (x))
2020 case VAR_DECL:
2021 case PARM_DECL:
2022 case RESULT_DECL:
2023 case LABEL_DECL:
2024 case FUNCTION_DECL:
2025 case SSA_NAME:
2027 case COMPONENT_REF:
2028 case INDIRECT_REF:
2029 case ALIGN_INDIRECT_REF:
2030 case MISALIGNED_INDIRECT_REF:
2031 case ARRAY_REF:
2032 case ARRAY_RANGE_REF:
2033 case BIT_FIELD_REF:
2034 case OBJ_TYPE_REF:
2036 case REALPART_EXPR:
2037 case IMAGPART_EXPR:
2038 case PREINCREMENT_EXPR:
2039 case PREDECREMENT_EXPR:
2040 case SAVE_EXPR:
2041 case TRY_CATCH_EXPR:
2042 case WITH_CLEANUP_EXPR:
2043 case COMPOUND_EXPR:
2044 case MODIFY_EXPR:
2045 case TARGET_EXPR:
2046 case COND_EXPR:
2047 case BIND_EXPR:
2048 case MIN_EXPR:
2049 case MAX_EXPR:
2050 break;
2052 default:
2053 /* Assume the worst for front-end tree codes. */
2054 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2055 break;
2056 return x;
2058 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2061 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2062 Zero means allow extended lvalues. */
2064 int pedantic_lvalues;
2066 /* When pedantic, return an expr equal to X but certainly not valid as a
2067 pedantic lvalue. Otherwise, return X. */
2069 static tree
2070 pedantic_non_lvalue (tree x)
2072 if (pedantic_lvalues)
2073 return non_lvalue (x);
2074 else
2075 return x;
2078 /* Given a tree comparison code, return the code that is the logical inverse
2079 of the given code. It is not safe to do this for floating-point
2080 comparisons, except for NE_EXPR and EQ_EXPR, so we take an HONOR_NANS
2081 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2083 static enum tree_code
2084 invert_tree_comparison (enum tree_code code, bool honor_nans)
2086 if (honor_nans && flag_trapping_math)
2087 return ERROR_MARK;
2089 switch (code)
2091 case EQ_EXPR:
2092 return NE_EXPR;
2093 case NE_EXPR:
2094 return EQ_EXPR;
2095 case GT_EXPR:
2096 return honor_nans ? UNLE_EXPR : LE_EXPR;
2097 case GE_EXPR:
2098 return honor_nans ? UNLT_EXPR : LT_EXPR;
2099 case LT_EXPR:
2100 return honor_nans ? UNGE_EXPR : GE_EXPR;
2101 case LE_EXPR:
2102 return honor_nans ? UNGT_EXPR : GT_EXPR;
2103 case LTGT_EXPR:
2104 return UNEQ_EXPR;
2105 case UNEQ_EXPR:
2106 return LTGT_EXPR;
2107 case UNGT_EXPR:
2108 return LE_EXPR;
2109 case UNGE_EXPR:
2110 return LT_EXPR;
2111 case UNLT_EXPR:
2112 return GE_EXPR;
2113 case UNLE_EXPR:
2114 return GT_EXPR;
2115 case ORDERED_EXPR:
2116 return UNORDERED_EXPR;
2117 case UNORDERED_EXPR:
2118 return ORDERED_EXPR;
2119 default:
2120 gcc_unreachable ();
2124 /* Similar, but return the comparison that results if the operands are
2125 swapped. This is safe for floating-point. */
2127 enum tree_code
2128 swap_tree_comparison (enum tree_code code)
2130 switch (code)
2132 case EQ_EXPR:
2133 case NE_EXPR:
2134 return code;
2135 case GT_EXPR:
2136 return LT_EXPR;
2137 case GE_EXPR:
2138 return LE_EXPR;
2139 case LT_EXPR:
2140 return GT_EXPR;
2141 case LE_EXPR:
2142 return GE_EXPR;
2143 default:
2144 gcc_unreachable ();
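/* Illustrative example (editor's sketch, not from the original source):
inverting "x < y" yields GE_EXPR when NaNs are irrelevant, UNGE_EXPR
when they must be honored, and ERROR_MARK under trapping math; swapping
the operands, by contrast, is always just GT_EXPR. The helper name is
hypothetical. */
static enum tree_code
example_invert_lt (bool honor_nans)
{
return invert_tree_comparison (LT_EXPR, honor_nans);
}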
2149 /* Convert a comparison tree code from an enum tree_code representation
2150 into a compcode bit-based encoding. This function is the inverse of
2151 compcode_to_comparison. */
2153 static enum comparison_code
2154 comparison_to_compcode (enum tree_code code)
2156 switch (code)
2158 case LT_EXPR:
2159 return COMPCODE_LT;
2160 case EQ_EXPR:
2161 return COMPCODE_EQ;
2162 case LE_EXPR:
2163 return COMPCODE_LE;
2164 case GT_EXPR:
2165 return COMPCODE_GT;
2166 case NE_EXPR:
2167 return COMPCODE_NE;
2168 case GE_EXPR:
2169 return COMPCODE_GE;
2170 case ORDERED_EXPR:
2171 return COMPCODE_ORD;
2172 case UNORDERED_EXPR:
2173 return COMPCODE_UNORD;
2174 case UNLT_EXPR:
2175 return COMPCODE_UNLT;
2176 case UNEQ_EXPR:
2177 return COMPCODE_UNEQ;
2178 case UNLE_EXPR:
2179 return COMPCODE_UNLE;
2180 case UNGT_EXPR:
2181 return COMPCODE_UNGT;
2182 case LTGT_EXPR:
2183 return COMPCODE_LTGT;
2184 case UNGE_EXPR:
2185 return COMPCODE_UNGE;
2186 default:
2187 gcc_unreachable ();
2191 /* Convert a compcode bit-based encoding of a comparison operator back
2192 to GCC's enum tree_code representation. This function is the
2193 inverse of comparison_to_compcode. */
2195 static enum tree_code
2196 compcode_to_comparison (enum comparison_code code)
2198 switch (code)
2200 case COMPCODE_LT:
2201 return LT_EXPR;
2202 case COMPCODE_EQ:
2203 return EQ_EXPR;
2204 case COMPCODE_LE:
2205 return LE_EXPR;
2206 case COMPCODE_GT:
2207 return GT_EXPR;
2208 case COMPCODE_NE:
2209 return NE_EXPR;
2210 case COMPCODE_GE:
2211 return GE_EXPR;
2212 case COMPCODE_ORD:
2213 return ORDERED_EXPR;
2214 case COMPCODE_UNORD:
2215 return UNORDERED_EXPR;
2216 case COMPCODE_UNLT:
2217 return UNLT_EXPR;
2218 case COMPCODE_UNEQ:
2219 return UNEQ_EXPR;
2220 case COMPCODE_UNLE:
2221 return UNLE_EXPR;
2222 case COMPCODE_UNGT:
2223 return UNGT_EXPR;
2224 case COMPCODE_LTGT:
2225 return LTGT_EXPR;
2226 case COMPCODE_UNGE:
2227 return UNGE_EXPR;
2228 default:
2229 gcc_unreachable ();
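/* Illustrative example (editor's sketch, not from the original source):
the bit encoding makes logical combination a bit-wise operation, e.g.
COMPCODE_LT | COMPCODE_EQ is 1 | 2 == 3 == COMPCODE_LE, so the
hypothetical helper below returns LE_EXPR. */
static enum tree_code
example_lt_or_eq (void)
{
return compcode_to_comparison (comparison_to_compcode (LT_EXPR)
| comparison_to_compcode (EQ_EXPR));
}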
2233 /* Return a tree for the comparison which is the combination of
2234 doing the AND or OR (depending on CODE) of the two operations LCODE
2235 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2236 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2237 if this makes the transformation invalid. */
2239 tree
2240 combine_comparisons (enum tree_code code, enum tree_code lcode,
2241 enum tree_code rcode, tree truth_type,
2242 tree ll_arg, tree lr_arg)
2244 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2245 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2246 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2247 enum comparison_code compcode;
2249 switch (code)
2251 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2252 compcode = lcompcode & rcompcode;
2253 break;
2255 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2256 compcode = lcompcode | rcompcode;
2257 break;
2259 default:
2260 return NULL_TREE;
2263 if (!honor_nans)
2265 /* Eliminate unordered comparisons, as well as LTGT and ORD
2266 which are not used unless the mode has NaNs. */
2267 compcode &= ~COMPCODE_UNORD;
2268 if (compcode == COMPCODE_LTGT)
2269 compcode = COMPCODE_NE;
2270 else if (compcode == COMPCODE_ORD)
2271 compcode = COMPCODE_TRUE;
2273 else if (flag_trapping_math)
2275 /* Check that the original operation and the optimized ones will trap
2276 under the same condition. */
2277 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2278 && (lcompcode != COMPCODE_EQ)
2279 && (lcompcode != COMPCODE_ORD);
2280 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2281 && (rcompcode != COMPCODE_EQ)
2282 && (rcompcode != COMPCODE_ORD);
2283 bool trap = (compcode & COMPCODE_UNORD) == 0
2284 && (compcode != COMPCODE_EQ)
2285 && (compcode != COMPCODE_ORD);
2287 /* In a short-circuited boolean expression the LHS might be
2288 such that the RHS, if evaluated, will never trap. For
2289 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2290 if neither x nor y is NaN. (This is a mixed blessing: for
2291 example, the expression above will never trap, hence
2292 optimizing it to x < y would be invalid). */
2293 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2294 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2295 rtrap = false;
2297 /* If the comparison was short-circuited, and only the RHS
2298 trapped, we may now generate a spurious trap. */
2299 if (rtrap && !ltrap
2300 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2301 return NULL_TREE;
2303 /* If we changed the conditions that cause a trap, we lose. */
2304 if ((ltrap || rtrap) != trap)
2305 return NULL_TREE;
2308 if (compcode == COMPCODE_TRUE)
2309 return constant_boolean_node (true, truth_type);
2310 else if (compcode == COMPCODE_FALSE)
2311 return constant_boolean_node (false, truth_type);
2312 else
2313 return fold (build2 (compcode_to_comparison (compcode),
2314 truth_type, ll_arg, lr_arg));
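/* Illustrative example (editor's sketch, not from the original source):
folding "x < y || x == y" over identical operands into "x <= y";
NULL_TREE comes back when the NaN and trap rules above forbid the
merge. The helper name is hypothetical. */
static tree
example_combine_lt_eq (tree x, tree y)
{
return combine_comparisons (TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
boolean_type_node, x, y);
}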
2317 /* Return nonzero if CODE is a tree code that represents a truth value. */
2319 static int
2320 truth_value_p (enum tree_code code)
2322 return (TREE_CODE_CLASS (code) == tcc_comparison
2323 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2324 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2325 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2328 /* Return nonzero if two operands (typically of the same tree node)
2329 are necessarily equal. If either argument has side-effects this
2330 function returns zero. FLAGS modifies behavior as follows:
2332 If OEP_ONLY_CONST is set, only return nonzero for constants.
2333 This function tests whether the operands are indistinguishable;
2334 it does not test whether they are equal using C's == operation.
2335 The distinction is important for IEEE floating point, because
2336 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2337 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2339 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2340 even though it may hold multiple values during a function.
2341 This is because a GCC tree node guarantees that nothing else is
2342 executed between the evaluation of its "operands" (which may often
2343 be evaluated in arbitrary order). Hence if the operands themselves
2344 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2345 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2346 unset means assuming isochronic (or instantaneous) tree equivalence.
2347 Unless comparing arbitrary expression trees, such as from different
2348 statements, this flag can usually be left unset.
2350 If OEP_PURE_SAME is set, then pure functions with identical arguments
2351 are considered the same. It is used when the caller has other ways
2352 to ensure that global memory is unchanged in between. */
2354 int
2355 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2357 /* If either is ERROR_MARK, they aren't equal. */
2358 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2359 return 0;
2361 /* If both types don't have the same signedness, then we can't consider
2362 them equal. We must check this before the STRIP_NOPS calls
2363 because they may change the signedness of the arguments. */
2364 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2365 return 0;
2367 STRIP_NOPS (arg0);
2368 STRIP_NOPS (arg1);
2370 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2371 /* This is needed for conversions and for COMPONENT_REF.
2372 Might as well play it safe and always test this. */
2373 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2374 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2375 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2376 return 0;
2378 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2379 We don't care about side effects in that case because the SAVE_EXPR
2380 takes care of that for us. In all other cases, two expressions are
2381 equal if they have no side effects. If we have two identical
2382 expressions with side effects that should be treated the same due
2383 to the only side effects being identical SAVE_EXPR's, that will
2384 be detected in the recursive calls below. */
2385 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2386 && (TREE_CODE (arg0) == SAVE_EXPR
2387 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2388 return 1;
2390 /* Next handle constant cases, those for which we can return 1 even
2391 if ONLY_CONST is set. */
2392 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2393 switch (TREE_CODE (arg0))
2395 case INTEGER_CST:
2396 return (! TREE_CONSTANT_OVERFLOW (arg0)
2397 && ! TREE_CONSTANT_OVERFLOW (arg1)
2398 && tree_int_cst_equal (arg0, arg1));
2400 case REAL_CST:
2401 return (! TREE_CONSTANT_OVERFLOW (arg0)
2402 && ! TREE_CONSTANT_OVERFLOW (arg1)
2403 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2404 TREE_REAL_CST (arg1)));
2406 case VECTOR_CST:
2408 tree v1, v2;
2410 if (TREE_CONSTANT_OVERFLOW (arg0)
2411 || TREE_CONSTANT_OVERFLOW (arg1))
2412 return 0;
2414 v1 = TREE_VECTOR_CST_ELTS (arg0);
2415 v2 = TREE_VECTOR_CST_ELTS (arg1);
2416 while (v1 && v2)
2418 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2419 flags))
2420 return 0;
2421 v1 = TREE_CHAIN (v1);
2422 v2 = TREE_CHAIN (v2);
2425 return 1;
2428 case COMPLEX_CST:
2429 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2430 flags)
2431 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2432 flags));
2434 case STRING_CST:
2435 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2436 && ! memcmp (TREE_STRING_POINTER (arg0),
2437 TREE_STRING_POINTER (arg1),
2438 TREE_STRING_LENGTH (arg0)));
2440 case ADDR_EXPR:
2441 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2442 0);
2443 default:
2444 break;
2447 if (flags & OEP_ONLY_CONST)
2448 return 0;
2450 /* Define macros to test an operand from arg0 and arg1 for equality and a
2451 variant that allows null and views null as being different from any
2452 non-null value. In the latter case, if either is null, they both
2453 must be; otherwise, do the normal comparison. */
2454 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2455 TREE_OPERAND (arg1, N), flags)
2457 #define OP_SAME_WITH_NULL(N) \
2458 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2459 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2461 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2463 case tcc_unary:
2464 /* Two conversions are equal only if signedness and modes match. */
2465 switch (TREE_CODE (arg0))
2467 case NOP_EXPR:
2468 case CONVERT_EXPR:
2469 case FIX_CEIL_EXPR:
2470 case FIX_TRUNC_EXPR:
2471 case FIX_FLOOR_EXPR:
2472 case FIX_ROUND_EXPR:
2473 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2474 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2475 return 0;
2476 break;
2477 default:
2478 break;
2481 return OP_SAME (0);
2484 case tcc_comparison:
2485 case tcc_binary:
2486 if (OP_SAME (0) && OP_SAME (1))
2487 return 1;
2489 /* For commutative ops, allow the other order. */
2490 return (commutative_tree_code (TREE_CODE (arg0))
2491 && operand_equal_p (TREE_OPERAND (arg0, 0),
2492 TREE_OPERAND (arg1, 1), flags)
2493 && operand_equal_p (TREE_OPERAND (arg0, 1),
2494 TREE_OPERAND (arg1, 0), flags));
2496 case tcc_reference:
2497 /* If either of the pointer (or reference) expressions we are
2498 dereferencing contain a side effect, these cannot be equal. */
2499 if (TREE_SIDE_EFFECTS (arg0)
2500 || TREE_SIDE_EFFECTS (arg1))
2501 return 0;
2503 switch (TREE_CODE (arg0))
2505 case INDIRECT_REF:
2506 case ALIGN_INDIRECT_REF:
2507 case MISALIGNED_INDIRECT_REF:
2508 case REALPART_EXPR:
2509 case IMAGPART_EXPR:
2510 return OP_SAME (0);
2512 case ARRAY_REF:
2513 case ARRAY_RANGE_REF:
2514 /* Operands 2 and 3 may be null. */
2515 return (OP_SAME (0)
2516 && OP_SAME (1)
2517 && OP_SAME_WITH_NULL (2)
2518 && OP_SAME_WITH_NULL (3));
2520 case COMPONENT_REF:
2521 /* Handle operand 2 the same as for ARRAY_REF. */
2522 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2524 case BIT_FIELD_REF:
2525 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2527 default:
2528 return 0;
2531 case tcc_expression:
2532 switch (TREE_CODE (arg0))
2534 case ADDR_EXPR:
2535 case TRUTH_NOT_EXPR:
2536 return OP_SAME (0);
2538 case TRUTH_ANDIF_EXPR:
2539 case TRUTH_ORIF_EXPR:
2540 return OP_SAME (0) && OP_SAME (1);
2542 case TRUTH_AND_EXPR:
2543 case TRUTH_OR_EXPR:
2544 case TRUTH_XOR_EXPR:
2545 if (OP_SAME (0) && OP_SAME (1))
2546 return 1;
2548 /* Otherwise take into account this is a commutative operation. */
2549 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2550 TREE_OPERAND (arg1, 1), flags)
2551 && operand_equal_p (TREE_OPERAND (arg0, 1),
2552 TREE_OPERAND (arg1, 0), flags));
2554 case CALL_EXPR:
2555 /* If the CALL_EXPRs call different functions, then they
2556 clearly cannot be equal. */
2557 if (!OP_SAME (0))
2558 return 0;
2561 unsigned int cef = call_expr_flags (arg0);
2562 if (flags & OEP_PURE_SAME)
2563 cef &= ECF_CONST | ECF_PURE;
2564 else
2565 cef &= ECF_CONST;
2566 if (!cef)
2567 return 0;
2570 /* Now see if all the arguments are the same. operand_equal_p
2571 does not handle TREE_LIST, so we walk the operands here
2572 feeding them to operand_equal_p. */
2573 arg0 = TREE_OPERAND (arg0, 1);
2574 arg1 = TREE_OPERAND (arg1, 1);
2575 while (arg0 && arg1)
2577 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2578 flags))
2579 return 0;
2581 arg0 = TREE_CHAIN (arg0);
2582 arg1 = TREE_CHAIN (arg1);
2585 /* If we get here and both argument lists are exhausted
2586 then the CALL_EXPRs are equal. */
2587 return ! (arg0 || arg1);
2589 default:
2590 return 0;
2593 case tcc_declaration:
2594 /* Consider __builtin_sqrt equal to sqrt. */
2595 return (TREE_CODE (arg0) == FUNCTION_DECL
2596 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2597 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2598 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2600 default:
2601 return 0;
2604 #undef OP_SAME
2605 #undef OP_SAME_WITH_NULL
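/* Illustrative example (editor's sketch, not from the original source):
a decl compares equal to itself under the default flags, while
OEP_ONLY_CONST restricts the answer to constants. The helper name is
hypothetical. */
static int
example_decl_equal (tree decl)
{
/* For a VAR_DECL the first call yields 1 and the second yields 0,
so the whole expression is 1. */
return operand_equal_p (decl, decl, 0)
&& ! operand_equal_p (decl, decl, OEP_ONLY_CONST);
}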
2608 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2609 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2611 When in doubt, return 0. */
2613 static int
2614 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2616 int unsignedp1, unsignedpo;
2617 tree primarg0, primarg1, primother;
2618 unsigned int correct_width;
2620 if (operand_equal_p (arg0, arg1, 0))
2621 return 1;
2623 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2624 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2625 return 0;
2627 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2628 and see if the inner values are the same. This removes any
2629 signedness comparison, which doesn't matter here. */
2630 primarg0 = arg0, primarg1 = arg1;
2631 STRIP_NOPS (primarg0);
2632 STRIP_NOPS (primarg1);
2633 if (operand_equal_p (primarg0, primarg1, 0))
2634 return 1;
2636 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2637 actual comparison operand, ARG0.
2639 First throw away any conversions to wider types
2640 already present in the operands. */
2642 primarg1 = get_narrower (arg1, &unsignedp1);
2643 primother = get_narrower (other, &unsignedpo);
2645 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2646 if (unsignedp1 == unsignedpo
2647 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2648 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2650 tree type = TREE_TYPE (arg0);
2652 /* Make sure shorter operand is extended the right way
2653 to match the longer operand. */
2654 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2655 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2657 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2658 return 1;
2661 return 0;
2664 /* See if ARG is an expression that is either a comparison or is performing
2665 arithmetic on comparisons. The comparisons must only be comparing
2666 two different values, which will be stored in *CVAL1 and *CVAL2; if
2667 they are nonzero it means that some operands have already been found.
2668 No variables may be used anywhere else in the expression except in the
2669 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2670 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2672 If this is true, return 1. Otherwise, return zero. */
2674 static int
2675 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2677 enum tree_code code = TREE_CODE (arg);
2678 enum tree_code_class class = TREE_CODE_CLASS (code);
2680 /* We can handle some of the tcc_expression cases here. */
2681 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2682 class = tcc_unary;
2683 else if (class == tcc_expression
2684 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2685 || code == COMPOUND_EXPR))
2686 class = tcc_binary;
2688 else if (class == tcc_expression && code == SAVE_EXPR
2689 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2691 /* If we've already found a CVAL1 or CVAL2, this expression is
2692 too complex to handle. */
2693 if (*cval1 || *cval2)
2694 return 0;
2696 class = tcc_unary;
2697 *save_p = 1;
2700 switch (class)
2702 case tcc_unary:
2703 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2705 case tcc_binary:
2706 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2707 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2708 cval1, cval2, save_p));
2710 case tcc_constant:
2711 return 1;
2713 case tcc_expression:
2714 if (code == COND_EXPR)
2715 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2716 cval1, cval2, save_p)
2717 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2718 cval1, cval2, save_p)
2719 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2720 cval1, cval2, save_p));
2721 return 0;
2723 case tcc_comparison:
2724 /* First see if we can handle the first operand, then the second. For
2725 the second operand, we know *CVAL1 can't be zero. It must be that
2726 one side of the comparison is each of the values; test for the
2727 case where this isn't true by failing if the two operands
2728 are the same. */
2730 if (operand_equal_p (TREE_OPERAND (arg, 0),
2731 TREE_OPERAND (arg, 1), 0))
2732 return 0;
2734 if (*cval1 == 0)
2735 *cval1 = TREE_OPERAND (arg, 0);
2736 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2737 ;
2738 else if (*cval2 == 0)
2739 *cval2 = TREE_OPERAND (arg, 0);
2740 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2741 ;
2742 else
2743 return 0;
2745 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2746 ;
2747 else if (*cval2 == 0)
2748 *cval2 = TREE_OPERAND (arg, 1);
2749 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2750 ;
2751 else
2752 return 0;
2754 return 1;
2756 default:
2757 return 0;
2761 /* ARG is a tree that is known to contain just arithmetic operations and
2762 comparisons. Evaluate the operations in the tree substituting NEW0 for
2763 any occurrence of OLD0 as an operand of a comparison and likewise for
2764 NEW1 and OLD1. */
2766 static tree
2767 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2769 tree type = TREE_TYPE (arg);
2770 enum tree_code code = TREE_CODE (arg);
2771 enum tree_code_class class = TREE_CODE_CLASS (code);
2773 /* We can handle some of the tcc_expression cases here. */
2774 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2775 class = tcc_unary;
2776 else if (class == tcc_expression
2777 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2778 class = tcc_binary;
2780 switch (class)
2782 case tcc_unary:
2783 return fold (build1 (code, type,
2784 eval_subst (TREE_OPERAND (arg, 0),
2785 old0, new0, old1, new1)));
2787 case tcc_binary:
2788 return fold (build2 (code, type,
2789 eval_subst (TREE_OPERAND (arg, 0),
2790 old0, new0, old1, new1),
2791 eval_subst (TREE_OPERAND (arg, 1),
2792 old0, new0, old1, new1)));
2794 case tcc_expression:
2795 switch (code)
2797 case SAVE_EXPR:
2798 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2800 case COMPOUND_EXPR:
2801 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2803 case COND_EXPR:
2804 return fold (build3 (code, type,
2805 eval_subst (TREE_OPERAND (arg, 0),
2806 old0, new0, old1, new1),
2807 eval_subst (TREE_OPERAND (arg, 1),
2808 old0, new0, old1, new1),
2809 eval_subst (TREE_OPERAND (arg, 2),
2810 old0, new0, old1, new1)));
2811 default:
2812 break;
2814 /* Fall through - ??? */
2816 case tcc_comparison:
2818 tree arg0 = TREE_OPERAND (arg, 0);
2819 tree arg1 = TREE_OPERAND (arg, 1);
2821 /* We need to check both for exact equality and tree equality. The
2822 former will be true if the operand has a side-effect. In that
2823 case, we know the operand occurred exactly once. */
2825 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2826 arg0 = new0;
2827 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2828 arg0 = new1;
2830 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2831 arg1 = new0;
2832 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2833 arg1 = new1;
2835 return fold (build2 (code, type, arg0, arg1));
2838 default:
2839 return arg;
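/* Illustrative example (editor's sketch, not from the original source):
substituting constants for the two compared values inside COND, which
is assumed to contain just arithmetic and comparisons as required
above, with A and B int-typed so the replacement constants agree. The
helper name is hypothetical. */
static tree
example_subst (tree cond, tree a, tree b)
{
return eval_subst (cond, a, integer_zero_node, b, integer_one_node);
}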
2843 /* Return a tree for the case when the result of an expression is RESULT
2844 converted to TYPE and OMITTED was previously an operand of the expression
2845 but is now not needed (e.g., we folded OMITTED * 0).
2847 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2848 the conversion of RESULT to TYPE. */
2850 tree
2851 omit_one_operand (tree type, tree result, tree omitted)
2853 tree t = fold_convert (type, result);
2855 if (TREE_SIDE_EFFECTS (omitted))
2856 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2858 return non_lvalue (t);
2861 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2863 static tree
2864 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2866 tree t = fold_convert (type, result);
2868 if (TREE_SIDE_EFFECTS (omitted))
2869 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2871 return pedantic_non_lvalue (t);
2874 /* Return a tree for the case when the result of an expression is RESULT
2875 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2876 of the expression but are now not needed.
2878 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2879 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2880 evaluated before OMITTED2. Otherwise, if neither has side effects,
2881 just do the conversion of RESULT to TYPE. */
2883 tree
2884 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2886 tree t = fold_convert (type, result);
2888 if (TREE_SIDE_EFFECTS (omitted2))
2889 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2890 if (TREE_SIDE_EFFECTS (omitted1))
2891 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2893 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
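/* Illustrative example (editor's sketch, not from the original source):
folding "x * 0" to 0 must still evaluate X when it has side effects,
which omit_one_operand arranges with a COMPOUND_EXPR. The helper name
is hypothetical. */
static tree
example_fold_mult_zero (tree type, tree x)
{
return omit_one_operand (type, build_int_cst (type, 0), x);
}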
2897 /* Return a simplified tree node for the truth-negation of ARG. This
2898 never alters ARG itself. We assume that ARG is an operation that
2899 returns a truth value (0 or 1).
2901 FIXME: one would think we would fold the result, but it causes
2902 problems with the dominator optimizer. */
2903 tree
2904 invert_truthvalue (tree arg)
2906 tree type = TREE_TYPE (arg);
2907 enum tree_code code = TREE_CODE (arg);
2909 if (code == ERROR_MARK)
2910 return arg;
2912 /* If this is a comparison, we can simply invert it, except for
2913 floating-point non-equality comparisons, in which case we just
2914 enclose a TRUTH_NOT_EXPR around what we have. */
2916 if (TREE_CODE_CLASS (code) == tcc_comparison)
2918 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2919 if (FLOAT_TYPE_P (op_type)
2920 && flag_trapping_math
2921 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2922 && code != NE_EXPR && code != EQ_EXPR)
2923 return build1 (TRUTH_NOT_EXPR, type, arg);
2924 else
2926 code = invert_tree_comparison (code,
2927 HONOR_NANS (TYPE_MODE (op_type)));
2928 if (code == ERROR_MARK)
2929 return build1 (TRUTH_NOT_EXPR, type, arg);
2930 else
2931 return build2 (code, type,
2932 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2936 switch (code)
2938 case INTEGER_CST:
2939 return fold_convert (type,
2940 build_int_cst (NULL_TREE, integer_zerop (arg)));
2942 case TRUTH_AND_EXPR:
2943 return build2 (TRUTH_OR_EXPR, type,
2944 invert_truthvalue (TREE_OPERAND (arg, 0)),
2945 invert_truthvalue (TREE_OPERAND (arg, 1)));
2947 case TRUTH_OR_EXPR:
2948 return build2 (TRUTH_AND_EXPR, type,
2949 invert_truthvalue (TREE_OPERAND (arg, 0)),
2950 invert_truthvalue (TREE_OPERAND (arg, 1)));
2952 case TRUTH_XOR_EXPR:
2953 /* Here we can invert either operand. We invert the first operand
2954 unless the second operand is a TRUTH_NOT_EXPR in which case our
2955 result is the XOR of the first operand with the inside of the
2956 negation of the second operand. */
2958 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2959 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2960 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2961 else
2962 return build2 (TRUTH_XOR_EXPR, type,
2963 invert_truthvalue (TREE_OPERAND (arg, 0)),
2964 TREE_OPERAND (arg, 1));
2966 case TRUTH_ANDIF_EXPR:
2967 return build2 (TRUTH_ORIF_EXPR, type,
2968 invert_truthvalue (TREE_OPERAND (arg, 0)),
2969 invert_truthvalue (TREE_OPERAND (arg, 1)));
2971 case TRUTH_ORIF_EXPR:
2972 return build2 (TRUTH_ANDIF_EXPR, type,
2973 invert_truthvalue (TREE_OPERAND (arg, 0)),
2974 invert_truthvalue (TREE_OPERAND (arg, 1)));
2976 case TRUTH_NOT_EXPR:
2977 return TREE_OPERAND (arg, 0);
2979 case COND_EXPR:
2980 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2981 invert_truthvalue (TREE_OPERAND (arg, 1)),
2982 invert_truthvalue (TREE_OPERAND (arg, 2)));
2984 case COMPOUND_EXPR:
2985 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2986 invert_truthvalue (TREE_OPERAND (arg, 1)));
2988 case NON_LVALUE_EXPR:
2989 return invert_truthvalue (TREE_OPERAND (arg, 0));
2991 case NOP_EXPR:
2992 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2993 break;
2995 case CONVERT_EXPR:
2996 case FLOAT_EXPR:
2997 return build1 (TREE_CODE (arg), type,
2998 invert_truthvalue (TREE_OPERAND (arg, 0)));
3000 case BIT_AND_EXPR:
3001 if (!integer_onep (TREE_OPERAND (arg, 1)))
3002 break;
3003 return build2 (EQ_EXPR, type, arg,
3004 fold_convert (type, integer_zero_node));
3006 case SAVE_EXPR:
3007 return build1 (TRUTH_NOT_EXPR, type, arg);
3009 case CLEANUP_POINT_EXPR:
3010 return build1 (CLEANUP_POINT_EXPR, type,
3011 invert_truthvalue (TREE_OPERAND (arg, 0)));
3013 default:
3014 break;
3016 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3017 return build1 (TRUTH_NOT_EXPR, type, arg);
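/* Illustrative example (editor's sketch, not from the original source):
De Morgan in action; the negation of "a && b" is built as "!a || !b",
recursing into each operand. Assumes A and B are boolean-typed truth
values; the helper name is hypothetical. */
static tree
example_invert_and (tree a, tree b)
{
return invert_truthvalue (build2 (TRUTH_AND_EXPR, boolean_type_node,
a, b));
}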
3020 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3021 operands are another bit-wise operation with a common input. If so,
3022 distribute the bit operations to save an operation and possibly two if
3023 constants are involved. For example, convert
3024 (A | B) & (A | C) into A | (B & C)
3025 Further simplification will occur if B and C are constants.
3027 If this optimization cannot be done, 0 will be returned. */
3029 static tree
3030 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3032 tree common;
3033 tree left, right;
3035 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3036 || TREE_CODE (arg0) == code
3037 || (TREE_CODE (arg0) != BIT_AND_EXPR
3038 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3039 return 0;
3041 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3043 common = TREE_OPERAND (arg0, 0);
3044 left = TREE_OPERAND (arg0, 1);
3045 right = TREE_OPERAND (arg1, 1);
3047 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3049 common = TREE_OPERAND (arg0, 0);
3050 left = TREE_OPERAND (arg0, 1);
3051 right = TREE_OPERAND (arg1, 0);
3053 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3055 common = TREE_OPERAND (arg0, 1);
3056 left = TREE_OPERAND (arg0, 0);
3057 right = TREE_OPERAND (arg1, 1);
3059 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3061 common = TREE_OPERAND (arg0, 1);
3062 left = TREE_OPERAND (arg0, 0);
3063 right = TREE_OPERAND (arg1, 0);
3065 else
3066 return 0;
3068 return fold (build2 (TREE_CODE (arg0), type, common,
3069 fold (build2 (code, type, left, right))));
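/* Illustrative example (editor's sketch, not from the original source):
(a | b) & (a | c) distributes to a | (b & c), assuming A, B and C
share an integral type. The helper name is hypothetical. */
static tree
example_distribute (tree a, tree b, tree c)
{
tree type = TREE_TYPE (a);
return distribute_bit_expr (BIT_AND_EXPR, type,
build2 (BIT_IOR_EXPR, type, a, b),
build2 (BIT_IOR_EXPR, type, a, c));
}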
3072 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3073 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3075 static tree
3076 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3077 int unsignedp)
3079 tree result = build3 (BIT_FIELD_REF, type, inner,
3080 size_int (bitsize), bitsize_int (bitpos));
3082 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3084 return result;
3087 /* Optimize a bit-field compare.
3089 There are two cases: First is a compare against a constant and the
3090 second is a comparison of two items where the fields are at the same
3091 bit position relative to the start of a chunk (byte, halfword, word)
3092 large enough to contain it. In these cases we can avoid the shift
3093 implicit in bitfield extractions.
3095 For constants, we emit a compare of the shifted constant with the
3096 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3097 compared. For two fields at the same position, we do the ANDs with the
3098 similar mask and compare the result of the ANDs.
3100 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3101 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3102 are the left and right operands of the comparison, respectively.
3104 If the optimization described above can be done, we return the resulting
3105 tree. Otherwise we return zero. */
3107 static tree
3108 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3109 tree lhs, tree rhs)
3111 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3112 tree type = TREE_TYPE (lhs);
3113 tree signed_type, unsigned_type;
3114 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3115 enum machine_mode lmode, rmode, nmode;
3116 int lunsignedp, runsignedp;
3117 int lvolatilep = 0, rvolatilep = 0;
3118 tree linner, rinner = NULL_TREE;
3119 tree mask;
3120 tree offset;
3122 /* Get all the information about the extractions being done. If the bit size
3123 is the same as the size of the underlying object, we aren't doing an
3124 extraction at all and so can do nothing. We also don't want to
3125 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3126 then will no longer be able to replace it. */
3127 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3128 &lunsignedp, &lvolatilep, false);
3129 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3130 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3131 return 0;
3133 if (!const_p)
3135 /* If this is not a constant, we can only do something if bit positions,
3136 sizes, and signedness are the same. */
3137 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3138 &runsignedp, &rvolatilep, false);
3140 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3141 || lunsignedp != runsignedp || offset != 0
3142 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3143 return 0;
3146 /* See if we can find a mode to refer to this field. We should be able to,
3147 but fail if we can't. */
3148 nmode = get_best_mode (lbitsize, lbitpos,
3149 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3150 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3151 TYPE_ALIGN (TREE_TYPE (rinner))),
3152 word_mode, lvolatilep || rvolatilep);
3153 if (nmode == VOIDmode)
3154 return 0;
3156 /* Set signed and unsigned types of the precision of this mode for the
3157 shifts below. */
3158 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3159 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3161 /* Compute the bit position and size for the new reference and our offset
3162 within it. If the new reference is the same size as the original, we
3163 won't optimize anything, so return zero. */
3164 nbitsize = GET_MODE_BITSIZE (nmode);
3165 nbitpos = lbitpos & ~ (nbitsize - 1);
3166 lbitpos -= nbitpos;
3167 if (nbitsize == lbitsize)
3168 return 0;
3170 if (BYTES_BIG_ENDIAN)
3171 lbitpos = nbitsize - lbitsize - lbitpos;
3173 /* Make the mask to be used against the extracted field. */
3174 mask = build_int_cst (unsigned_type, -1);
3175 mask = force_fit_type (mask, 0, false, false);
3176 mask = fold_convert (unsigned_type, mask);
3177 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3178 mask = const_binop (RSHIFT_EXPR, mask,
3179 size_int (nbitsize - lbitsize - lbitpos), 0);
3181 if (! const_p)
3182 /* If not comparing with constant, just rework the comparison
3183 and return. */
3184 return build2 (code, compare_type,
3185 build2 (BIT_AND_EXPR, unsigned_type,
3186 make_bit_field_ref (linner, unsigned_type,
3187 nbitsize, nbitpos, 1),
3188 mask),
3189 build2 (BIT_AND_EXPR, unsigned_type,
3190 make_bit_field_ref (rinner, unsigned_type,
3191 nbitsize, nbitpos, 1),
3192 mask));
3194 /* Otherwise, we are handling the constant case. See if the constant is too
3195 big for the field. Warn and return a tree for 0 (false) if so. We do
3196 this not only for its own sake, but to avoid having to test for this
3197 error case below. If we didn't, we might generate wrong code.
3199 For unsigned fields, the constant shifted right by the field length should
3200 be all zero. For signed fields, the high-order bits should agree with
3201 the sign bit. */
3203 if (lunsignedp)
3205 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3206 fold_convert (unsigned_type, rhs),
3207 size_int (lbitsize), 0)))
3209 warning ("comparison is always %d due to width of bit-field",
3210 code == NE_EXPR);
3211 return constant_boolean_node (code == NE_EXPR, compare_type);
3214 else
3216 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3217 size_int (lbitsize - 1), 0);
3218 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3220 warning ("comparison is always %d due to width of bit-field",
3221 code == NE_EXPR);
3222 return constant_boolean_node (code == NE_EXPR, compare_type);
3226 /* Single-bit compares should always be against zero. */
3227 if (lbitsize == 1 && ! integer_zerop (rhs))
3229 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3230 rhs = fold_convert (type, integer_zero_node);
3233 /* Make a new bitfield reference, shift the constant over the
3234 appropriate number of bits and mask it with the computed mask
3235 (in case this was a signed field). If we changed it, make a new one. */
3236 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3237 if (lvolatilep)
3239 TREE_SIDE_EFFECTS (lhs) = 1;
3240 TREE_THIS_VOLATILE (lhs) = 1;
3243 rhs = fold (const_binop (BIT_AND_EXPR,
3244 const_binop (LSHIFT_EXPR,
3245 fold_convert (unsigned_type, rhs),
3246 size_int (lbitpos), 0),
3247 mask, 0));
3249 return build2 (code, compare_type,
3250 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3251 rhs);
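/* Worked example (editor's addition, not from the original source): on
a little-endian target, comparing a 3-bit field at bit 4 of a byte
against the constant 2 becomes
(byte & 0x70) == (2 << 4)
i.e. one load, one AND and one compare, with no shift to extract the
field. */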
3254 /* Subroutine for fold_truthop: decode a field reference.
3256 If EXP is a comparison reference, we return the innermost reference.
3258 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3259 set to the starting bit number.
3261 If the innermost field can be completely contained in a mode-sized
3262 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3264 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3265 otherwise it is not changed.
3267 *PUNSIGNEDP is set to the signedness of the field.
3269 *PMASK is set to the mask used. This is either contained in a
3270 BIT_AND_EXPR or derived from the width of the field.
3272 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3274 Return 0 if this is not a component reference or is one that we can't
3275 do anything with. */
3277 static tree
3278 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3279 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3280 int *punsignedp, int *pvolatilep,
3281 tree *pmask, tree *pand_mask)
3283 tree outer_type = 0;
3284 tree and_mask = 0;
3285 tree mask, inner, offset;
3286 tree unsigned_type;
3287 unsigned int precision;
3289 /* All the optimizations using this function assume integer fields.
3290 There are problems with FP fields since the type_for_size call
3291 below can fail for, e.g., XFmode. */
3292 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3293 return 0;
3295 /* We are interested in the bare arrangement of bits, so strip everything
3296 that doesn't affect the machine mode. However, record the type of the
3297 outermost expression if it may matter below. */
3298 if (TREE_CODE (exp) == NOP_EXPR
3299 || TREE_CODE (exp) == CONVERT_EXPR
3300 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3301 outer_type = TREE_TYPE (exp);
3302 STRIP_NOPS (exp);
3304 if (TREE_CODE (exp) == BIT_AND_EXPR)
3306 and_mask = TREE_OPERAND (exp, 1);
3307 exp = TREE_OPERAND (exp, 0);
3308 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3309 if (TREE_CODE (and_mask) != INTEGER_CST)
3310 return 0;
3313 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3314 punsignedp, pvolatilep, false);
3315 if ((inner == exp && and_mask == 0)
3316 || *pbitsize < 0 || offset != 0
3317 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3318 return 0;
3320 /* If the number of bits in the reference is the same as the bitsize of
3321 the outer type, then the outer type gives the signedness. Otherwise
3322 (in case of a small bitfield) the signedness is unchanged. */
3323 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3324 *punsignedp = TYPE_UNSIGNED (outer_type);
3326 /* Compute the mask to access the bitfield. */
3327 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3328 precision = TYPE_PRECISION (unsigned_type);
3330 mask = build_int_cst (unsigned_type, -1);
3331 mask = force_fit_type (mask, 0, false, false);
3333 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3334 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3336 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3337 if (and_mask != 0)
3338 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3339 fold_convert (unsigned_type, and_mask), mask));
3341 *pmask = mask;
3342 *pand_mask = and_mask;
3343 return inner;
3346 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3347 bit positions. */
3349 static int
3350 all_ones_mask_p (tree mask, int size)
3352 tree type = TREE_TYPE (mask);
3353 unsigned int precision = TYPE_PRECISION (type);
3354 tree tmask;
3356 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3357 tmask = force_fit_type (tmask, 0, false, false);
3359 return
3360 tree_int_cst_equal (mask,
3361 const_binop (RSHIFT_EXPR,
3362 const_binop (LSHIFT_EXPR, tmask,
3363 size_int (precision - size),
3364 0),
3365 size_int (precision - size), 0));
3368 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3369 represents the sign bit of EXP's type. If EXP represents a sign
3370 or zero extension, also test VAL against the unextended type.
3371 The return value is the (sub)expression whose sign bit is VAL,
3372 or NULL_TREE otherwise. */
3374 static tree
3375 sign_bit_p (tree exp, tree val)
3377 unsigned HOST_WIDE_INT mask_lo, lo;
3378 HOST_WIDE_INT mask_hi, hi;
3379 int width;
3380 tree t;
3382 /* Tree EXP must have an integral type. */
3383 t = TREE_TYPE (exp);
3384 if (! INTEGRAL_TYPE_P (t))
3385 return NULL_TREE;
3387 /* Tree VAL must be an integer constant. */
3388 if (TREE_CODE (val) != INTEGER_CST
3389 || TREE_CONSTANT_OVERFLOW (val))
3390 return NULL_TREE;
3392 width = TYPE_PRECISION (t);
3393 if (width > HOST_BITS_PER_WIDE_INT)
3395 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3396 lo = 0;
3398 mask_hi = ((unsigned HOST_WIDE_INT) -1
3399 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3400 mask_lo = -1;
3402 else
3404 hi = 0;
3405 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3407 mask_hi = 0;
3408 mask_lo = ((unsigned HOST_WIDE_INT) -1
3409 >> (HOST_BITS_PER_WIDE_INT - width));
3412 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3413 treat VAL as if it were unsigned. */
3414 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3415 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3416 return exp;
3418 /* Handle extension from a narrower type. */
3419 if (TREE_CODE (exp) == NOP_EXPR
3420 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3421 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3423 return NULL_TREE;
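/* Illustrative example (editor's sketch, not from the original source):
for an EXP of signed 8-bit type the sign bit is 0x80, so the call
below returns EXP itself on a match and NULL_TREE otherwise. The
helper name is hypothetical. */
static tree
example_sign_bit (tree exp)
{
return sign_bit_p (exp, build_int_cst (TREE_TYPE (exp), 0x80));
}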
3426 /* Subroutine for fold_truthop: determine if an operand is simple enough
3427 to be evaluated unconditionally. */
3429 static int
3430 simple_operand_p (tree exp)
3432 /* Strip any conversions that don't change the machine mode. */
3433 STRIP_NOPS (exp);
3435 return (CONSTANT_CLASS_P (exp)
3436 || TREE_CODE (exp) == SSA_NAME
3437 || (DECL_P (exp)
3438 && ! TREE_ADDRESSABLE (exp)
3439 && ! TREE_THIS_VOLATILE (exp)
3440 && ! DECL_NONLOCAL (exp)
3441 /* Don't regard global variables as simple. They may be
3442 allocated in ways unknown to the compiler (shared memory,
3443 #pragma weak, etc). */
3444 && ! TREE_PUBLIC (exp)
3445 && ! DECL_EXTERNAL (exp)
3446 /* Loading a static variable is unduly expensive, but global
3447 registers aren't expensive. */
3448 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3451 /* The following functions are subroutines to fold_range_test and allow it to
3452 try to change a logical combination of comparisons into a range test.
3454 For example, both
3455 X == 2 || X == 3 || X == 4 || X == 5
3456 and
3457 X >= 2 && X <= 5
3458 are converted to
3459 (unsigned) (X - 2) <= 3
3461 We describe each set of comparisons as being either inside or outside
3462 a range, using a variable named like IN_P, and then describe the
3463 range with a lower and upper bound. If one of the bounds is omitted,
3464 it represents either the highest or lowest value of the type.
3466 In the comments below, we represent a range by two numbers in brackets
3467 preceded by a "+" to designate being inside that range, or a "-" to
3468 designate being outside that range, so the condition can be inverted by
3469 flipping the prefix. An omitted bound is represented by a "-". For
3470 example, "- [-, 10]" means being outside the range starting at the lowest
3471 possible value and ending at 10, in other words, being greater than 10.
3472 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3473 always false.
3475 We set up things so that the missing bounds are handled in a consistent
3476 manner so neither a missing bound nor "true" and "false" need to be
3477 handled using a special case. */
3479 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3480 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3481 and UPPER1_P are nonzero if the respective argument is an upper bound
3482 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3483 must be specified for a comparison. ARG1 will be converted to ARG0's
3484 type if both are specified. */
3486 static tree
3487 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3488 tree arg1, int upper1_p)
3490 tree tem;
3491 int result;
3492 int sgn0, sgn1;
3494 /* If neither arg represents infinity, do the normal operation.
3495 Else, if not a comparison, return infinity. Else handle the special
3496 comparison rules. Note that most of the cases below won't occur, but
3497 are handled for consistency. */
3499 if (arg0 != 0 && arg1 != 0)
3501 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3502 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3503 STRIP_NOPS (tem);
3504 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3507 if (TREE_CODE_CLASS (code) != tcc_comparison)
3508 return 0;
3510 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3511 for neither. In real maths, we cannot assume open ended ranges are
3512 the same. But, this is computer arithmetic, where numbers are finite.
3513 We can therefore make the transformation of any unbounded range with
3514 the value Z, Z being greater than any representable number. This permits
3515 us to treat unbounded ranges as equal. */
3516 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3517 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3518 switch (code)
3520 case EQ_EXPR:
3521 result = sgn0 == sgn1;
3522 break;
3523 case NE_EXPR:
3524 result = sgn0 != sgn1;
3525 break;
3526 case LT_EXPR:
3527 result = sgn0 < sgn1;
3528 break;
3529 case LE_EXPR:
3530 result = sgn0 <= sgn1;
3531 break;
3532 case GT_EXPR:
3533 result = sgn0 > sgn1;
3534 break;
3535 case GE_EXPR:
3536 result = sgn0 >= sgn1;
3537 break;
3538 default:
3539 gcc_unreachable ();
3542 return constant_boolean_node (result, type);
3545 /* Given EXP, a logical expression, set the range it is testing into
3546 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3547 actually being tested. *PLOW and *PHIGH will be made of the same type
3548 as the returned expression. If EXP is not a comparison, we will most
3549 likely not be returning a useful value and range. */
3551 static tree
3552 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3554 enum tree_code code;
3555 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3556 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3557 int in_p, n_in_p;
3558 tree low, high, n_low, n_high;
3560 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3561 and see if we can refine the range. Some of the cases below may not
3562 happen, but it doesn't seem worth worrying about this. We "continue"
3563 the outer loop when we've changed something; otherwise we "break"
3564 the switch, which will "break" the while. */
3566 in_p = 0;
3567 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3569 while (1)
3571 code = TREE_CODE (exp);
3572 exp_type = TREE_TYPE (exp);
3574 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3576 if (TREE_CODE_LENGTH (code) > 0)
3577 arg0 = TREE_OPERAND (exp, 0);
3578 if (TREE_CODE_CLASS (code) == tcc_comparison
3579 || TREE_CODE_CLASS (code) == tcc_unary
3580 || TREE_CODE_CLASS (code) == tcc_binary)
3581 arg0_type = TREE_TYPE (arg0);
3582 if (TREE_CODE_CLASS (code) == tcc_binary
3583 || TREE_CODE_CLASS (code) == tcc_comparison
3584 || (TREE_CODE_CLASS (code) == tcc_expression
3585 && TREE_CODE_LENGTH (code) > 1))
3586 arg1 = TREE_OPERAND (exp, 1);
3589 switch (code)
3591 case TRUTH_NOT_EXPR:
3592 in_p = ! in_p, exp = arg0;
3593 continue;
3595 case EQ_EXPR: case NE_EXPR:
3596 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3597 /* We can only do something if the range is testing for zero
3598 and if the second operand is an integer constant. Note that
3599 saying something is "in" the range we make is done by
3600 complementing IN_P since it will set in the initial case of
3601 being not equal to zero; "out" is leaving it alone. */
3602 if (low == 0 || high == 0
3603 || ! integer_zerop (low) || ! integer_zerop (high)
3604 || TREE_CODE (arg1) != INTEGER_CST)
3605 break;
3607 switch (code)
3609 case NE_EXPR: /* - [c, c] */
3610 low = high = arg1;
3611 break;
3612 case EQ_EXPR: /* + [c, c] */
3613 in_p = ! in_p, low = high = arg1;
3614 break;
3615 case GT_EXPR: /* - [-, c] */
3616 low = 0, high = arg1;
3617 break;
3618 case GE_EXPR: /* + [c, -] */
3619 in_p = ! in_p, low = arg1, high = 0;
3620 break;
3621 case LT_EXPR: /* - [c, -] */
3622 low = arg1, high = 0;
3623 break;
3624 case LE_EXPR: /* + [-, c] */
3625 in_p = ! in_p, low = 0, high = arg1;
3626 break;
3627 default:
3628 gcc_unreachable ();
3631 /* If this is an unsigned comparison, we also know that EXP is
3632 greater than or equal to zero. We base the range tests we make
3633 on that fact, so we record it here so we can parse existing
3634 range tests. We test arg0_type since often the return type
3635 of, e.g. EQ_EXPR, is boolean. */
3636 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3638 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3639 in_p, low, high, 1,
3640 fold_convert (arg0_type, integer_zero_node),
3641 NULL_TREE))
3642 break;
3644 in_p = n_in_p, low = n_low, high = n_high;
3646 /* If the high bound is missing, but we have a nonzero low
3647 bound, reverse the range so it goes from zero to the low bound
3648 minus 1. */
3649 if (high == 0 && low && ! integer_zerop (low))
3651 in_p = ! in_p;
3652 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3653 integer_one_node, 0);
3654 low = fold_convert (arg0_type, integer_zero_node);
3658 exp = arg0;
3659 continue;
3661 case NEGATE_EXPR:
3662 /* (-x) IN [a,b] -> x in [-b, -a] */
3663 n_low = range_binop (MINUS_EXPR, exp_type,
3664 fold_convert (exp_type, integer_zero_node),
3665 0, high, 1);
3666 n_high = range_binop (MINUS_EXPR, exp_type,
3667 fold_convert (exp_type, integer_zero_node),
3668 0, low, 0);
3669 low = n_low, high = n_high;
3670 exp = arg0;
3671 continue;
3673 case BIT_NOT_EXPR:
3674 /* ~ X -> -X - 1 */
3675 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3676 fold_convert (exp_type, integer_one_node));
3677 continue;
3679 case PLUS_EXPR: case MINUS_EXPR:
3680 if (TREE_CODE (arg1) != INTEGER_CST)
3681 break;
3683 /* If EXP is signed, any overflow in the computation is undefined,
3684 so we don't worry about it so long as our computations on
3685 the bounds don't overflow. For unsigned, overflow is defined
3686 and this is exactly the right thing. */
3687 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3688 arg0_type, low, 0, arg1, 0);
3689 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3690 arg0_type, high, 1, arg1, 0);
3691 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3692 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3693 break;
3695 /* Check for an unsigned range which has wrapped around the maximum
3696 value thus making n_high < n_low, and normalize it. */
3697 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3699 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3700 integer_one_node, 0);
3701 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3702 integer_one_node, 0);
3704 /* If the range is of the form +/- [ x+1, x ], we won't
3705 be able to normalize it. But then, it represents the
3706 whole range or the empty set, so make it
3707 +/- [ -, - ]. */
3708 if (tree_int_cst_equal (n_low, low)
3709 && tree_int_cst_equal (n_high, high))
3710 low = high = 0;
3711 else
3712 in_p = ! in_p;
3714 else
3715 low = n_low, high = n_high;
3717 exp = arg0;
3718 continue;
3720 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3721 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3722 break;
3724 if (! INTEGRAL_TYPE_P (arg0_type)
3725 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3726 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3727 break;
3729 n_low = low, n_high = high;
3731 if (n_low != 0)
3732 n_low = fold_convert (arg0_type, n_low);
3734 if (n_high != 0)
3735 n_high = fold_convert (arg0_type, n_high);
3738 /* If we're converting arg0 from an unsigned type to exp's
3739 signed type, we will be doing the comparison as unsigned.
3740 The tests above have already verified that LOW and HIGH
3741 are both positive.
3743 So we have to ensure that we will handle large unsigned
3744 values the same way that the current signed bounds treat
3745 negative values. */
3747 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3749 tree high_positive;
3750 tree equiv_type = lang_hooks.types.type_for_mode
3751 (TYPE_MODE (arg0_type), 1);
3753 /* A range without an upper bound is, naturally, unbounded.
3754 Since convert would have cropped a very large value, use
3755 the max value for the destination type. */
3756 high_positive
3757 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3758 : TYPE_MAX_VALUE (arg0_type);
3760 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3761 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3762 fold_convert (arg0_type,
3763 high_positive),
3764 fold_convert (arg0_type,
3765 integer_one_node)));
3767 /* If the low bound is specified, "and" the range with the
3768 range for which the original unsigned value will be
3769 positive. */
3770 if (low != 0)
3772 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3773 1, n_low, n_high, 1,
3774 fold_convert (arg0_type,
3775 integer_zero_node),
3776 high_positive))
3777 break;
3779 in_p = (n_in_p == in_p);
3781 else
3783 /* Otherwise, "or" the range with the range of the input
3784 that will be interpreted as negative. */
3785 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3786 0, n_low, n_high, 1,
3787 fold_convert (arg0_type,
3788 integer_zero_node),
3789 high_positive))
3790 break;
3792 in_p = (in_p != n_in_p);
3796 exp = arg0;
3797 low = n_low, high = n_high;
3798 continue;
3800 default:
3801 break;
3804 break;
3807 /* If EXP is a constant, we can evaluate whether this is true or false. */
3808 if (TREE_CODE (exp) == INTEGER_CST)
3810 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3811 exp, 0, low, 0))
3812 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3813 exp, 1, high, 1)));
3814 low = high = 0;
3815 exp = 0;
3818 *pin_p = in_p, *plow = low, *phigh = high;
3819 return exp;
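/* Illustrative sketch (a hypothetical helper, not part of GCC): the
   typical calling pattern for make_range; compare fold_range_test
   below.  On return, COND is true exactly when the returned operand
   is inside (*IN_P == 1) or outside (*IN_P == 0) the range
   [*LOW, *HIGH]; a zero bound means the range is unbounded on that
   side, and a zero return means no range could be derived.  */

static tree
example_classify_range (tree cond, int *in_p, tree *low, tree *high)
{
  return make_range (cond, in_p, low, high);
}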
3822 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3823 type, TYPE, return an expression to test if EXP is in (or out of, depending
3824 on IN_P) the range. Return 0 if the test couldn't be created. */
3826 static tree
3827 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3829 tree etype = TREE_TYPE (exp);
3830 tree value;
3832 if (! in_p)
3834 value = build_range_check (type, exp, 1, low, high);
3835 if (value != 0)
3836 return invert_truthvalue (value);
3838 return 0;
3841 if (low == 0 && high == 0)
3842 return fold_convert (type, integer_one_node);
3844 if (low == 0)
3845 return fold (build2 (LE_EXPR, type, exp, high));
3847 if (high == 0)
3848 return fold (build2 (GE_EXPR, type, exp, low));
3850 if (operand_equal_p (low, high, 0))
3851 return fold (build2 (EQ_EXPR, type, exp, low));
3853 if (integer_zerop (low))
3855 if (! TYPE_UNSIGNED (etype))
3857 etype = lang_hooks.types.unsigned_type (etype);
3858 high = fold_convert (etype, high);
3859 exp = fold_convert (etype, exp);
3861 return build_range_check (type, exp, 1, 0, high);
3864 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3865 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3867 unsigned HOST_WIDE_INT lo;
3868 HOST_WIDE_INT hi;
3869 int prec;
3871 prec = TYPE_PRECISION (etype);
3872 if (prec <= HOST_BITS_PER_WIDE_INT)
3874 hi = 0;
3875 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3877 else
3879 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3880 lo = (unsigned HOST_WIDE_INT) -1;
3883 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3885 if (TYPE_UNSIGNED (etype))
3887 etype = lang_hooks.types.signed_type (etype);
3888 exp = fold_convert (etype, exp);
3890 return fold (build2 (GT_EXPR, type, exp,
3891 fold_convert (etype, integer_zero_node)));
3895 value = const_binop (MINUS_EXPR, high, low, 0);
3896 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3898 tree utype, minv, maxv;
3900 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3901 for the type in question, as we rely on this here. */
3902 switch (TREE_CODE (etype))
3904 case INTEGER_TYPE:
3905 case ENUMERAL_TYPE:
3906 case CHAR_TYPE:
3907 utype = lang_hooks.types.unsigned_type (etype);
3908 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3909 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3910 integer_one_node, 1);
3911 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3912 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3913 minv, 1, maxv, 1)))
3915 etype = utype;
3916 high = fold_convert (etype, high);
3917 low = fold_convert (etype, low);
3918 exp = fold_convert (etype, exp);
3919 value = const_binop (MINUS_EXPR, high, low, 0);
3921 break;
3922 default:
3923 break;
3927 if (value != 0 && ! TREE_OVERFLOW (value))
3928 return build_range_check (type,
3929 fold (build2 (MINUS_EXPR, etype, exp, low)),
3930 1, fold_convert (etype, integer_zero_node),
3931 value);
3933 return 0;
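/* Worked illustration (plain C sketch, independent of the tree
   machinery; the helper below is hypothetical and unused).  The
   final MINUS_EXPR case above reduces a two-sided range test to one
   unsigned comparison, and the [1, signed-max] special case becomes
   a sign test in the signed type, i.e. "(signed char) c > 0".  */

static int
example_digit_p (int c)
{
  /* Equivalent to (c >= '0' && c <= '9'): subtract the low bound and
     compare against high - low as unsigned, as build_range_check
     arranges via "exp - low" and the range [0, high - low].  */
  return (unsigned) (c - '0') <= 9U;
}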
3936 /* Given two ranges, see if we can merge them into one. Return 1 if we
3937 can, 0 if we can't. Set the output range into the specified parameters. */
3939 static int
3940 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3941 tree high0, int in1_p, tree low1, tree high1)
3943 int no_overlap;
3944 int subset;
3945 int temp;
3946 tree tem;
3947 int in_p;
3948 tree low, high;
3949 int lowequal = ((low0 == 0 && low1 == 0)
3950 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3951 low0, 0, low1, 0)));
3952 int highequal = ((high0 == 0 && high1 == 0)
3953 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3954 high0, 1, high1, 1)));
3956 /* Make range 0 be the range that starts first, or ends last if they
3957 start at the same value.  Swap them if this is not already so. */
3958 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3959 low0, 0, low1, 0))
3960 || (lowequal
3961 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3962 high1, 1, high0, 1))))
3964 temp = in0_p, in0_p = in1_p, in1_p = temp;
3965 tem = low0, low0 = low1, low1 = tem;
3966 tem = high0, high0 = high1, high1 = tem;
3969 /* Now flag two cases, whether the ranges are disjoint or whether the
3970 second range is totally subsumed in the first. Note that the tests
3971 below are simplified by the ones above. */
3972 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3973 high0, 1, low1, 0));
3974 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3975 high1, 1, high0, 1));
3977 /* We now have four cases, depending on whether we are including or
3978 excluding the two ranges. */
3979 if (in0_p && in1_p)
3981 /* If they don't overlap, the result is false. If the second range
3982 is a subset it is the result. Otherwise, the range is from the start
3983 of the second to the end of the first. */
3984 if (no_overlap)
3985 in_p = 0, low = high = 0;
3986 else if (subset)
3987 in_p = 1, low = low1, high = high1;
3988 else
3989 in_p = 1, low = low1, high = high0;
3992 else if (in0_p && ! in1_p)
3994 /* If they don't overlap, the result is the first range. If they are
3995 equal, the result is false. If the second range is a subset of the
3996 first, and the ranges begin at the same place, we go from just after
3997 the end of the first range to the end of the second. If the second
3998 range is not a subset of the first, or if it is a subset and both
3999 ranges end at the same place, the range starts at the start of the
4000 first range and ends just before the second range.
4001 Otherwise, we can't describe this as a single range. */
4002 if (no_overlap)
4003 in_p = 1, low = low0, high = high0;
4004 else if (lowequal && highequal)
4005 in_p = 0, low = high = 0;
4006 else if (subset && lowequal)
4008 in_p = 1, high = high0;
4009 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4010 integer_one_node, 0);
4012 else if (! subset || highequal)
4014 in_p = 1, low = low0;
4015 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4016 integer_one_node, 0);
4018 else
4019 return 0;
4022 else if (! in0_p && in1_p)
4024 /* If they don't overlap, the result is the second range. If the second
4025 is a subset of the first, the result is false. Otherwise,
4026 the range starts just after the first range and ends at the
4027 end of the second. */
4028 if (no_overlap)
4029 in_p = 1, low = low1, high = high1;
4030 else if (subset || highequal)
4031 in_p = 0, low = high = 0;
4032 else
4034 in_p = 1, high = high1;
4035 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4036 integer_one_node, 0);
4040 else
4042 /* The case where we are excluding both ranges. Here the complex case
4043 is if they don't overlap. In that case, the only time we have a
4044 range is if they are adjacent. If the second is a subset of the
4045 first, the result is the first. Otherwise, the range to exclude
4046 starts at the beginning of the first range and ends at the end of the
4047 second. */
4048 if (no_overlap)
4050 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4051 range_binop (PLUS_EXPR, NULL_TREE,
4052 high0, 1,
4053 integer_one_node, 1),
4054 1, low1, 0)))
4055 in_p = 0, low = low0, high = high1;
4056 else
4058 /* Canonicalize - [min, x] into - [-, x]. */
4059 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4060 switch (TREE_CODE (TREE_TYPE (low0)))
4062 case ENUMERAL_TYPE:
4063 if (TYPE_PRECISION (TREE_TYPE (low0))
4064 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4065 break;
4066 /* FALLTHROUGH */
4067 case INTEGER_TYPE:
4068 case CHAR_TYPE:
4069 if (tree_int_cst_equal (low0,
4070 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4071 low0 = 0;
4072 break;
4073 case POINTER_TYPE:
4074 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4075 && integer_zerop (low0))
4076 low0 = 0;
4077 break;
4078 default:
4079 break;
4082 /* Canonicalize - [x, max] into - [x, -]. */
4083 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4084 switch (TREE_CODE (TREE_TYPE (high1)))
4086 case ENUMERAL_TYPE:
4087 if (TYPE_PRECISION (TREE_TYPE (high1))
4088 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4089 break;
4090 /* FALLTHROUGH */
4091 case INTEGER_TYPE:
4092 case CHAR_TYPE:
4093 if (tree_int_cst_equal (high1,
4094 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4095 high1 = 0;
4096 break;
4097 case POINTER_TYPE:
4098 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4099 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4100 high1, 1,
4101 integer_one_node, 1)))
4102 high1 = 0;
4103 break;
4104 default:
4105 break;
4108 /* The ranges might also be adjacent between the maximum and
4109 minimum values of the given type. For
4110 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4111 return + [x + 1, y - 1]. */
4112 if (low0 == 0 && high1 == 0)
4114 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4115 integer_one_node, 1);
4116 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4117 integer_one_node, 0);
4118 if (low == 0 || high == 0)
4119 return 0;
4121 in_p = 1;
4123 else
4124 return 0;
4127 else if (subset)
4128 in_p = 0, low = low0, high = high0;
4129 else
4130 in_p = 0, low = low0, high = high1;
4133 *pin_p = in_p, *plow = low, *phigh = high;
4134 return 1;
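/* Examples of the merging above (illustrative).  For the test
   "ch >= '0' && ch <= '9'", make_range yields +[-, '9'] and
   +['0', -]; they overlap and neither subsumes the other, so the
   in0_p && in1_p case returns the single range +['0', '9'].  For the
   exclusions -[-, 'a'-1] and -['z'+1, -], the two excluded ranges
   are adjacent through the extremes of the type, so the
   low0 == 0 && high1 == 0 case above returns the positive range
   +['a', 'z'].  */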
4138 /* Subroutine of fold, looking inside expressions of the form
4139 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4140 of the COND_EXPR. This function is being used also to optimize
4141 A op B ? C : A, by reversing the comparison first.
4143 Return a folded expression whose code is not a COND_EXPR
4144 anymore, or NULL_TREE if no folding opportunity is found. */
4146 static tree
4147 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4149 enum tree_code comp_code = TREE_CODE (arg0);
4150 tree arg00 = TREE_OPERAND (arg0, 0);
4151 tree arg01 = TREE_OPERAND (arg0, 1);
4152 tree arg1_type = TREE_TYPE (arg1);
4153 tree tem;
4155 STRIP_NOPS (arg1);
4156 STRIP_NOPS (arg2);
4158 /* If we have A op 0 ? A : -A, consider applying the following
4159 transformations:
4161 A == 0? A : -A same as -A
4162 A != 0? A : -A same as A
4163 A >= 0? A : -A same as abs (A)
4164 A > 0? A : -A same as abs (A)
4165 A <= 0? A : -A same as -abs (A)
4166 A < 0? A : -A same as -abs (A)
4168 None of these transformations work for modes with signed
4169 zeros. If A is +/-0, the first two transformations will
4170 change the sign of the result (from +0 to -0, or vice
4171 versa). The last four will fix the sign of the result,
4172 even though the original expressions could be positive or
4173 negative, depending on the sign of A.
4175 Note that all these transformations are correct if A is
4176 NaN, since the two alternatives (A and -A) are also NaNs. */
4177 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4178 ? real_zerop (arg01)
4179 : integer_zerop (arg01))
4180 && TREE_CODE (arg2) == NEGATE_EXPR
4181 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4182 switch (comp_code)
4184 case EQ_EXPR:
4185 case UNEQ_EXPR:
4186 tem = fold_convert (arg1_type, arg1);
4187 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4188 case NE_EXPR:
4189 case LTGT_EXPR:
4190 return pedantic_non_lvalue (fold_convert (type, arg1));
4191 case UNGE_EXPR:
4192 case UNGT_EXPR:
4193 if (flag_trapping_math)
4194 break;
4195 /* Fall through. */
4196 case GE_EXPR:
4197 case GT_EXPR:
4198 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4199 arg1 = fold_convert (lang_hooks.types.signed_type
4200 (TREE_TYPE (arg1)), arg1);
4201 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4202 return pedantic_non_lvalue (fold_convert (type, tem));
4203 case UNLE_EXPR:
4204 case UNLT_EXPR:
4205 if (flag_trapping_math)
4206 break;
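/* Fall through. */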
4207 case LE_EXPR:
4208 case LT_EXPR:
4209 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4210 arg1 = fold_convert (lang_hooks.types.signed_type
4211 (TREE_TYPE (arg1)), arg1);
4212 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4213 return negate_expr (fold_convert (type, tem));
4214 default:
4215 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4216 break;
4219 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4220 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4221 both transformations are correct when A is NaN: A != 0
4222 is then true, and A == 0 is false. */
4224 if (integer_zerop (arg01) && integer_zerop (arg2))
4226 if (comp_code == NE_EXPR)
4227 return pedantic_non_lvalue (fold_convert (type, arg1));
4228 else if (comp_code == EQ_EXPR)
4229 return fold_convert (type, integer_zero_node);
4232 /* Try some transformations of A op B ? A : B.
4234 A == B? A : B same as B
4235 A != B? A : B same as A
4236 A >= B? A : B same as max (A, B)
4237 A > B? A : B same as max (B, A)
4238 A <= B? A : B same as min (A, B)
4239 A < B? A : B same as min (B, A)
4241 As above, these transformations don't work in the presence
4242 of signed zeros. For example, if A and B are zeros of
4243 opposite sign, the first two transformations will change
4244 the sign of the result. In the last four, the original
4245 expressions give different results for (A=+0, B=-0) and
4246 (A=-0, B=+0), but the transformed expressions do not.
4248 The first two transformations are correct if either A or B
4249 is a NaN. In the first transformation, the condition will
4250 be false, and B will indeed be chosen. In the case of the
4251 second transformation, the condition A != B will be true,
4252 and A will be chosen.
4254 The conversions to max() and min() are not correct if B is
4255 a number and A is not. The conditions in the original
4256 expressions will be false, so all four give B. The min()
4257 and max() versions would give a NaN instead. */
4258 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4260 tree comp_op0 = arg00;
4261 tree comp_op1 = arg01;
4262 tree comp_type = TREE_TYPE (comp_op0);
4264 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4265 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4267 comp_type = type;
4268 comp_op0 = arg1;
4269 comp_op1 = arg2;
4272 switch (comp_code)
4274 case EQ_EXPR:
4275 return pedantic_non_lvalue (fold_convert (type, arg2));
4276 case NE_EXPR:
4277 return pedantic_non_lvalue (fold_convert (type, arg1));
4278 case LE_EXPR:
4279 case LT_EXPR:
4280 case UNLE_EXPR:
4281 case UNLT_EXPR:
4282 /* In C++ a ?: expression can be an lvalue, so put the
4283 operand which will be used if they are equal first
4284 so that we can convert this back to the
4285 corresponding COND_EXPR. */
4286 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4288 comp_op0 = fold_convert (comp_type, comp_op0);
4289 comp_op1 = fold_convert (comp_type, comp_op1);
4290 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4291 ? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
4292 : fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
4293 return pedantic_non_lvalue (fold_convert (type, tem));
4295 break;
4296 case GE_EXPR:
4297 case GT_EXPR:
4298 case UNGE_EXPR:
4299 case UNGT_EXPR:
4300 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4302 comp_op0 = fold_convert (comp_type, comp_op0);
4303 comp_op1 = fold_convert (comp_type, comp_op1);
4304 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4305 ? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
4306 : fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
4307 return pedantic_non_lvalue (fold_convert (type, tem));
4309 break;
4310 case UNEQ_EXPR:
4311 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4312 return pedantic_non_lvalue (fold_convert (type, arg2));
4313 break;
4314 case LTGT_EXPR:
4315 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4316 return pedantic_non_lvalue (fold_convert (type, arg1));
4317 break;
4318 default:
4319 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4320 break;
4324 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4325 we might still be able to simplify this. For example,
4326 if C1 is one less or one more than C2, this might have started
4327 out as a MIN or MAX and been transformed by this function.
4328 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4330 if (INTEGRAL_TYPE_P (type)
4331 && TREE_CODE (arg01) == INTEGER_CST
4332 && TREE_CODE (arg2) == INTEGER_CST)
4333 switch (comp_code)
4335 case EQ_EXPR:
4336 /* We can replace A with C1 in this case. */
4337 arg1 = fold_convert (type, arg01);
4338 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4340 case LT_EXPR:
4341 /* If C1 is C2 + 1, this is min(A, C2). */
4342 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4343 OEP_ONLY_CONST)
4344 && operand_equal_p (arg01,
4345 const_binop (PLUS_EXPR, arg2,
4346 integer_one_node, 0),
4347 OEP_ONLY_CONST))
4348 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4349 type, arg1, arg2)));
4350 break;
4352 case LE_EXPR:
4353 /* If C1 is C2 - 1, this is min(A, C2). */
4354 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4355 OEP_ONLY_CONST)
4356 && operand_equal_p (arg01,
4357 const_binop (MINUS_EXPR, arg2,
4358 integer_one_node, 0),
4359 OEP_ONLY_CONST))
4360 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4361 type, arg1, arg2)));
4362 break;
4364 case GT_EXPR:
4365 /* If C1 is C2 - 1, this is max(A, C2). */
4366 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4367 OEP_ONLY_CONST)
4368 && operand_equal_p (arg01,
4369 const_binop (MINUS_EXPR, arg2,
4370 integer_one_node, 0),
4371 OEP_ONLY_CONST))
4372 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4373 type, arg1, arg2)));
4374 break;
4376 case GE_EXPR:
4377 /* If C1 is C2 + 1, this is max(A, C2). */
4378 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4379 OEP_ONLY_CONST)
4380 && operand_equal_p (arg01,
4381 const_binop (PLUS_EXPR, arg2,
4382 integer_one_node, 0),
4383 OEP_ONLY_CONST))
4384 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4385 type, arg1, arg2)));
4386 break;
4387 case NE_EXPR:
4388 break;
4389 default:
4390 gcc_unreachable ();
4393 return NULL_TREE;
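/* Concrete instances of the foldings above (illustrative, in source
   terms; the compiler of course works on trees):

     a >= 0 ? a : -a   becomes   ABS_EXPR <a>
     a <= 0 ? a : -a   becomes   -ABS_EXPR <a>
     a <  b ? a : b    becomes   MIN_EXPR <a, b>  (NaNs not honored)
     a <  5 ? a : 4    becomes   MIN_EXPR <a, 4>  (C1 == C2 + 1, so
                                 the LT_EXPR case rebuilds the MIN)  */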
4398 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4399 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4400 #endif
4402 /* EXP is some logical combination of boolean tests. See if we can
4403 merge it into some range test. Return the new tree if so. */
4405 static tree
4406 fold_range_test (tree exp)
4408 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4409 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4410 int in0_p, in1_p, in_p;
4411 tree low0, low1, low, high0, high1, high;
4412 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4413 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4414 tree tem;
4416 /* If this is an OR operation, invert both sides; we will invert
4417 again at the end. */
4418 if (or_op)
4419 in0_p = ! in0_p, in1_p = ! in1_p;
4421 /* If both expressions are the same, if we can merge the ranges, and we
4422 can build the range test, return it or it inverted. If one of the
4423 ranges is always true or always false, consider it to be the same
4424 expression as the other. */
4425 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4426 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4427 in1_p, low1, high1)
4428 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4429 lhs != 0 ? lhs
4430 : rhs != 0 ? rhs : integer_zero_node,
4431 in_p, low, high))))
4432 return or_op ? invert_truthvalue (tem) : tem;
4434 /* On machines where the branch cost is expensive, if this is a
4435 short-circuited branch and the underlying object on both sides
4436 is the same, make a non-short-circuit operation. */
4437 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4438 && lhs != 0 && rhs != 0
4439 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4440 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4441 && operand_equal_p (lhs, rhs, 0))
4443 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4444 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4445 which cases we can't do this. */
4446 if (simple_operand_p (lhs))
4447 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4448 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4449 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4450 TREE_OPERAND (exp, 1));
4452 else if (lang_hooks.decls.global_bindings_p () == 0
4453 && ! CONTAINS_PLACEHOLDER_P (lhs))
4455 tree common = save_expr (lhs);
4457 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4458 or_op ? ! in0_p : in0_p,
4459 low0, high0))
4460 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4461 or_op ? ! in1_p : in1_p,
4462 low1, high1))))
4463 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4464 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4465 TREE_TYPE (exp), lhs, rhs);
4469 return 0;
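/* Examples (illustrative).  For "a == 2 || a == 3" the operands give
   the ranges +[2, 2] and +[3, 3]; after the OR inversion these are
   the adjacent exclusions -[2, 2] and -[3, 3], which merge, and the
   final inversion yields +[2, 3], i.e. (unsigned) (a - 2) <= 1.  For
   "a > 0 && a != 5" the ranges do not merge into one, but both tests
   are over the same operand, so when LOGICAL_OP_NON_SHORT_CIRCUIT
   holds the result is the non-short-circuit (a > 0) & (a != 5).  */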
4472 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4473 bit value. Arrange things so the extra bits will be set to zero if and
4474 only if C is sign-extended to its full width. If MASK is nonzero,
4475 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4477 static tree
4478 unextend (tree c, int p, int unsignedp, tree mask)
4480 tree type = TREE_TYPE (c);
4481 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4482 tree temp;
4484 if (p == modesize || unsignedp)
4485 return c;
4487 /* We work by getting just the sign bit into the low-order bit, then
4488 into the high-order bit, then sign-extend. We then XOR that value
4489 with C. */
4490 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4491 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4493 /* We must use a signed type in order to get an arithmetic right shift.
4494 However, we must also avoid introducing accidental overflows, so that
4495 a subsequent call to integer_zerop will work. Hence we must
4496 do the type conversion here. At this point, the constant is either
4497 zero or one, and the conversion to a signed type can never overflow.
4498 We could get an overflow if this conversion is done anywhere else. */
4499 if (TYPE_UNSIGNED (type))
4500 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4502 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4503 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4504 if (mask != 0)
4505 temp = const_binop (BIT_AND_EXPR, temp,
4506 fold_convert (TREE_TYPE (c), mask), 0);
4507 /* If necessary, convert the type back to match the type of C. */
4508 if (TYPE_UNSIGNED (type))
4509 temp = fold_convert (type, temp);
4511 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
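/* The shift dance above as a plain C sketch (illustrative only;
   assumes 1 <= P < M and an arithmetic right shift of signed values,
   which is exactly why the code converts to a signed type first):

     temp = (c >> (p - 1)) & 1;    sign bit of the P-bit field
     temp <<= m - 1;               move it to the high-order bit
     temp >>= m - p - 1;           smear it over bits P .. M-1
     c ^= temp;                    extra bits become zero if and only
                                   if C was sign-extended  */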
4514 /* Find ways of folding logical expressions of LHS and RHS:
4515 Try to merge two comparisons to the same innermost item.
4516 Look for range tests like "ch >= '0' && ch <= '9'".
4517 Look for combinations of simple terms on machines with expensive branches
4518 and evaluate the RHS unconditionally.
4520 For example, if we have p->a == 2 && p->b == 4 and we can make an
4521 object large enough to span both A and B, we can do this with a comparison
4522 against the object ANDed with a mask.
4524 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4525 operations to do this with one comparison.
4527 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4528 function and the one above.
4530 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4531 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4533 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4534 two operands.
4536 We return the simplified tree or 0 if no optimization is possible. */
4538 static tree
4539 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4541 /* If this is the "or" of two comparisons, we can do something if
4542 the comparisons are NE_EXPR. If this is the "and", we can do something
4543 if the comparisons are EQ_EXPR. I.e.,
4544 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4546 WANTED_CODE is this operation code. For single bit fields, we can
4547 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4548 comparison for one-bit fields. */
4550 enum tree_code wanted_code;
4551 enum tree_code lcode, rcode;
4552 tree ll_arg, lr_arg, rl_arg, rr_arg;
4553 tree ll_inner, lr_inner, rl_inner, rr_inner;
4554 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4555 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4556 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4557 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4558 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4559 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4560 enum machine_mode lnmode, rnmode;
4561 tree ll_mask, lr_mask, rl_mask, rr_mask;
4562 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4563 tree l_const, r_const;
4564 tree lntype, rntype, result;
4565 int first_bit, end_bit;
4566 int volatilep;
4568 /* Start by getting the comparison codes. Fail if anything is volatile.
4569 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4570 it were surrounded with a NE_EXPR. */
4572 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4573 return 0;
4575 lcode = TREE_CODE (lhs);
4576 rcode = TREE_CODE (rhs);
4578 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4580 lhs = build2 (NE_EXPR, truth_type, lhs,
4581 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4582 lcode = NE_EXPR;
4585 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4587 rhs = build2 (NE_EXPR, truth_type, rhs,
4588 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4589 rcode = NE_EXPR;
4592 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4593 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4594 return 0;
4596 ll_arg = TREE_OPERAND (lhs, 0);
4597 lr_arg = TREE_OPERAND (lhs, 1);
4598 rl_arg = TREE_OPERAND (rhs, 0);
4599 rr_arg = TREE_OPERAND (rhs, 1);
4601 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4602 if (simple_operand_p (ll_arg)
4603 && simple_operand_p (lr_arg))
4605 tree result;
4606 if (operand_equal_p (ll_arg, rl_arg, 0)
4607 && operand_equal_p (lr_arg, rr_arg, 0))
4609 result = combine_comparisons (code, lcode, rcode,
4610 truth_type, ll_arg, lr_arg);
4611 if (result)
4612 return result;
4614 else if (operand_equal_p (ll_arg, rr_arg, 0)
4615 && operand_equal_p (lr_arg, rl_arg, 0))
4617 result = combine_comparisons (code, lcode,
4618 swap_tree_comparison (rcode),
4619 truth_type, ll_arg, lr_arg);
4620 if (result)
4621 return result;
4625 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4626 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4628 /* If the RHS can be evaluated unconditionally and its operands are
4629 simple, it wins to evaluate the RHS unconditionally on machines
4630 with expensive branches. In this case, this isn't a comparison
4631 that can be merged. Avoid doing this if the RHS is a floating-point
4632 comparison since those can trap. */
4634 if (BRANCH_COST >= 2
4635 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4636 && simple_operand_p (rl_arg)
4637 && simple_operand_p (rr_arg))
4639 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4640 if (code == TRUTH_OR_EXPR
4641 && lcode == NE_EXPR && integer_zerop (lr_arg)
4642 && rcode == NE_EXPR && integer_zerop (rr_arg)
4643 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4644 return build2 (NE_EXPR, truth_type,
4645 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4646 ll_arg, rl_arg),
4647 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4649 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4650 if (code == TRUTH_AND_EXPR
4651 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4652 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4653 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4654 return build2 (EQ_EXPR, truth_type,
4655 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4656 ll_arg, rl_arg),
4657 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4659 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4660 return build2 (code, truth_type, lhs, rhs);
4663 /* See if the comparisons can be merged. Then get all the parameters for
4664 each side. */
4666 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4667 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4668 return 0;
4670 volatilep = 0;
4671 ll_inner = decode_field_reference (ll_arg,
4672 &ll_bitsize, &ll_bitpos, &ll_mode,
4673 &ll_unsignedp, &volatilep, &ll_mask,
4674 &ll_and_mask);
4675 lr_inner = decode_field_reference (lr_arg,
4676 &lr_bitsize, &lr_bitpos, &lr_mode,
4677 &lr_unsignedp, &volatilep, &lr_mask,
4678 &lr_and_mask);
4679 rl_inner = decode_field_reference (rl_arg,
4680 &rl_bitsize, &rl_bitpos, &rl_mode,
4681 &rl_unsignedp, &volatilep, &rl_mask,
4682 &rl_and_mask);
4683 rr_inner = decode_field_reference (rr_arg,
4684 &rr_bitsize, &rr_bitpos, &rr_mode,
4685 &rr_unsignedp, &volatilep, &rr_mask,
4686 &rr_and_mask);
4688 /* The inner operation on the lhs of each comparison must be the
4689 same if we are to be able to do anything.
4690 Then see if we have constants. If not, the same must be true for
4691 the rhs's. */
4692 if (volatilep || ll_inner == 0 || rl_inner == 0
4693 || ! operand_equal_p (ll_inner, rl_inner, 0))
4694 return 0;
4696 if (TREE_CODE (lr_arg) == INTEGER_CST
4697 && TREE_CODE (rr_arg) == INTEGER_CST)
4698 l_const = lr_arg, r_const = rr_arg;
4699 else if (lr_inner == 0 || rr_inner == 0
4700 || ! operand_equal_p (lr_inner, rr_inner, 0))
4701 return 0;
4702 else
4703 l_const = r_const = 0;
4705 /* If either comparison code is not correct for our logical operation,
4706 fail. However, we can convert a one-bit comparison against zero into
4707 the opposite comparison against that bit being set in the field. */
4709 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4710 if (lcode != wanted_code)
4712 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4714 /* Make the left operand unsigned, since we are only interested
4715 in the value of one bit. Otherwise we are doing the wrong
4716 thing below. */
4717 ll_unsignedp = 1;
4718 l_const = ll_mask;
4720 else
4721 return 0;
4724 /* This is analogous to the code for l_const above. */
4725 if (rcode != wanted_code)
4727 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4729 rl_unsignedp = 1;
4730 r_const = rl_mask;
4732 else
4733 return 0;
4736 /* After this point all optimizations will generate bit-field
4737 references, which we might not want. */
4738 if (! lang_hooks.can_use_bit_fields_p ())
4739 return 0;
4741 /* See if we can find a mode that contains both fields being compared on
4742 the left. If we can't, fail. Otherwise, update all constants and masks
4743 to be relative to a field of that size. */
4744 first_bit = MIN (ll_bitpos, rl_bitpos);
4745 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4746 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4747 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4748 volatilep);
4749 if (lnmode == VOIDmode)
4750 return 0;
4752 lnbitsize = GET_MODE_BITSIZE (lnmode);
4753 lnbitpos = first_bit & ~ (lnbitsize - 1);
4754 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4755 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4757 if (BYTES_BIG_ENDIAN)
4759 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4760 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4763 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4764 size_int (xll_bitpos), 0);
4765 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4766 size_int (xrl_bitpos), 0);
4768 if (l_const)
4770 l_const = fold_convert (lntype, l_const);
4771 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4772 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4773 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4774 fold (build1 (BIT_NOT_EXPR,
4775 lntype, ll_mask)),
4776 0)))
4778 warning ("comparison is always %d", wanted_code == NE_EXPR);
4780 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4783 if (r_const)
4785 r_const = fold_convert (lntype, r_const);
4786 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4787 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4788 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4789 fold (build1 (BIT_NOT_EXPR,
4790 lntype, rl_mask)),
4791 0)))
4793 warning ("comparison is always %d", wanted_code == NE_EXPR);
4795 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4799 /* If the right sides are not constant, do the same for them. Also,
4800 disallow this optimization if a size or signedness mismatch occurs
4801 between the left and right sides. */
4802 if (l_const == 0)
4804 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4805 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4806 /* Make sure the two fields on the right
4807 correspond to the left without being swapped. */
4808 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4809 return 0;
4811 first_bit = MIN (lr_bitpos, rr_bitpos);
4812 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4813 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4814 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4815 volatilep);
4816 if (rnmode == VOIDmode)
4817 return 0;
4819 rnbitsize = GET_MODE_BITSIZE (rnmode);
4820 rnbitpos = first_bit & ~ (rnbitsize - 1);
4821 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4822 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4824 if (BYTES_BIG_ENDIAN)
4826 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4827 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4830 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4831 size_int (xlr_bitpos), 0);
4832 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4833 size_int (xrr_bitpos), 0);
4835 /* Make a mask that corresponds to both fields being compared.
4836 Do this for both items being compared. If the operands are the
4837 same size and the bits being compared are in the same position
4838 then we can do this by masking both and comparing the masked
4839 results. */
4840 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4841 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4842 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4844 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4845 ll_unsignedp || rl_unsignedp);
4846 if (! all_ones_mask_p (ll_mask, lnbitsize))
4847 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4849 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4850 lr_unsignedp || rr_unsignedp);
4851 if (! all_ones_mask_p (lr_mask, rnbitsize))
4852 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4854 return build2 (wanted_code, truth_type, lhs, rhs);
4857 /* There is still another way we can do something: If both pairs of
4858 fields being compared are adjacent, we may be able to make a wider
4859 field containing them both.
4861 Note that we still must mask the lhs/rhs expressions. Furthermore,
4862 the mask must be shifted to account for the shift done by
4863 make_bit_field_ref. */
4864 if ((ll_bitsize + ll_bitpos == rl_bitpos
4865 && lr_bitsize + lr_bitpos == rr_bitpos)
4866 || (ll_bitpos == rl_bitpos + rl_bitsize
4867 && lr_bitpos == rr_bitpos + rr_bitsize))
4869 tree type;
4871 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4872 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4873 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4874 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4876 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4877 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4878 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4879 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4881 /* Convert to the smaller type before masking out unwanted bits. */
4882 type = lntype;
4883 if (lntype != rntype)
4885 if (lnbitsize > rnbitsize)
4887 lhs = fold_convert (rntype, lhs);
4888 ll_mask = fold_convert (rntype, ll_mask);
4889 type = rntype;
4891 else if (lnbitsize < rnbitsize)
4893 rhs = fold_convert (lntype, rhs);
4894 lr_mask = fold_convert (lntype, lr_mask);
4895 type = lntype;
4899 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4900 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4902 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4903 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4905 return build2 (wanted_code, truth_type, lhs, rhs);
4908 return 0;
4911 /* Handle the case of comparisons with constants. If there is something in
4912 common between the masks, those bits of the constants must be the same.
4913 If not, the condition is always false. Test for this to avoid generating
4914 incorrect code below. */
4915 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4916 if (! integer_zerop (result)
4917 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4918 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4920 if (wanted_code == NE_EXPR)
4922 warning ("%<or%> of unmatched not-equal tests is always 1");
4923 return constant_boolean_node (true, truth_type);
4925 else
4927 warning ("%<and%> of mutually exclusive equal-tests is always 0");
4928 return constant_boolean_node (false, truth_type);
4932 /* Construct the expression we will return. First get the component
4933 reference we will make. Unless the mask is all ones the width of
4934 that field, perform the mask operation. Then compare with the
4935 merged constant. */
4936 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4937 ll_unsignedp || rl_unsignedp);
4939 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4940 if (! all_ones_mask_p (ll_mask, lnbitsize))
4941 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4943 return build2 (wanted_code, truth_type, result,
4944 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
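/* Two simple instances of the foldings above (illustrative; the
   helper is hypothetical and unused).  The bit-field merging itself
   depends on adjacent fields sharing a machine word: for
   "p->a == 2 && p->b == 4" both loads become one masked compare
   against the merged constant.  */

static int
example_or_nonzero (int a, int b)
{
  /* What (a != 0) || (b != 0) is rewritten to on targets where
     branches are expensive: one IOR and a single compare.  */
  return (a | b) != 0;
}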
4947 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4948 constant. */
4950 static tree
4951 optimize_minmax_comparison (tree t)
4953 tree type = TREE_TYPE (t);
4954 tree arg0 = TREE_OPERAND (t, 0);
4955 enum tree_code op_code;
4956 tree comp_const = TREE_OPERAND (t, 1);
4957 tree minmax_const;
4958 int consts_equal, consts_lt;
4959 tree inner;
4961 STRIP_SIGN_NOPS (arg0);
4963 op_code = TREE_CODE (arg0);
4964 minmax_const = TREE_OPERAND (arg0, 1);
4965 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4966 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4967 inner = TREE_OPERAND (arg0, 0);
4969 /* If something does not permit us to optimize, return the original tree. */
4970 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4971 || TREE_CODE (comp_const) != INTEGER_CST
4972 || TREE_CONSTANT_OVERFLOW (comp_const)
4973 || TREE_CODE (minmax_const) != INTEGER_CST
4974 || TREE_CONSTANT_OVERFLOW (minmax_const))
4975 return t;
4977 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4978 and GT_EXPR, doing the rest with recursive calls using logical
4979 simplifications. */
4980 switch (TREE_CODE (t))
4982 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4983 return
4984 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4986 case GE_EXPR:
4987 return
4988 fold (build2 (TRUTH_ORIF_EXPR, type,
4989 optimize_minmax_comparison
4990 (build2 (EQ_EXPR, type, arg0, comp_const)),
4991 optimize_minmax_comparison
4992 (build2 (GT_EXPR, type, arg0, comp_const))));
4994 case EQ_EXPR:
4995 if (op_code == MAX_EXPR && consts_equal)
4996 /* MAX (X, 0) == 0 -> X <= 0 */
4997 return fold (build2 (LE_EXPR, type, inner, comp_const));
4999 else if (op_code == MAX_EXPR && consts_lt)
5000 /* MAX (X, 0) == 5 -> X == 5 */
5001 return fold (build2 (EQ_EXPR, type, inner, comp_const));
5003 else if (op_code == MAX_EXPR)
5004 /* MAX (X, 0) == -1 -> false */
5005 return omit_one_operand (type, integer_zero_node, inner);
5007 else if (consts_equal)
5008 /* MIN (X, 0) == 0 -> X >= 0 */
5009 return fold (build2 (GE_EXPR, type, inner, comp_const));
5011 else if (consts_lt)
5012 /* MIN (X, 0) == 5 -> false */
5013 return omit_one_operand (type, integer_zero_node, inner);
5015 else
5016 /* MIN (X, 0) == -1 -> X == -1 */
5017 return fold (build2 (EQ_EXPR, type, inner, comp_const));
5019 case GT_EXPR:
5020 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5021 /* MAX (X, 0) > 0 -> X > 0
5022 MAX (X, 0) > 5 -> X > 5 */
5023 return fold (build2 (GT_EXPR, type, inner, comp_const));
5025 else if (op_code == MAX_EXPR)
5026 /* MAX (X, 0) > -1 -> true */
5027 return omit_one_operand (type, integer_one_node, inner);
5029 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5030 /* MIN (X, 0) > 0 -> false
5031 MIN (X, 0) > 5 -> false */
5032 return omit_one_operand (type, integer_zero_node, inner);
5034 else
5035 /* MIN (X, 0) > -1 -> X > -1 */
5036 return fold (build2 (GT_EXPR, type, inner, comp_const));
5038 default:
5039 return t;
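/* Example of the recursion above (illustrative): MAX (x, 3) <= 5 is
   first inverted to MAX (x, 3) > 5, which the GT_EXPR case folds to
   x > 5 since 3 < 5, and the result is inverted back to x <= 5.  */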
5043 /* T is an integer expression that is being multiplied, divided, or taken a
5044 modulus (CODE says which and what kind of divide or modulus) by a
5045 constant C. See if we can eliminate that operation by folding it with
5046 other operations already in T. WIDE_TYPE, if non-null, is a type that
5047 should be used for the computation if wider than our type.
5049 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5050 (X * 2) + (Y * 4). We must, however, be assured that either the original
5051 expression would not overflow or that overflow is undefined for the type
5052 in the language in question.
5054 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5055 the machine has a multiply-accumulate insn or that this is part of an
5056 addressing calculation.
5058 If we return a non-null expression, it is an equivalent form of the
5059 original computation, but need not be in the original type. */
5061 static tree
5062 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5064 /* To avoid exponential search depth, refuse to allow recursion past
5065 three levels. Beyond that (1) it's highly unlikely that we'll find
5066 something interesting and (2) we've probably processed it before
5067 when we built the inner expression. */
5069 static int depth;
5070 tree ret;
5072 if (depth > 3)
5073 return NULL;
5075 depth++;
5076 ret = extract_muldiv_1 (t, c, code, wide_type);
5077 depth--;
5079 return ret;
5082 static tree
5083 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5085 tree type = TREE_TYPE (t);
5086 enum tree_code tcode = TREE_CODE (t);
5087 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5088 > GET_MODE_SIZE (TYPE_MODE (type)))
5089 ? wide_type : type);
5090 tree t1, t2;
5091 int same_p = tcode == code;
5092 tree op0 = NULL_TREE, op1 = NULL_TREE;
5094 /* Don't deal with constants of zero here; they confuse the code below. */
5095 if (integer_zerop (c))
5096 return NULL_TREE;
5098 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5099 op0 = TREE_OPERAND (t, 0);
5101 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5102 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5104 /* Note that we need not handle conditional operations here since fold
5105 already handles those cases. So just do arithmetic here. */
5106 switch (tcode)
5108 case INTEGER_CST:
5109 /* For a constant, we can always simplify if we are a multiply
5110 or (for divide and modulus) if it is a multiple of our constant. */
5111 if (code == MULT_EXPR
5112 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5113 return const_binop (code, fold_convert (ctype, t),
5114 fold_convert (ctype, c), 0);
5115 break;
5117 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5118 /* If op0 is an expression ... */
5119 if ((COMPARISON_CLASS_P (op0)
5120 || UNARY_CLASS_P (op0)
5121 || BINARY_CLASS_P (op0)
5122 || EXPRESSION_CLASS_P (op0))
5123 /* ... and is unsigned, and its type is smaller than ctype,
5124 then we cannot pass through as widening. */
5125 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5126 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5127 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5128 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5129 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5130 /* ... or this is a truncation (t is narrower than op0),
5131 then we cannot pass through this narrowing. */
5132 || (GET_MODE_SIZE (TYPE_MODE (type))
5133 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5134 /* ... or signedness changes for division or modulus,
5135 then we cannot pass through this conversion. */
5136 || (code != MULT_EXPR
5137 && (TYPE_UNSIGNED (ctype)
5138 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5139 break;
5141 /* Pass the constant down and see if we can make a simplification. If
5142 we can, replace this expression with the inner simplification for
5143 possible later conversion to our or some other type. */
5144 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5145 && TREE_CODE (t2) == INTEGER_CST
5146 && ! TREE_CONSTANT_OVERFLOW (t2)
5147 && (0 != (t1 = extract_muldiv (op0, t2, code,
5148 code == MULT_EXPR
5149 ? ctype : NULL_TREE))))
5150 return t1;
5151 break;
5153 case ABS_EXPR:
5154 /* If widening the type changes it from signed to unsigned, then we
5155 must avoid building ABS_EXPR itself as unsigned. */
5156 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5158 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5159 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5161 t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
5162 return fold_convert (ctype, t1);
5164 break;
5166 /* FALLTHROUGH */
5167 case NEGATE_EXPR:
5168 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5169 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5170 break;
5172 case MIN_EXPR: case MAX_EXPR:
5173 /* If widening the type changes the signedness, then we can't perform
5174 this optimization as that changes the result. */
5175 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5176 break;
5178 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5179 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5180 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5182 if (tree_int_cst_sgn (c) < 0)
5183 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5185 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5186 fold_convert (ctype, t2)));
5188 break;
5190 case LSHIFT_EXPR: case RSHIFT_EXPR:
5191 /* If the second operand is constant, this is a multiplication
5192 or floor division, by a power of two, so we can treat it that
5193 way unless the multiplier or divisor overflows. Signed
5194 left-shift overflow is implementation-defined rather than
5195 undefined in C90, so do not convert signed left shift into
5196 multiplication. */
5197 if (TREE_CODE (op1) == INTEGER_CST
5198 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5199 /* const_binop may not detect overflow correctly,
5200 so check for it explicitly here. */
5201 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5202 && TREE_INT_CST_HIGH (op1) == 0
5203 && 0 != (t1 = fold_convert (ctype,
5204 const_binop (LSHIFT_EXPR,
5205 size_one_node,
5206 op1, 0)))
5207 && ! TREE_OVERFLOW (t1))
5208 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5209 ? MULT_EXPR : FLOOR_DIV_EXPR,
5210 ctype, fold_convert (ctype, op0), t1),
5211 c, code, wide_type);
5212 break;
5214 case PLUS_EXPR: case MINUS_EXPR:
5215 /* See if we can eliminate the operation on both sides. If we can, we
5216 can return a new PLUS or MINUS. If we can't, the only remaining
5217 cases where we can do anything are if the second operand is a
5218 constant. */
5219 t1 = extract_muldiv (op0, c, code, wide_type);
5220 t2 = extract_muldiv (op1, c, code, wide_type);
5221 if (t1 != 0 && t2 != 0
5222 && (code == MULT_EXPR
5223 /* If not multiplication, we can only do this if both operands
5224 are divisible by c. */
5225 || (multiple_of_p (ctype, op0, c)
5226 && multiple_of_p (ctype, op1, c))))
5227 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5228 fold_convert (ctype, t2)));
5230 /* If this was a subtraction, negate OP1 and set it to be an addition.
5231 This simplifies the logic below. */
5232 if (tcode == MINUS_EXPR)
5233 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5235 if (TREE_CODE (op1) != INTEGER_CST)
5236 break;
5238 /* If either OP1 or C are negative, this optimization is not safe for
5239 some of the division and remainder types while for others we need
5240 to change the code. */
5241 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5243 if (code == CEIL_DIV_EXPR)
5244 code = FLOOR_DIV_EXPR;
5245 else if (code == FLOOR_DIV_EXPR)
5246 code = CEIL_DIV_EXPR;
5247 else if (code != MULT_EXPR
5248 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5249 break;
5252 /* If it's a multiply or a division/modulus operation of a multiple
5253 of our constant, do the operation and verify it doesn't overflow. */
5254 if (code == MULT_EXPR
5255 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5257 op1 = const_binop (code, fold_convert (ctype, op1),
5258 fold_convert (ctype, c), 0);
5259 /* We allow the constant to overflow with wrapping semantics. */
5260 if (op1 == 0
5261 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5262 break;
5264 else
5265 break;
5267 /* If we have an unsigned type that is not a sizetype, we cannot widen
5268 the operation since it will change the result if the original
5269 computation overflowed. */
5270 if (TYPE_UNSIGNED (ctype)
5271 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5272 && ctype != type)
5273 break;
5275 /* If we were able to eliminate our operation from the first side,
5276 apply our operation to the second side and reform the PLUS. */
5277 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5278 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5280 /* The last case is if we are a multiply. In that case, we can
5281 apply the distributive law to commute the multiply and addition
5282 if the multiplication of the constants doesn't overflow. */
5283 if (code == MULT_EXPR)
5284 return fold (build2 (tcode, ctype,
5285 fold (build2 (code, ctype,
5286 fold_convert (ctype, op0),
5287 fold_convert (ctype, c))),
5288 op1));
5290 break;
5292 case MULT_EXPR:
5293 /* We have a special case here if we are doing something like
5294 (C * 8) % 4 since we know that's zero. */
5295 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5296 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5297 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5298 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5299 return omit_one_operand (type, integer_zero_node, op0);
5301 /* ... fall through ... */
5303 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5304 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5305 /* If we can extract our operation from the LHS, do so and return a
5306 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5307 do something only if the second operand is a constant. */
5308 if (same_p
5309 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5310 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5311 fold_convert (ctype, op1)));
5312 else if (tcode == MULT_EXPR && code == MULT_EXPR
5313 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5314 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5315 fold_convert (ctype, t1)));
5316 else if (TREE_CODE (op1) != INTEGER_CST)
5317 return 0;
5319 /* If these are the same operation types, we can associate them
5320 assuming no overflow. */
5321 if (tcode == code
5322 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5323 fold_convert (ctype, c), 0))
5324 && ! TREE_OVERFLOW (t1))
5325 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5327 /* If these operations "cancel" each other, we have the main
5328 optimizations of this pass, which occur when either constant is a
5329 multiple of the other, in which case we replace this with either an
5330 operation of CODE or TCODE.
5332 If we have an unsigned type that is not a sizetype, we cannot do
5333 this since it will change the result if the original computation
5334 overflowed. */
5335 if ((! TYPE_UNSIGNED (ctype)
5336 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5337 && ! flag_wrapv
5338 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5339 || (tcode == MULT_EXPR
5340 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5341 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5343 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5344 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5345 fold_convert (ctype,
5346 const_binop (TRUNC_DIV_EXPR,
5347 op1, c, 0))));
5348 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5349 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5350 fold_convert (ctype,
5351 const_binop (TRUNC_DIV_EXPR,
5352 c, op1, 0))));
5354 break;
5356 default:
5357 break;
5360 return 0;
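/* Sketches of the main simplifications above, in source terms (valid
   only when the original expression cannot overflow or overflow is
   undefined for the type, as the comments before extract_muldiv
   require):

     ((x * 8) + (y * 16)) / 4   ==>   (x * 2) + (y * 4)
     (x + 7) * 4                ==>   x * 4 + 28
     (x * 12) / 4               ==>   x * 3      (the "cancel" case)
     (x * 8) % 4                ==>   0          (the MULT_EXPR case)  */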
5363 /* Return a node which has the indicated constant VALUE (either 0 or
5364 1), and is of the indicated TYPE. */
5366 tree
5367 constant_boolean_node (int value, tree type)
5369 if (type == integer_type_node)
5370 return value ? integer_one_node : integer_zero_node;
5371 else if (type == boolean_type_node)
5372 return value ? boolean_true_node : boolean_false_node;
5373 else if (TREE_CODE (type) == BOOLEAN_TYPE)
5374 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5375 : integer_zero_node);
5376 else
5377 return build_int_cst (type, value);
5381 /* Return true if expr looks like an ARRAY_REF and set base and
5382 offset to the appropriate trees. If there is no offset,
5383 offset is set to NULL_TREE. */
5385 static bool
5386 extract_array_ref (tree expr, tree *base, tree *offset)
5388 /* We have to be careful when stripping nops, since with a
5389 different base type the meaning of the offset can change. */
5390 tree inner_expr = expr;
5391 STRIP_NOPS (inner_expr);
5392 /* One canonical form is a PLUS_EXPR with the first
5393 argument being an ADDR_EXPR with a possible NOP_EXPR
5394 attached. */
5395 if (TREE_CODE (expr) == PLUS_EXPR)
5397 tree op0 = TREE_OPERAND (expr, 0);
5398 STRIP_NOPS (op0);
5399 if (TREE_CODE (op0) == ADDR_EXPR)
5401 *base = TREE_OPERAND (expr, 0);
5402 *offset = TREE_OPERAND (expr, 1);
5403 return true;
5406 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5407 which we transform into an ADDR_EXPR with appropriate
5408 offset. For other arguments to the ADDR_EXPR we assume
5409 zero offset and as such do not care about the ADDR_EXPR
5410 type and strip possible nops from it. */
5411 else if (TREE_CODE (inner_expr) == ADDR_EXPR)
5413 tree op0 = TREE_OPERAND (inner_expr, 0);
5414 if (TREE_CODE (op0) == ARRAY_REF)
5416 *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
5417 *offset = TREE_OPERAND (op0, 1);
5419 else
5421 *base = inner_expr;
5422 *offset = NULL_TREE;
5424 return true;
5427 return false;
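/* The canonical forms handled above, in source terms (illustrative):

     &a[0] + i   PLUS_EXPR of an ADDR_EXPR: base &a[0], offset I
     &a[i]       ADDR_EXPR of an ARRAY_REF: base &a, offset I
     &x          any other ADDR_EXPR: base &x, offset NULL_TREE  */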
5431 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5432 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5433 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5434 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5435 COND is the first argument to CODE; otherwise (as in the example
5436 given here), it is the second argument. TYPE is the type of the
5437 original expression. Return NULL_TREE if no simplification is
5438 possible. */
5440 static tree
5441 fold_binary_op_with_conditional_arg (tree t, enum tree_code code, tree cond,
5442 tree arg, int cond_first_p)
5444 const tree type = TREE_TYPE (t);
5445 tree cond_type = cond_first_p ? TREE_TYPE (TREE_OPERAND (t, 0))
5446 : TREE_TYPE (TREE_OPERAND (t, 1));
5447 tree arg_type = cond_first_p ? TREE_TYPE (TREE_OPERAND (t, 1))
5448 : TREE_TYPE (TREE_OPERAND (t, 0));
5449 tree test, true_value, false_value;
5450 tree lhs = NULL_TREE;
5451 tree rhs = NULL_TREE;
5453 /* This transformation is only worthwhile if we don't have to wrap
5454 arg in a SAVE_EXPR, and the operation can be simplified on at least
5455 one of the branches once it is pushed inside the COND_EXPR. */
5456 if (!TREE_CONSTANT (arg))
5457 return NULL_TREE;
5459 if (TREE_CODE (cond) == COND_EXPR)
5461 test = TREE_OPERAND (cond, 0);
5462 true_value = TREE_OPERAND (cond, 1);
5463 false_value = TREE_OPERAND (cond, 2);
5464 /* If this operand is a void expression (for example, because it
5465 throws), it does not make sense to try to perform a logical or
5466 arithmetic operation involving it. */
5467 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5468 lhs = true_value;
5469 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5470 rhs = false_value;
5472 else
5474 tree testtype = TREE_TYPE (cond);
5475 test = cond;
5476 true_value = constant_boolean_node (true, testtype);
5477 false_value = constant_boolean_node (false, testtype);
5480 arg = fold_convert (arg_type, arg);
5481 if (lhs == 0)
5483 true_value = fold_convert (cond_type, true_value);
5484 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5485 : build2 (code, type, arg, true_value));
5487 if (rhs == 0)
5489 false_value = fold_convert (cond_type, false_value);
5490 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5491 : build2 (code, type, arg, false_value));
5494 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5495 return fold_convert (type, test);
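/* A worked instance of the transformation above: with the constant
   operand 2, `2 + (b ? 3 : z)' is pushed inside the conditional and
   simplifies to `b ? 5 : 2 + z', and `1 + (x < y)' becomes
   `(x < y) ? 2 : 1' via the constant_boolean_node branch.  */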
5499 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5501 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5502 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5503 ADDEND is the same as X.
5505 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5506 and finite. The problematic cases are when X is zero, and its mode
5507 has signed zeros. In the case of rounding towards -infinity,
5508 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5509 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5511 static bool
5512 fold_real_zero_addition_p (tree type, tree addend, int negate)
5514 if (!real_zerop (addend))
5515 return false;
5517 /* Don't allow the fold with -fsignaling-nans. */
5518 if (HONOR_SNANS (TYPE_MODE (type)))
5519 return false;
5521 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5522 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5523 return true;
5525 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5526 if (TREE_CODE (addend) == REAL_CST
5527 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5528 negate = !negate;
5530 /* The mode has signed zeros, and we have to honor their sign.
5531 In this situation, there is only one case we can return true for.
5532 X - 0 is the same as X unless rounding towards -infinity is
5533 supported. */
5534 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
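/* Examples under the default flags, assuming an IEEE mode: `x - 0.0'
   folds to `x' (sign-dependent rounding is not honored by default),
   but `x + 0.0' does not, since (-0.0) + 0.0 is +0.0.  When signed
   zeros are not honored, e.g. with -funsafe-math-optimizations, both
   forms fold.  */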
5537 /* Subroutine of fold() that checks comparisons of built-in math
5538 functions against real constants.
5540 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5541 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5542 is the type of the result and ARG0 and ARG1 are the operands of the
5543 comparison. ARG1 must be a TREE_REAL_CST.
5545 The function returns the constant folded tree if a simplification
5546 can be made, and NULL_TREE otherwise. */
5548 static tree
5549 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5550 tree type, tree arg0, tree arg1)
5552 REAL_VALUE_TYPE c;
5554 if (BUILTIN_SQRT_P (fcode))
5556 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5557 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5559 c = TREE_REAL_CST (arg1);
5560 if (REAL_VALUE_NEGATIVE (c))
5562 /* sqrt(x) ==, < or <= y is always false, if y is negative. */
5563 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5564 return omit_one_operand (type, integer_zero_node, arg);
5566 /* sqrt(x) > y is always true, if y is negative and we
5567 don't care about NaNs, i.e. negative values of x. */
5568 if (code == NE_EXPR || !HONOR_NANS (mode))
5569 return omit_one_operand (type, integer_one_node, arg);
5571 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5572 return fold (build2 (GE_EXPR, type, arg,
5573 build_real (TREE_TYPE (arg), dconst0)));
5575 else if (code == GT_EXPR || code == GE_EXPR)
5577 REAL_VALUE_TYPE c2;
5579 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5580 real_convert (&c2, mode, &c2);
5582 if (REAL_VALUE_ISINF (c2))
5584 /* sqrt(x) > y is x == +Inf, when y is very large. */
5585 if (HONOR_INFINITIES (mode))
5586 return fold (build2 (EQ_EXPR, type, arg,
5587 build_real (TREE_TYPE (arg), c2)));
5589 /* sqrt(x) > y is always false, when y is very large
5590 and we don't care about infinities. */
5591 return omit_one_operand (type, integer_zero_node, arg);
5594 /* sqrt(x) > c is the same as x > c*c. */
5595 return fold (build2 (code, type, arg,
5596 build_real (TREE_TYPE (arg), c2)));
5598 else if (code == LT_EXPR || code == LE_EXPR)
5600 REAL_VALUE_TYPE c2;
5602 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5603 real_convert (&c2, mode, &c2);
5605 if (REAL_VALUE_ISINF (c2))
5607 /* sqrt(x) < y is always true, when y is a very large
5608 value and we don't care about NaNs or Infinities. */
5609 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5610 return omit_one_operand (type, integer_one_node, arg);
5612 /* sqrt(x) < y is x != +Inf when y is very large and we
5613 don't care about NaNs. */
5614 if (! HONOR_NANS (mode))
5615 return fold (build2 (NE_EXPR, type, arg,
5616 build_real (TREE_TYPE (arg), c2)));
5618 /* sqrt(x) < y is x >= 0 when y is very large and we
5619 don't care about Infinities. */
5620 if (! HONOR_INFINITIES (mode))
5621 return fold (build2 (GE_EXPR, type, arg,
5622 build_real (TREE_TYPE (arg), dconst0)));
5624 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5625 if (lang_hooks.decls.global_bindings_p () != 0
5626 || CONTAINS_PLACEHOLDER_P (arg))
5627 return NULL_TREE;
5629 arg = save_expr (arg);
5630 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5631 fold (build2 (GE_EXPR, type, arg,
5632 build_real (TREE_TYPE (arg),
5633 dconst0))),
5634 fold (build2 (NE_EXPR, type, arg,
5635 build_real (TREE_TYPE (arg),
5636 c2)))));
5639 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5640 if (! HONOR_NANS (mode))
5641 return fold (build2 (code, type, arg,
5642 build_real (TREE_TYPE (arg), c2)));
5644 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5645 if (lang_hooks.decls.global_bindings_p () == 0
5646 && ! CONTAINS_PLACEHOLDER_P (arg))
5648 arg = save_expr (arg);
5649 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5650 fold (build2 (GE_EXPR, type, arg,
5651 build_real (TREE_TYPE (arg),
5652 dconst0))),
5653 fold (build2 (code, type, arg,
5654 build_real (TREE_TYPE (arg),
5655 c2)))));
5660 return NULL_TREE;
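/* Concrete examples of the sqrt cases above, assuming NaNs and
   infinities are honored (the default):

     sqrt (x) > -1.0    folds to    x >= 0.0
     sqrt (x) > 2.0     folds to    x > 4.0
     sqrt (x) < 2.0     folds to    x >= 0.0 && x < 4.0  */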
5663 /* Subroutine of fold() that optimizes comparisons against Infinities,
5664 either +Inf or -Inf.
5666 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5667 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5668 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5670 The function returns the constant folded tree if a simplification
5671 can be made, and NULL_TREE otherwise. */
5673 static tree
5674 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5676 enum machine_mode mode;
5677 REAL_VALUE_TYPE max;
5678 tree temp;
5679 bool neg;
5681 mode = TYPE_MODE (TREE_TYPE (arg0));
5683 /* For negative infinity swap the sense of the comparison. */
5684 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5685 if (neg)
5686 code = swap_tree_comparison (code);
5688 switch (code)
5690 case GT_EXPR:
5691 /* x > +Inf is always false, if we ignore sNaNs. */
5692 if (HONOR_SNANS (mode))
5693 return NULL_TREE;
5694 return omit_one_operand (type, integer_zero_node, arg0);
5696 case LE_EXPR:
5697 /* x <= +Inf is always true, if we don't care about NaNs. */
5698 if (! HONOR_NANS (mode))
5699 return omit_one_operand (type, integer_one_node, arg0);
5701 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5702 if (lang_hooks.decls.global_bindings_p () == 0
5703 && ! CONTAINS_PLACEHOLDER_P (arg0))
5705 arg0 = save_expr (arg0);
5706 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5708 break;
5710 case EQ_EXPR:
5711 case GE_EXPR:
5712 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5713 real_maxval (&max, neg, mode);
5714 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5715 arg0, build_real (TREE_TYPE (arg0), max)));
5717 case LT_EXPR:
5718 /* x < +Inf is always equal to x <= DBL_MAX. */
5719 real_maxval (&max, neg, mode);
5720 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5721 arg0, build_real (TREE_TYPE (arg0), max)));
5723 case NE_EXPR:
5724 /* x != +Inf is always equal to !(x > DBL_MAX). */
5725 real_maxval (&max, neg, mode);
5726 if (! HONOR_NANS (mode))
5727 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5728 arg0, build_real (TREE_TYPE (arg0), max)));
5730 /* The transformation below creates non-gimple code and thus is
5731 not appropriate if we are in gimple form. */
5732 if (in_gimple_form)
5733 return NULL_TREE;
5735 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5736 arg0, build_real (TREE_TYPE (arg0), max)));
5737 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5739 default:
5740 break;
5743 return NULL_TREE;
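/* Concrete examples, writing MAX for the largest finite value of the
   mode (DBL_MAX for double):

     x == +Inf    folds to    x > MAX
     x == -Inf    folds to    x < -MAX
     x <  +Inf    folds to    x <= MAX
     x <= +Inf    folds to    x == x, i.e. !isnan (x)  */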
5746 /* Subroutine of fold() that optimizes comparisons of a division by
5747 a nonzero integer constant against an integer constant, i.e.
5748 X/C1 op C2.
5750 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5751 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5752 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5754 The function returns the constant folded tree if a simplification
5755 can be made, and NULL_TREE otherwise. */
5757 static tree
5758 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5760 tree prod, tmp, hi, lo;
5761 tree arg00 = TREE_OPERAND (arg0, 0);
5762 tree arg01 = TREE_OPERAND (arg0, 1);
5763 unsigned HOST_WIDE_INT lpart;
5764 HOST_WIDE_INT hpart;
5765 int overflow;
5767 /* We have to do this the hard way to detect unsigned overflow.
5768 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5769 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5770 TREE_INT_CST_HIGH (arg01),
5771 TREE_INT_CST_LOW (arg1),
5772 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5773 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5774 prod = force_fit_type (prod, -1, overflow, false);
5776 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5778 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5779 lo = prod;
5781 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5782 overflow = add_double (TREE_INT_CST_LOW (prod),
5783 TREE_INT_CST_HIGH (prod),
5784 TREE_INT_CST_LOW (tmp),
5785 TREE_INT_CST_HIGH (tmp),
5786 &lpart, &hpart);
5787 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5788 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5789 TREE_CONSTANT_OVERFLOW (prod));
5791 else if (tree_int_cst_sgn (arg01) >= 0)
5793 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5794 switch (tree_int_cst_sgn (arg1))
5796 case -1:
5797 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5798 hi = prod;
5799 break;
5801 case 0:
5802 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5803 hi = tmp;
5804 break;
5806 case 1:
5807 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5808 lo = prod;
5809 break;
5811 default:
5812 gcc_unreachable ();
5815 else
5817 /* A negative divisor reverses the relational operators. */
5818 code = swap_tree_comparison (code);
5820 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5821 switch (tree_int_cst_sgn (arg1))
5823 case -1:
5824 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5825 lo = prod;
5826 break;
5828 case 0:
5829 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5830 lo = tmp;
5831 break;
5833 case 1:
5834 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5835 hi = prod;
5836 break;
5838 default:
5839 gcc_unreachable ();
5843 switch (code)
5845 case EQ_EXPR:
5846 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5847 return omit_one_operand (type, integer_zero_node, arg00);
5848 if (TREE_OVERFLOW (hi))
5849 return fold (build2 (GE_EXPR, type, arg00, lo));
5850 if (TREE_OVERFLOW (lo))
5851 return fold (build2 (LE_EXPR, type, arg00, hi));
5852 return build_range_check (type, arg00, 1, lo, hi);
5854 case NE_EXPR:
5855 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5856 return omit_one_operand (type, integer_one_node, arg00);
5857 if (TREE_OVERFLOW (hi))
5858 return fold (build2 (LT_EXPR, type, arg00, lo));
5859 if (TREE_OVERFLOW (lo))
5860 return fold (build2 (GT_EXPR, type, arg00, hi));
5861 return build_range_check (type, arg00, 0, lo, hi);
5863 case LT_EXPR:
5864 if (TREE_OVERFLOW (lo))
5865 return omit_one_operand (type, integer_zero_node, arg00);
5866 return fold (build2 (LT_EXPR, type, arg00, lo));
5868 case LE_EXPR:
5869 if (TREE_OVERFLOW (hi))
5870 return omit_one_operand (type, integer_one_node, arg00);
5871 return fold (build2 (LE_EXPR, type, arg00, hi));
5873 case GT_EXPR:
5874 if (TREE_OVERFLOW (hi))
5875 return omit_one_operand (type, integer_zero_node, arg00);
5876 return fold (build2 (GT_EXPR, type, arg00, hi));
5878 case GE_EXPR:
5879 if (TREE_OVERFLOW (lo))
5880 return omit_one_operand (type, integer_one_node, arg00);
5881 return fold (build2 (GE_EXPR, type, arg00, lo));
5883 default:
5884 break;
5887 return NULL_TREE;
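/* A worked instance: for unsigned int x, `x / 3 == 2' gives lo = 6
   and hi = 6 + (3-1) = 8, so it folds to the range check
   6 <= x && x <= 8; for signed x, `x / 3 < 2' folds to x < 6, and a
   negative divisor such as in `x / -3 < 2' first swaps the sense of
   the comparison.  */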
5891 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5892 equality/inequality test, then return a simplified form of
5893 the test using shifts and logical operations. Otherwise return
5894 NULL. TYPE is the desired result type. */
5896 tree
5897 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5898 tree result_type)
5900 /* If this is testing a single bit, we can optimize the test. */
5901 if ((code == NE_EXPR || code == EQ_EXPR)
5902 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5903 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5905 tree inner = TREE_OPERAND (arg0, 0);
5906 tree type = TREE_TYPE (arg0);
5907 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5908 enum machine_mode operand_mode = TYPE_MODE (type);
5909 int ops_unsigned;
5910 tree signed_type, unsigned_type, intermediate_type;
5911 tree arg00;
5913 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5914 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5915 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5916 if (arg00 != NULL_TREE
5917 /* This is only a win if casting to a signed type is cheap,
5918 i.e. when arg00's type is not a partial mode. */
5919 && TYPE_PRECISION (TREE_TYPE (arg00))
5920 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5922 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5923 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5924 result_type, fold_convert (stype, arg00),
5925 fold_convert (stype, integer_zero_node)));
5928 /* Otherwise we have (A & C) != 0 where C is a single bit,
5929 convert that into ((A >> C2) & 1), where C2 = log2(C).
5930 Similarly for (A & C) == 0. */
5932 /* If INNER is a right shift of a constant and it plus BITNUM does
5933 not overflow, adjust BITNUM and INNER. */
5934 if (TREE_CODE (inner) == RSHIFT_EXPR
5935 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5936 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5937 && bitnum < TYPE_PRECISION (type)
5938 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5939 bitnum - TYPE_PRECISION (type)))
5941 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5942 inner = TREE_OPERAND (inner, 0);
5945 /* If we are going to be able to omit the AND below, we must do our
5946 operations as unsigned. If we must use the AND, we have a choice.
5947 Normally unsigned is faster, but for some machines signed is. */
5948 #ifdef LOAD_EXTEND_OP
5949 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
5950 && !flag_syntax_only) ? 0 : 1;
5951 #else
5952 ops_unsigned = 1;
5953 #endif
5955 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5956 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5957 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5958 inner = fold_convert (intermediate_type, inner);
5960 if (bitnum != 0)
5961 inner = build2 (RSHIFT_EXPR, intermediate_type,
5962 inner, size_int (bitnum));
5964 if (code == EQ_EXPR)
5965 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5966 inner, integer_one_node));
5968 /* Put the AND last so it can combine with more things. */
5969 inner = build2 (BIT_AND_EXPR, intermediate_type,
5970 inner, integer_one_node);
5972 /* Make sure to return the proper type. */
5973 inner = fold_convert (result_type, inner);
5975 return inner;
5977 return NULL_TREE;
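/* Concrete examples for 32-bit int a, assuming the shifts are done
   unsigned (the usual choice above): `(a & 0x80000000) != 0' matches
   the sign-bit case and folds to `a < 0', while `(a & 8) != 0'
   becomes `((unsigned int) a >> 3) & 1' and `(a & 8) == 0' becomes
   `(((unsigned int) a >> 3) ^ 1) & 1'.  */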
5980 /* Check whether we are allowed to reorder operands arg0 and arg1,
5981 such that the evaluation of arg1 occurs before arg0. */
5983 static bool
5984 reorder_operands_p (tree arg0, tree arg1)
5986 if (! flag_evaluation_order)
5987 return true;
5988 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5989 return true;
5990 return ! TREE_SIDE_EFFECTS (arg0)
5991 && ! TREE_SIDE_EFFECTS (arg1);
5994 /* Test whether it is preferable to swap two operands, ARG0 and
5995 ARG1, for example because ARG0 is an integer constant and ARG1
5996 isn't. If REORDER is true, only recommend swapping if we can
5997 evaluate the operands in reverse order. */
5999 bool
6000 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6002 STRIP_SIGN_NOPS (arg0);
6003 STRIP_SIGN_NOPS (arg1);
6005 if (TREE_CODE (arg1) == INTEGER_CST)
6006 return 0;
6007 if (TREE_CODE (arg0) == INTEGER_CST)
6008 return 1;
6010 if (TREE_CODE (arg1) == REAL_CST)
6011 return 0;
6012 if (TREE_CODE (arg0) == REAL_CST)
6013 return 1;
6015 if (TREE_CODE (arg1) == COMPLEX_CST)
6016 return 0;
6017 if (TREE_CODE (arg0) == COMPLEX_CST)
6018 return 1;
6020 if (TREE_CONSTANT (arg1))
6021 return 0;
6022 if (TREE_CONSTANT (arg0))
6023 return 1;
6025 if (optimize_size)
6026 return 0;
6028 if (reorder && flag_evaluation_order
6029 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6030 return 0;
6032 if (DECL_P (arg1))
6033 return 0;
6034 if (DECL_P (arg0))
6035 return 1;
6037 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6038 for commutative and comparison operators. Ensuring a canonical
6039 form allows the optimizers to find additional redundancies without
6040 having to explicitly check for both orderings. */
6041 if (TREE_CODE (arg0) == SSA_NAME
6042 && TREE_CODE (arg1) == SSA_NAME
6043 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6044 return 1;
6046 return 0;
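/* For example, this predicate is what drives the commutative-operand
   swap in fold: a constant ARG0 reports 1, so `5 + x' is rebuilt as
   `x + 5', and of two SSA_NAMEs the one with the smaller version
   number ends up first, giving a canonical operand order.  */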
6049 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6050 ARG0 is extended to a wider type. */
6052 static tree
6053 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6055 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6056 tree arg1_unw;
6057 tree shorter_type, outer_type;
6058 tree min, max;
6059 bool above, below;
6061 if (arg0_unw == arg0)
6062 return NULL_TREE;
6063 shorter_type = TREE_TYPE (arg0_unw);
6065 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6066 return NULL_TREE;
6068 arg1_unw = get_unwidened (arg1, shorter_type);
6069 if (!arg1_unw)
6070 return NULL_TREE;
6072 /* If possible, express the comparison in the shorter mode. */
6073 if ((code == EQ_EXPR || code == NE_EXPR
6074 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6075 && (TREE_TYPE (arg1_unw) == shorter_type
6076 || (TREE_CODE (arg1_unw) == INTEGER_CST
6077 && TREE_CODE (shorter_type) == INTEGER_TYPE
6078 && int_fits_type_p (arg1_unw, shorter_type))))
6079 return fold (build (code, type, arg0_unw,
6080 fold_convert (shorter_type, arg1_unw)));
6082 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6083 return NULL_TREE;
6085 /* If we are comparing with an integer that does not fit into the range
6086 of the shorter type, the result is known. */
6087 outer_type = TREE_TYPE (arg1_unw);
6088 min = lower_bound_in_type (outer_type, shorter_type);
6089 max = upper_bound_in_type (outer_type, shorter_type);
6091 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6092 max, arg1_unw));
6093 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6094 arg1_unw, min));
6096 switch (code)
6098 case EQ_EXPR:
6099 if (above || below)
6100 return omit_one_operand (type, integer_zero_node, arg0);
6101 break;
6103 case NE_EXPR:
6104 if (above || below)
6105 return omit_one_operand (type, integer_one_node, arg0);
6106 break;
6108 case LT_EXPR:
6109 case LE_EXPR:
6110 if (above)
6111 return omit_one_operand (type, integer_one_node, arg0);
6112 else if (below)
6113 return omit_one_operand (type, integer_zero_node, arg0);
6115 case GT_EXPR:
6116 case GE_EXPR:
6117 if (above)
6118 return omit_one_operand (type, integer_zero_node, arg0);
6119 else if (below)
6120 return omit_one_operand (type, integer_one_node, arg0);
6122 default:
6123 break;
6126 return NULL_TREE;
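/* Concrete examples for unsigned char c: `(int) c == 300' folds to 0,
   since 300 is outside [0, 255], while `(int) c == 10' is expressed
   in the shorter type as `c == 10'.  */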
6129 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6130 ARG0 just the signedness is changed. */
6132 static tree
6133 fold_sign_changed_comparison (enum tree_code code, tree type,
6134 tree arg0, tree arg1)
6136 tree arg0_inner, tmp;
6137 tree inner_type, outer_type;
6139 if (TREE_CODE (arg0) != NOP_EXPR)
6140 return NULL_TREE;
6142 outer_type = TREE_TYPE (arg0);
6143 arg0_inner = TREE_OPERAND (arg0, 0);
6144 inner_type = TREE_TYPE (arg0_inner);
6146 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6147 return NULL_TREE;
6149 if (TREE_CODE (arg1) != INTEGER_CST
6150 && !(TREE_CODE (arg1) == NOP_EXPR
6151 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6152 return NULL_TREE;
6154 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6155 && code != NE_EXPR
6156 && code != EQ_EXPR)
6157 return NULL_TREE;
6159 if (TREE_CODE (arg1) == INTEGER_CST)
6161 tmp = build_int_cst_wide (inner_type,
6162 TREE_INT_CST_LOW (arg1),
6163 TREE_INT_CST_HIGH (arg1));
6164 arg1 = force_fit_type (tmp, 0,
6165 TREE_OVERFLOW (arg1),
6166 TREE_CONSTANT_OVERFLOW (arg1));
6168 else
6169 arg1 = fold_convert (inner_type, arg1);
6171 return fold (build (code, type, arg0_inner, arg1));
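/* A worked instance: for unsigned int u, `(int) u == 5' has matching
   precisions with only the signedness changed, so it folds to
   `u == 5u', the constant being re-expressed in the inner type.  */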
6174 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6175 the step of the array. ADDR is the address. MULT is the multiplicative expression.
6176 If the function succeeds, the new address expression is returned. Otherwise
6177 NULL_TREE is returned. */
6179 static tree
6180 try_move_mult_to_index (enum tree_code code, tree addr, tree mult)
6182 tree s, delta, step;
6183 tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
6184 tree ref = TREE_OPERAND (addr, 0), pref;
6185 tree ret, pos;
6186 tree itype;
6188 STRIP_NOPS (arg0);
6189 STRIP_NOPS (arg1);
6191 if (TREE_CODE (arg0) == INTEGER_CST)
6193 s = arg0;
6194 delta = arg1;
6196 else if (TREE_CODE (arg1) == INTEGER_CST)
6198 s = arg1;
6199 delta = arg0;
6201 else
6202 return NULL_TREE;
6204 for (;; ref = TREE_OPERAND (ref, 0))
6206 if (TREE_CODE (ref) == ARRAY_REF)
6208 step = array_ref_element_size (ref);
6210 if (TREE_CODE (step) != INTEGER_CST)
6211 continue;
6213 itype = TREE_TYPE (step);
6215 /* If the type sizes do not match, we might run into problems
6216 when one of them would overflow. */
6217 if (TYPE_PRECISION (itype) != TYPE_PRECISION (TREE_TYPE (s)))
6218 continue;
6220 if (!operand_equal_p (step, fold_convert (itype, s), 0))
6221 continue;
6223 delta = fold_convert (itype, delta);
6224 break;
6227 if (!handled_component_p (ref))
6228 return NULL_TREE;
6231 /* We found a suitable array reference. So copy everything up to it,
6232 and replace the index. */
6234 pref = TREE_OPERAND (addr, 0);
6235 ret = copy_node (pref);
6236 pos = ret;
6238 while (pref != ref)
6240 pref = TREE_OPERAND (pref, 0);
6241 TREE_OPERAND (pos, 0) = copy_node (pref);
6242 pos = TREE_OPERAND (pos, 0);
6245 TREE_OPERAND (pos, 1) = fold (build2 (code, itype,
6246 TREE_OPERAND (pos, 1),
6247 delta));
6249 return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
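/* A worked instance: for int a[100] with 4-byte elements, the address
   `&a[i] + 4 * d' becomes `&a[i + d]'.  The multiplier must equal the
   array step exactly, so `&a[i] + 2 * d' is left alone.  */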
6253 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6254 means A >= Y && A != MAX, but in this case we know that
6255 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6257 static tree
6258 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6260 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6262 if (TREE_CODE (bound) == LT_EXPR)
6263 a = TREE_OPERAND (bound, 0);
6264 else if (TREE_CODE (bound) == GT_EXPR)
6265 a = TREE_OPERAND (bound, 1);
6266 else
6267 return NULL_TREE;
6269 typea = TREE_TYPE (a);
6270 if (!INTEGRAL_TYPE_P (typea)
6271 && !POINTER_TYPE_P (typea))
6272 return NULL_TREE;
6274 if (TREE_CODE (ineq) == LT_EXPR)
6276 a1 = TREE_OPERAND (ineq, 1);
6277 y = TREE_OPERAND (ineq, 0);
6279 else if (TREE_CODE (ineq) == GT_EXPR)
6281 a1 = TREE_OPERAND (ineq, 0);
6282 y = TREE_OPERAND (ineq, 1);
6284 else
6285 return NULL_TREE;
6287 if (TREE_TYPE (a1) != typea)
6288 return NULL_TREE;
6290 diff = fold (build2 (MINUS_EXPR, typea, a1, a));
6291 if (!integer_onep (diff))
6292 return NULL_TREE;
6294 return fold (build2 (GE_EXPR, type, a, y));
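/* A worked instance: given BOUND `a < x' and INEQ `a + 1 > y', DIFF
   is (a + 1) - a = 1, so the result is `a >= y'.  This is safe in
   the combined expression because `a < x' guarantees that `a' is not
   the maximum value, so `a + 1' cannot wrap around.  */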
6297 /* Perform constant folding and related simplification of EXPR.
6298 The related simplifications include x*1 => x, x*0 => 0, etc.,
6299 and application of the associative law.
6300 NOP_EXPR conversions may be removed freely (as long as we
6301 are careful not to change the type of the overall expression).
6302 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
6303 but we can constant-fold them if they have constant operands. */
6305 #ifdef ENABLE_FOLD_CHECKING
6306 # define fold(x) fold_1 (x)
6307 static tree fold_1 (tree);
6308 static
6309 #endif
6310 tree
6311 fold (tree expr)
6313 const tree t = expr;
6314 const tree type = TREE_TYPE (expr);
6315 tree t1 = NULL_TREE;
6316 tree tem;
6317 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
6318 enum tree_code code = TREE_CODE (t);
6319 enum tree_code_class kind = TREE_CODE_CLASS (code);
6321 /* WINS will be nonzero when the switch is done
6322 if all operands are constant. */
6323 int wins = 1;
6325 /* Return right away if a constant. */
6326 if (kind == tcc_constant)
6327 return t;
6329 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6331 tree subop;
6333 /* Special case for conversion ops that can have fixed point args. */
6334 arg0 = TREE_OPERAND (t, 0);
6336 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6337 if (arg0 != 0)
6338 STRIP_SIGN_NOPS (arg0);
6340 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
6341 subop = TREE_REALPART (arg0);
6342 else
6343 subop = arg0;
6345 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
6346 && TREE_CODE (subop) != REAL_CST)
6347 /* Note that TREE_CONSTANT isn't enough:
6348 static var addresses are constant but we can't
6349 do arithmetic on them. */
6350 wins = 0;
6352 else if (IS_EXPR_CODE_CLASS (kind))
6354 int len = TREE_CODE_LENGTH (code);
6355 int i;
6356 for (i = 0; i < len; i++)
6358 tree op = TREE_OPERAND (t, i);
6359 tree subop;
6361 if (op == 0)
6362 continue; /* Valid for CALL_EXPR, at least. */
6364 /* Strip any conversions that don't change the mode. This is
6365 safe for every expression, except for a comparison expression
6366 because its signedness is derived from its operands. So, in
6367 the latter case, only strip conversions that don't change the
6368 signedness.
6370 Note that this is done as an internal manipulation within the
6371 constant folder, in order to find the simplest representation
6372 of the arguments so that their form can be studied. In any
6373 cases, the appropriate type conversions should be put back in
6374 the tree that will get out of the constant folder. */
6375 if (kind == tcc_comparison)
6376 STRIP_SIGN_NOPS (op);
6377 else
6378 STRIP_NOPS (op);
6380 if (TREE_CODE (op) == COMPLEX_CST)
6381 subop = TREE_REALPART (op);
6382 else
6383 subop = op;
6385 if (TREE_CODE (subop) != INTEGER_CST
6386 && TREE_CODE (subop) != REAL_CST)
6387 /* Note that TREE_CONSTANT isn't enough:
6388 static var addresses are constant but we can't
6389 do arithmetic on them. */
6390 wins = 0;
6392 if (i == 0)
6393 arg0 = op;
6394 else if (i == 1)
6395 arg1 = op;
6399 /* If this is a commutative operation, and ARG0 is a constant, move it
6400 to ARG1 to reduce the number of tests below. */
6401 if (commutative_tree_code (code)
6402 && tree_swap_operands_p (arg0, arg1, true))
6403 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6404 TREE_OPERAND (t, 0)));
6406 /* Now WINS is set as described above,
6407 ARG0 is the first operand of EXPR,
6408 and ARG1 is the second operand (if it has more than one operand).
6410 First check for cases where an arithmetic operation is applied to a
6411 compound, conditional, or comparison operation. Push the arithmetic
6412 operation inside the compound or conditional to see if any folding
6413 can then be done. Convert comparison to conditional for this purpose.
6414 This also optimizes non-constant cases that used to be done in
6415 expand_expr.
6417 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
6418 where one of the operands is a truth value and the other is a truth
6419 value or a BIT_AND_EXPR with the constant 1. In that case, the
6420 code below would make the expression more complex. Change it to a
6421 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6422 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6424 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6425 || code == EQ_EXPR || code == NE_EXPR)
6426 && ((truth_value_p (TREE_CODE (arg0))
6427 && (truth_value_p (TREE_CODE (arg1))
6428 || (TREE_CODE (arg1) == BIT_AND_EXPR
6429 && integer_onep (TREE_OPERAND (arg1, 1)))))
6430 || (truth_value_p (TREE_CODE (arg1))
6431 && (truth_value_p (TREE_CODE (arg0))
6432 || (TREE_CODE (arg0) == BIT_AND_EXPR
6433 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6435 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6436 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6437 : TRUTH_XOR_EXPR,
6438 type, fold_convert (boolean_type_node, arg0),
6439 fold_convert (boolean_type_node, arg1)));
6441 if (code == EQ_EXPR)
6442 tem = invert_truthvalue (tem);
6444 return tem;
6447 if (TREE_CODE_CLASS (code) == tcc_unary)
6449 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6450 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6451 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6452 else if (TREE_CODE (arg0) == COND_EXPR)
6454 tree arg01 = TREE_OPERAND (arg0, 1);
6455 tree arg02 = TREE_OPERAND (arg0, 2);
6456 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6457 arg01 = fold (build1 (code, type, arg01));
6458 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6459 arg02 = fold (build1 (code, type, arg02));
6460 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6461 arg01, arg02));
6463 /* If this was a conversion, and all we did was to move it
6464 inside the COND_EXPR, bring it back out. But leave it if
6465 it is a conversion from integer to integer and the
6466 result precision is no wider than a word since such a
6467 conversion is cheap and may be optimized away by combine,
6468 while it couldn't if it were outside the COND_EXPR. Then return
6469 so we don't get into an infinite recursion loop taking the
6470 conversion out and then back in. */
6472 if ((code == NOP_EXPR || code == CONVERT_EXPR
6473 || code == NON_LVALUE_EXPR)
6474 && TREE_CODE (tem) == COND_EXPR
6475 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6476 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6477 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6478 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6479 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6480 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6481 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6482 && (INTEGRAL_TYPE_P
6483 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6484 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6485 || flag_syntax_only))
6486 tem = build1 (code, type,
6487 build3 (COND_EXPR,
6488 TREE_TYPE (TREE_OPERAND
6489 (TREE_OPERAND (tem, 1), 0)),
6490 TREE_OPERAND (tem, 0),
6491 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6492 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6493 return tem;
6495 else if (COMPARISON_CLASS_P (arg0))
6497 if (TREE_CODE (type) == BOOLEAN_TYPE)
6499 arg0 = copy_node (arg0);
6500 TREE_TYPE (arg0) = type;
6501 return arg0;
6503 else if (TREE_CODE (type) != INTEGER_TYPE)
6504 return fold (build3 (COND_EXPR, type, arg0,
6505 fold (build1 (code, type,
6506 integer_one_node)),
6507 fold (build1 (code, type,
6508 integer_zero_node))));
6511 else if (TREE_CODE_CLASS (code) == tcc_comparison
6512 && TREE_CODE (arg0) == COMPOUND_EXPR)
6513 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6514 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6515 else if (TREE_CODE_CLASS (code) == tcc_comparison
6516 && TREE_CODE (arg1) == COMPOUND_EXPR)
6517 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6518 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6519 else if (TREE_CODE_CLASS (code) == tcc_binary
6520 || TREE_CODE_CLASS (code) == tcc_comparison)
6522 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6523 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6524 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6525 arg1)));
6526 if (TREE_CODE (arg1) == COMPOUND_EXPR
6527 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6528 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6529 fold (build2 (code, type,
6530 arg0, TREE_OPERAND (arg1, 1))));
6532 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
6534 tem = fold_binary_op_with_conditional_arg (t, code, arg0, arg1,
6535 /*cond_first_p=*/1);
6536 if (tem != NULL_TREE)
6537 return tem;
6540 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
6542 tem = fold_binary_op_with_conditional_arg (t, code, arg1, arg0,
6543 /*cond_first_p=*/0);
6544 if (tem != NULL_TREE)
6545 return tem;
6549 switch (code)
6551 case CONST_DECL:
6552 return fold (DECL_INITIAL (t));
6554 case NOP_EXPR:
6555 case FLOAT_EXPR:
6556 case CONVERT_EXPR:
6557 case FIX_TRUNC_EXPR:
6558 case FIX_CEIL_EXPR:
6559 case FIX_FLOOR_EXPR:
6560 case FIX_ROUND_EXPR:
6561 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6562 return TREE_OPERAND (t, 0);
6564 /* Handle cases of two conversions in a row. */
6565 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6566 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6568 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6569 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6570 int inside_int = INTEGRAL_TYPE_P (inside_type);
6571 int inside_ptr = POINTER_TYPE_P (inside_type);
6572 int inside_float = FLOAT_TYPE_P (inside_type);
6573 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6574 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6575 int inter_int = INTEGRAL_TYPE_P (inter_type);
6576 int inter_ptr = POINTER_TYPE_P (inter_type);
6577 int inter_float = FLOAT_TYPE_P (inter_type);
6578 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6579 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6580 int final_int = INTEGRAL_TYPE_P (type);
6581 int final_ptr = POINTER_TYPE_P (type);
6582 int final_float = FLOAT_TYPE_P (type);
6583 unsigned int final_prec = TYPE_PRECISION (type);
6584 int final_unsignedp = TYPE_UNSIGNED (type);
6586 /* In addition to the cases of two conversions in a row
6587 handled below, if we are converting something to its own
6588 type via an object of identical or wider precision, neither
6589 conversion is needed. */
6590 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6591 && ((inter_int && final_int) || (inter_float && final_float))
6592 && inter_prec >= final_prec)
6593 return fold (build1 (code, type,
6594 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6596 /* Likewise, if the intermediate and final types are either both
6597 float or both integer, we don't need the middle conversion if
6598 it is wider than the final type and doesn't change the signedness
6599 (for integers). Avoid this if the final type is a pointer
6600 since then we sometimes need the inner conversion. Likewise if
6601 the outer has a precision not equal to the size of its mode. */
6602 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6603 || (inter_float && inside_float))
6604 && inter_prec >= inside_prec
6605 && (inter_float || inter_unsignedp == inside_unsignedp)
6606 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6607 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6608 && ! final_ptr)
6609 return fold (build1 (code, type,
6610 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6612 /* If we have a sign-extension of a zero-extended value, we can
6613 replace that by a single zero-extension. */
6614 if (inside_int && inter_int && final_int
6615 && inside_prec < inter_prec && inter_prec < final_prec
6616 && inside_unsignedp && !inter_unsignedp)
6617 return fold (build1 (code, type,
6618 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6620 /* Two conversions in a row are not needed unless:
6621 - some conversion is floating-point (overstrict for now), or
6622 - the intermediate type is narrower than both initial and
6623 final, or
6624 - the intermediate type and innermost type differ in signedness,
6625 and the outermost type is wider than the intermediate, or
6626 - the initial type is a pointer type and the precisions of the
6627 intermediate and final types differ, or
6628 - the final type is a pointer type and the precisions of the
6629 initial and intermediate types differ. */
6630 if (! inside_float && ! inter_float && ! final_float
6631 && (inter_prec > inside_prec || inter_prec > final_prec)
6632 && ! (inside_int && inter_int
6633 && inter_unsignedp != inside_unsignedp
6634 && inter_prec < final_prec)
6635 && ((inter_unsignedp && inter_prec > inside_prec)
6636 == (final_unsignedp && final_prec > inter_prec))
6637 && ! (inside_ptr && inter_prec != final_prec)
6638 && ! (final_ptr && inside_prec != inter_prec)
6639 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6640 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6641 && ! final_ptr)
6642 return fold (build1 (code, type,
6643 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
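/* Concrete examples of the rules above: for char c, `(short) (int) c'
   drops the wider intermediate conversion and folds to `(short) c';
   and for unsigned char u, the sign-extension of a zero-extension
   `(int) (short) u' folds to the single zero-extension `(int) u'.  */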
6646 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6647 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6648 /* Detect assigning a bitfield. */
6649 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6650 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6652 /* Don't leave an assignment inside a conversion
6653 unless assigning a bitfield. */
6654 tree prev = TREE_OPERAND (t, 0);
6655 tem = copy_node (t);
6656 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6657 /* First do the assignment, then return converted constant. */
6658 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6659 TREE_NO_WARNING (tem) = 1;
6660 TREE_USED (tem) = 1;
6661 return tem;
6664 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6665 constant (if x has signed type, the sign bit cannot be set
6666 in c). This folds extension into the BIT_AND_EXPR. */
6667 if (INTEGRAL_TYPE_P (type)
6668 && TREE_CODE (type) != BOOLEAN_TYPE
6669 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6670 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6672 tree and = TREE_OPERAND (t, 0);
6673 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6674 int change = 0;
6676 if (TYPE_UNSIGNED (TREE_TYPE (and))
6677 || (TYPE_PRECISION (type)
6678 <= TYPE_PRECISION (TREE_TYPE (and))))
6679 change = 1;
6680 else if (TYPE_PRECISION (TREE_TYPE (and1))
6681 <= HOST_BITS_PER_WIDE_INT
6682 && host_integerp (and1, 1))
6684 unsigned HOST_WIDE_INT cst;
6686 cst = tree_low_cst (and1, 1);
6687 cst &= (HOST_WIDE_INT) -1
6688 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6689 change = (cst == 0);
6690 #ifdef LOAD_EXTEND_OP
6691 if (change
6692 && !flag_syntax_only
6693 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6694 == ZERO_EXTEND))
6696 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6697 and0 = fold_convert (uns, and0);
6698 and1 = fold_convert (uns, and1);
6700 #endif
6702 if (change)
6703 return fold (build2 (BIT_AND_EXPR, type,
6704 fold_convert (type, and0),
6705 fold_convert (type, and1)));
6708 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6709 T2 being pointers to types of the same size. */
6710 if (POINTER_TYPE_P (TREE_TYPE (t))
6711 && BINARY_CLASS_P (arg0)
6712 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6713 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6715 tree arg00 = TREE_OPERAND (arg0, 0);
6716 tree t0 = TREE_TYPE (t);
6717 tree t1 = TREE_TYPE (arg00);
6718 tree tt0 = TREE_TYPE (t0);
6719 tree tt1 = TREE_TYPE (t1);
6720 tree s0 = TYPE_SIZE (tt0);
6721 tree s1 = TYPE_SIZE (tt1);
6723 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6724 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6725 TREE_OPERAND (arg0, 1));
6728 tem = fold_convert_const (code, type, arg0);
6729 return tem ? tem : t;
6731 case VIEW_CONVERT_EXPR:
6732 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6733 return build1 (VIEW_CONVERT_EXPR, type,
6734 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6735 return t;
6737 case COMPONENT_REF:
6738 if (TREE_CODE (arg0) == CONSTRUCTOR
6739 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6741 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6742 if (m)
6743 return TREE_VALUE (m);
6745 return t;
6747 case RANGE_EXPR:
6748 if (TREE_CONSTANT (t) != wins)
6750 tem = copy_node (t);
6751 TREE_CONSTANT (tem) = wins;
6752 TREE_INVARIANT (tem) = wins;
6753 return tem;
6755 return t;
6757 case NEGATE_EXPR:
6758 if (negate_expr_p (arg0))
6759 return fold_convert (type, negate_expr (arg0));
6760 return t;
6762 case ABS_EXPR:
6763 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6764 return fold_abs_const (arg0, type);
6765 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6766 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6767 /* Convert fabs((double)float) into (double)fabsf(float). */
6768 else if (TREE_CODE (arg0) == NOP_EXPR
6769 && TREE_CODE (type) == REAL_TYPE)
6771 tree targ0 = strip_float_extensions (arg0);
6772 if (targ0 != arg0)
6773 return fold_convert (type, fold (build1 (ABS_EXPR,
6774 TREE_TYPE (targ0),
6775 targ0)));
6777 else if (tree_expr_nonnegative_p (arg0))
6778 return arg0;
6779 return t;
6781 case CONJ_EXPR:
6782 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6783 return fold_convert (type, arg0);
6784 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6785 return build2 (COMPLEX_EXPR, type,
6786 TREE_OPERAND (arg0, 0),
6787 negate_expr (TREE_OPERAND (arg0, 1)));
6788 else if (TREE_CODE (arg0) == COMPLEX_CST)
6789 return build_complex (type, TREE_REALPART (arg0),
6790 negate_expr (TREE_IMAGPART (arg0)));
6791 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6792 return fold (build2 (TREE_CODE (arg0), type,
6793 fold (build1 (CONJ_EXPR, type,
6794 TREE_OPERAND (arg0, 0))),
6795 fold (build1 (CONJ_EXPR, type,
6796 TREE_OPERAND (arg0, 1)))));
6797 else if (TREE_CODE (arg0) == CONJ_EXPR)
6798 return TREE_OPERAND (arg0, 0);
6799 return t;
6801 case BIT_NOT_EXPR:
6802 if (TREE_CODE (arg0) == INTEGER_CST)
6803 return fold_not_const (arg0, type);
6804 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6805 return TREE_OPERAND (arg0, 0);
6806 return t;
6808 case PLUS_EXPR:
6809 /* A + (-B) -> A - B */
6810 if (TREE_CODE (arg1) == NEGATE_EXPR)
6811 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6812 /* (-A) + B -> B - A */
6813 if (TREE_CODE (arg0) == NEGATE_EXPR
6814 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6815 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6816 if (! FLOAT_TYPE_P (type))
6818 if (integer_zerop (arg1))
6819 return non_lvalue (fold_convert (type, arg0));
6821 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6822 with a constant, and the two constants have no bits in common,
6823 we should treat this as a BIT_IOR_EXPR since this may produce more
6824 simplifications. */
6825 if (TREE_CODE (arg0) == BIT_AND_EXPR
6826 && TREE_CODE (arg1) == BIT_AND_EXPR
6827 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6828 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6829 && integer_zerop (const_binop (BIT_AND_EXPR,
6830 TREE_OPERAND (arg0, 1),
6831 TREE_OPERAND (arg1, 1), 0)))
6833 code = BIT_IOR_EXPR;
6834 goto bit_ior;
6837 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6838 (plus (plus (mult) (mult)) (foo)) so that we can
6839 take advantage of the factoring cases below. */
6840 if (((TREE_CODE (arg0) == PLUS_EXPR
6841 || TREE_CODE (arg0) == MINUS_EXPR)
6842 && TREE_CODE (arg1) == MULT_EXPR)
6843 || ((TREE_CODE (arg1) == PLUS_EXPR
6844 || TREE_CODE (arg1) == MINUS_EXPR)
6845 && TREE_CODE (arg0) == MULT_EXPR))
6847 tree parg0, parg1, parg, marg;
6848 enum tree_code pcode;
6850 if (TREE_CODE (arg1) == MULT_EXPR)
6851 parg = arg0, marg = arg1;
6852 else
6853 parg = arg1, marg = arg0;
6854 pcode = TREE_CODE (parg);
6855 parg0 = TREE_OPERAND (parg, 0);
6856 parg1 = TREE_OPERAND (parg, 1);
6857 STRIP_NOPS (parg0);
6858 STRIP_NOPS (parg1);
6860 if (TREE_CODE (parg0) == MULT_EXPR
6861 && TREE_CODE (parg1) != MULT_EXPR)
6862 return fold (build2 (pcode, type,
6863 fold (build2 (PLUS_EXPR, type,
6864 fold_convert (type, parg0),
6865 fold_convert (type, marg))),
6866 fold_convert (type, parg1)));
6867 if (TREE_CODE (parg0) != MULT_EXPR
6868 && TREE_CODE (parg1) == MULT_EXPR)
6869 return fold (build2 (PLUS_EXPR, type,
6870 fold_convert (type, parg0),
6871 fold (build2 (pcode, type,
6872 fold_convert (type, marg),
6873 fold_convert (type,
6874 parg1)))));
6877 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6879 tree arg00, arg01, arg10, arg11;
6880 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6882 /* (A * C) + (B * C) -> (A+B) * C.
6883 We are most concerned about the case where C is a constant,
6884 but other combinations show up during loop reduction. Since
6885 it is not difficult, try all four possibilities. */
6887 arg00 = TREE_OPERAND (arg0, 0);
6888 arg01 = TREE_OPERAND (arg0, 1);
6889 arg10 = TREE_OPERAND (arg1, 0);
6890 arg11 = TREE_OPERAND (arg1, 1);
6891 same = NULL_TREE;
6893 if (operand_equal_p (arg01, arg11, 0))
6894 same = arg01, alt0 = arg00, alt1 = arg10;
6895 else if (operand_equal_p (arg00, arg10, 0))
6896 same = arg00, alt0 = arg01, alt1 = arg11;
6897 else if (operand_equal_p (arg00, arg11, 0))
6898 same = arg00, alt0 = arg01, alt1 = arg10;
6899 else if (operand_equal_p (arg01, arg10, 0))
6900 same = arg01, alt0 = arg00, alt1 = arg11;
6902 /* No identical multiplicands; see if we can find a common
6903 power-of-two factor in non-power-of-two multiplies. This
6904 can help in multi-dimensional array access. */
6905 else if (TREE_CODE (arg01) == INTEGER_CST
6906 && TREE_CODE (arg11) == INTEGER_CST
6907 && TREE_INT_CST_HIGH (arg01) == 0
6908 && TREE_INT_CST_HIGH (arg11) == 0)
6910 HOST_WIDE_INT int01, int11, tmp;
6911 int01 = TREE_INT_CST_LOW (arg01);
6912 int11 = TREE_INT_CST_LOW (arg11);
6914 /* Move min of absolute values to int11. */
6915 if ((int01 >= 0 ? int01 : -int01)
6916 < (int11 >= 0 ? int11 : -int11))
6918 tmp = int01, int01 = int11, int11 = tmp;
6919 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6920 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6923 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6925 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6926 build_int_cst (NULL_TREE,
6927 int01 / int11)));
6928 alt1 = arg10;
6929 same = arg11;
6933 if (same)
6934 return fold (build2 (MULT_EXPR, type,
6935 fold (build2 (PLUS_EXPR, type,
6936 fold_convert (type, alt0),
6937 fold_convert (type, alt1))),
6938 same));
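/* Concrete examples: `a * c + b * c' matches the identical-multiplicand
   cases and becomes `(a + b) * c', while `i * 12 + j * 4' matches the
   power-of-two case (4 divides 12) and becomes `(i * 3 + j) * 4'.  */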
6941 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
6942 of the array. The loop optimizer sometimes produces this type of
6943 expression. */
6944 if (TREE_CODE (arg0) == ADDR_EXPR
6945 && TREE_CODE (arg1) == MULT_EXPR)
6947 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
6948 if (tem)
6949 return fold_convert (type, fold (tem));
6951 else if (TREE_CODE (arg1) == ADDR_EXPR
6952 && TREE_CODE (arg0) == MULT_EXPR)
6954 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
6955 if (tem)
6956 return fold_convert (type, fold (tem));
6959 else
6961 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6962 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6963 return non_lvalue (fold_convert (type, arg0));
6965 /* Likewise if the operands are reversed. */
6966 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6967 return non_lvalue (fold_convert (type, arg1));
6969 /* Convert X + -C into X - C. */
6970 if (TREE_CODE (arg1) == REAL_CST
6971 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
6973 tem = fold_negate_const (arg1, type);
6974 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
6975 return fold (build2 (MINUS_EXPR, type,
6976 fold_convert (type, arg0),
6977 fold_convert (type, tem)));
6980 /* Convert x+x into x*2.0. */
6981 if (operand_equal_p (arg0, arg1, 0)
6982 && SCALAR_FLOAT_TYPE_P (type))
6983 return fold (build2 (MULT_EXPR, type, arg0,
6984 build_real (type, dconst2)));
6986 /* Convert x*c+x into x*(c+1). */
6987 if (flag_unsafe_math_optimizations
6988 && TREE_CODE (arg0) == MULT_EXPR
6989 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6990 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6991 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6993 REAL_VALUE_TYPE c;
6995 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6996 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6997 return fold (build2 (MULT_EXPR, type, arg1,
6998 build_real (type, c)));
7001 /* Convert x+x*c into x*(c+1). */
7002 if (flag_unsafe_math_optimizations
7003 && TREE_CODE (arg1) == MULT_EXPR
7004 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7005 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7006 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7008 REAL_VALUE_TYPE c;
7010 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7011 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7012 return fold (build2 (MULT_EXPR, type, arg0,
7013 build_real (type, c)));
7016 /* Convert x*c1+x*c2 into x*(c1+c2). */
7017 if (flag_unsafe_math_optimizations
7018 && TREE_CODE (arg0) == MULT_EXPR
7019 && TREE_CODE (arg1) == MULT_EXPR
7020 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7021 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7022 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7023 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7024 && operand_equal_p (TREE_OPERAND (arg0, 0),
7025 TREE_OPERAND (arg1, 0), 0))
7027 REAL_VALUE_TYPE c1, c2;
7029 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7030 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7031 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7032 return fold (build2 (MULT_EXPR, type,
7033 TREE_OPERAND (arg0, 0),
7034 build_real (type, c1)));
7036 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7037 if (flag_unsafe_math_optimizations
7038 && TREE_CODE (arg1) == PLUS_EXPR
7039 && TREE_CODE (arg0) != MULT_EXPR)
7041 tree tree10 = TREE_OPERAND (arg1, 0);
7042 tree tree11 = TREE_OPERAND (arg1, 1);
7043 if (TREE_CODE (tree11) == MULT_EXPR
7044 && TREE_CODE (tree10) == MULT_EXPR)
7046 tree tree0;
7047 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
7048 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
7051 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
7052 if (flag_unsafe_math_optimizations
7053 && TREE_CODE (arg0) == PLUS_EXPR
7054 && TREE_CODE (arg1) != MULT_EXPR)
7056 tree tree00 = TREE_OPERAND (arg0, 0);
7057 tree tree01 = TREE_OPERAND (arg0, 1);
7058 if (TREE_CODE (tree01) == MULT_EXPR
7059 && TREE_CODE (tree00) == MULT_EXPR)
7061 tree tree0;
7062 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
7063 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
7068 bit_rotate:
7069 /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
7070 is a rotate of A by C1 bits. */
7071 /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
7072 is a rotate of A by B bits. */
7074 enum tree_code code0, code1;
7075 code0 = TREE_CODE (arg0);
7076 code1 = TREE_CODE (arg1);
7077 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7078 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7079 && operand_equal_p (TREE_OPERAND (arg0, 0),
7080 TREE_OPERAND (arg1, 0), 0)
7081 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7083 tree tree01, tree11;
7084 enum tree_code code01, code11;
7086 tree01 = TREE_OPERAND (arg0, 1);
7087 tree11 = TREE_OPERAND (arg1, 1);
7088 STRIP_NOPS (tree01);
7089 STRIP_NOPS (tree11);
7090 code01 = TREE_CODE (tree01);
7091 code11 = TREE_CODE (tree11);
7092 if (code01 == INTEGER_CST
7093 && code11 == INTEGER_CST
7094 && TREE_INT_CST_HIGH (tree01) == 0
7095 && TREE_INT_CST_HIGH (tree11) == 0
7096 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7097 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7098 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7099 code0 == LSHIFT_EXPR ? tree01 : tree11);
7100 else if (code11 == MINUS_EXPR)
7102 tree tree110, tree111;
7103 tree110 = TREE_OPERAND (tree11, 0);
7104 tree111 = TREE_OPERAND (tree11, 1);
7105 STRIP_NOPS (tree110);
7106 STRIP_NOPS (tree111);
7107 if (TREE_CODE (tree110) == INTEGER_CST
7108 && 0 == compare_tree_int (tree110,
7109 TYPE_PRECISION
7110 (TREE_TYPE (TREE_OPERAND
7111 (arg0, 0))))
7112 && operand_equal_p (tree01, tree111, 0))
7113 return build2 ((code0 == LSHIFT_EXPR
7114 ? LROTATE_EXPR
7115 : RROTATE_EXPR),
7116 type, TREE_OPERAND (arg0, 0), tree01);
7118 else if (code01 == MINUS_EXPR)
7120 tree tree010, tree011;
7121 tree010 = TREE_OPERAND (tree01, 0);
7122 tree011 = TREE_OPERAND (tree01, 1);
7123 STRIP_NOPS (tree010);
7124 STRIP_NOPS (tree011);
7125 if (TREE_CODE (tree010) == INTEGER_CST
7126 && 0 == compare_tree_int (tree010,
7127 TYPE_PRECISION
7128 (TREE_TYPE (TREE_OPERAND
7129 (arg0, 0))))
7130 && operand_equal_p (tree11, tree011, 0))
7131 return build2 ((code0 != LSHIFT_EXPR
7132 ? LROTATE_EXPR
7133 : RROTATE_EXPR),
7134 type, TREE_OPERAND (arg0, 0), tree11);
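/* A worked instance of the rotate detection: for 32-bit unsigned int
   x, `(x << 3) + (x >> 29)' becomes an LROTATE_EXPR of x by 3, and
   the variable form `(x << n) + (x >> (32 - n))' likewise becomes a
   rotate by n.  */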
7139 associate:
7140 /* In most languages, we can't reassociate operations on floats through
7141 parentheses. Rather than remember where the parentheses were, we
7142 don't associate floats at all, unless the user has specified
7143 -funsafe-math-optimizations. */
7145 if (! wins
7146 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7148 tree var0, con0, lit0, minus_lit0;
7149 tree var1, con1, lit1, minus_lit1;
7151 /* Split both trees into variables, constants, and literals. Then
7152 associate each group together, the constants with literals,
7153 then the result with variables. This increases the chances of
7154 literals being recombined later and of generating relocatable
7155 expressions for the sum of a constant and literal. */
7156 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7157 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7158 code == MINUS_EXPR);
7160 /* Only do something if we found more than two objects. Otherwise,
7161 nothing has changed and we risk infinite recursion. */
7162 if (2 < ((var0 != 0) + (var1 != 0)
7163 + (con0 != 0) + (con1 != 0)
7164 + (lit0 != 0) + (lit1 != 0)
7165 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7167 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7168 if (code == MINUS_EXPR)
7169 code = PLUS_EXPR;
7171 var0 = associate_trees (var0, var1, code, type);
7172 con0 = associate_trees (con0, con1, code, type);
7173 lit0 = associate_trees (lit0, lit1, code, type);
7174 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7176 /* Preserve the MINUS_EXPR if the negative part of the literal is
7177 greater than the positive part. Otherwise, the multiplicative
7178 folding code (i.e. extract_muldiv) may be fooled when
7179 unsigned constants are subtracted, as in the following
7180 example: ((X*2 + 4) - 8U)/2. */
7181 if (minus_lit0 && lit0)
7183 if (TREE_CODE (lit0) == INTEGER_CST
7184 && TREE_CODE (minus_lit0) == INTEGER_CST
7185 && tree_int_cst_lt (lit0, minus_lit0))
7187 minus_lit0 = associate_trees (minus_lit0, lit0,
7188 MINUS_EXPR, type);
7189 lit0 = 0;
7191 else
7193 lit0 = associate_trees (lit0, minus_lit0,
7194 MINUS_EXPR, type);
7195 minus_lit0 = 0;
7198 if (minus_lit0)
7200 if (con0 == 0)
7201 return fold_convert (type,
7202 associate_trees (var0, minus_lit0,
7203 MINUS_EXPR, type));
7204 else
7206 con0 = associate_trees (con0, minus_lit0,
7207 MINUS_EXPR, type);
7208 return fold_convert (type,
7209 associate_trees (var0, con0,
7210 PLUS_EXPR, type));
7214 con0 = associate_trees (con0, lit0, code, type);
7215 return fold_convert (type, associate_trees (var0, con0,
7216 code, type));
7220 binary:
7221 if (wins)
7222 t1 = const_binop (code, arg0, arg1, 0);
7223 if (t1 != NULL_TREE)
7225 /* The return value should always have
7226 the same type as the original expression. */
7227 if (TREE_TYPE (t1) != type)
7228 t1 = fold_convert (type, t1);
7230 return t1;
7232 return t;
7234 case MINUS_EXPR:
7235 /* A - (-B) -> A + B */
7236 if (TREE_CODE (arg1) == NEGATE_EXPR)
7237 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
7238 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7239 if (TREE_CODE (arg0) == NEGATE_EXPR
7240 && (FLOAT_TYPE_P (type)
7241 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7242 && negate_expr_p (arg1)
7243 && reorder_operands_p (arg0, arg1))
7244 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
7245 TREE_OPERAND (arg0, 0)));
7247 if (! FLOAT_TYPE_P (type))
7249 if (! wins && integer_zerop (arg0))
7250 return negate_expr (fold_convert (type, arg1));
7251 if (integer_zerop (arg1))
7252 return non_lvalue (fold_convert (type, arg0));
7254 /* Fold A - (A & B) into ~B & A. */
7255 if (!TREE_SIDE_EFFECTS (arg0)
7256 && TREE_CODE (arg1) == BIT_AND_EXPR)
7258 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7259 return fold (build2 (BIT_AND_EXPR, type,
7260 fold (build1 (BIT_NOT_EXPR, type,
7261 TREE_OPERAND (arg1, 0))),
7262 arg0));
7263 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7264 return fold (build2 (BIT_AND_EXPR, type,
7265 fold (build1 (BIT_NOT_EXPR, type,
7266 TREE_OPERAND (arg1, 1))),
7267 arg0));
7270 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7271 any power of 2 minus 1. */
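/* For instance, with A = 13 (binary 1101) and B = 3 (binary 0011):
   (13 & ~3) - (13 & 3) = 12 - 1 = 11, and equally
   (13 ^ 3) - 3 = 14 - 3 = 11.  */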
7272 if (TREE_CODE (arg0) == BIT_AND_EXPR
7273 && TREE_CODE (arg1) == BIT_AND_EXPR
7274 && operand_equal_p (TREE_OPERAND (arg0, 0),
7275 TREE_OPERAND (arg1, 0), 0))
7277 tree mask0 = TREE_OPERAND (arg0, 1);
7278 tree mask1 = TREE_OPERAND (arg1, 1);
7279 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
7281 if (operand_equal_p (tem, mask1, 0))
7283 tem = fold (build2 (BIT_XOR_EXPR, type,
7284 TREE_OPERAND (arg0, 0), mask1));
7285 return fold (build2 (MINUS_EXPR, type, tem, mask1));
7290 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7291 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7292 return non_lvalue (fold_convert (type, arg0));
7294 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7295 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7296 (-ARG1 + ARG0) reduces to -ARG1. */
7297 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7298 return negate_expr (fold_convert (type, arg1));
7300 /* Fold &x - &x. This can happen from &x.foo - &x.
7301 This is unsafe for certain floats even in non-IEEE formats.
7302 In IEEE, it is unsafe because it does the wrong thing for NaNs.
7303 Also note that operand_equal_p is always false if an operand
7304 is volatile. */
7306 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7307 && operand_equal_p (arg0, arg1, 0))
7308 return fold_convert (type, integer_zero_node);
7310 /* A - B -> A + (-B) if B is easily negatable. */
7311 if (!wins && negate_expr_p (arg1)
7312 && ((FLOAT_TYPE_P (type)
7313 /* Avoid this transformation if B is a positive REAL_CST. */
7314 && (TREE_CODE (arg1) != REAL_CST
7315 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7316 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7317 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
7319 /* Try folding difference of addresses. */
7321 HOST_WIDE_INT diff;
7323 if ((TREE_CODE (arg0) == ADDR_EXPR
7324 || TREE_CODE (arg1) == ADDR_EXPR)
7325 && ptr_difference_const (arg0, arg1, &diff))
7326 return build_int_cst_type (type, diff);
7329 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7330 of the array. The loop optimizer sometimes produces this type of
7331 expression. */
7332 if (TREE_CODE (arg0) == ADDR_EXPR
7333 && TREE_CODE (arg1) == MULT_EXPR)
7335 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7336 if (tem)
7337 return fold_convert (type, fold (tem));
7340 if (TREE_CODE (arg0) == MULT_EXPR
7341 && TREE_CODE (arg1) == MULT_EXPR
7342 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7344 /* (A * C) - (B * C) -> (A-B) * C. */
7345 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7346 TREE_OPERAND (arg1, 1), 0))
7347 return fold (build2 (MULT_EXPR, type,
7348 fold (build2 (MINUS_EXPR, type,
7349 TREE_OPERAND (arg0, 0),
7350 TREE_OPERAND (arg1, 0))),
7351 TREE_OPERAND (arg0, 1)));
7352 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7353 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7354 TREE_OPERAND (arg1, 0), 0))
7355 return fold (build2 (MULT_EXPR, type,
7356 TREE_OPERAND (arg0, 0),
7357 fold (build2 (MINUS_EXPR, type,
7358 TREE_OPERAND (arg0, 1),
7359 TREE_OPERAND (arg1, 1)))));
7362 goto associate;
7364 case MULT_EXPR:
7365 /* (-A) * (-B) -> A * B */
7366 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7367 return fold (build2 (MULT_EXPR, type,
7368 TREE_OPERAND (arg0, 0),
7369 negate_expr (arg1)));
7370 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7371 return fold (build2 (MULT_EXPR, type,
7372 negate_expr (arg0),
7373 TREE_OPERAND (arg1, 0)));
7375 if (! FLOAT_TYPE_P (type))
7377 if (integer_zerop (arg1))
7378 return omit_one_operand (type, arg1, arg0);
7379 if (integer_onep (arg1))
7380 return non_lvalue (fold_convert (type, arg0));
7382 /* (a * (1 << b)) is (a << b) */
7383 if (TREE_CODE (arg1) == LSHIFT_EXPR
7384 && integer_onep (TREE_OPERAND (arg1, 0)))
7385 return fold (build2 (LSHIFT_EXPR, type, arg0,
7386 TREE_OPERAND (arg1, 1)));
7387 if (TREE_CODE (arg0) == LSHIFT_EXPR
7388 && integer_onep (TREE_OPERAND (arg0, 0)))
7389 return fold (build2 (LSHIFT_EXPR, type, arg1,
7390 TREE_OPERAND (arg0, 1)));
7392 if (TREE_CODE (arg1) == INTEGER_CST
7393 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
7394 fold_convert (type, arg1),
7395 code, NULL_TREE)))
7396 return fold_convert (type, tem);
7399 else
7401 /* Maybe fold x * 0 to 0. The expressions aren't the same
7402 when x is NaN, since x * 0 is also NaN. Nor are they the
7403 same in modes with signed zeros, since multiplying a
7404 negative value by 0 gives -0, not +0. */
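/* For instance, under IEEE arithmetic -3.0 * 0.0 is -0.0 and
   NaN * 0.0 is NaN, so folding to +0.0 is only valid when neither
   NaNs nor signed zeros are honored.  */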
7405 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7406 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7407 && real_zerop (arg1))
7408 return omit_one_operand (type, arg1, arg0);
7409 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7410 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7411 && real_onep (arg1))
7412 return non_lvalue (fold_convert (type, arg0));
7414 /* Transform x * -1.0 into -x. */
7415 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7416 && real_minus_onep (arg1))
7417 return fold_convert (type, negate_expr (arg0));
7419 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7420 if (flag_unsafe_math_optimizations
7421 && TREE_CODE (arg0) == RDIV_EXPR
7422 && TREE_CODE (arg1) == REAL_CST
7423 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7425 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7426 arg1, 0);
7427 if (tem)
7428 return fold (build2 (RDIV_EXPR, type, tem,
7429 TREE_OPERAND (arg0, 1)));
7432 if (flag_unsafe_math_optimizations)
7434 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7435 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7437 /* Optimizations of root(...)*root(...). */
7438 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7440 tree rootfn, arg, arglist;
7441 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7442 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7444 /* Optimize sqrt(x)*sqrt(x) as x. */
7445 if (BUILTIN_SQRT_P (fcode0)
7446 && operand_equal_p (arg00, arg10, 0)
7447 && ! HONOR_SNANS (TYPE_MODE (type)))
7448 return arg00;
7450 /* Optimize root(x)*root(y) as root(x*y). */
7451 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7452 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7453 arglist = build_tree_list (NULL_TREE, arg);
7454 return build_function_call_expr (rootfn, arglist);
7457 /* Optimize expN(x)*expN(y) as expN(x+y). */
7458 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7460 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7461 tree arg = build2 (PLUS_EXPR, type,
7462 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7463 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7464 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7465 return build_function_call_expr (expfn, arglist);
7468 /* Optimizations of pow(...)*pow(...). */
7469 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7470 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7471 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7473 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7474 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7475 1)));
7476 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7477 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7478 1)));
7480 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7481 if (operand_equal_p (arg01, arg11, 0))
7483 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7484 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7485 tree arglist = tree_cons (NULL_TREE, fold (arg),
7486 build_tree_list (NULL_TREE,
7487 arg01));
7488 return build_function_call_expr (powfn, arglist);
7491 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7492 if (operand_equal_p (arg00, arg10, 0))
7494 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7495 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7496 tree arglist = tree_cons (NULL_TREE, arg00,
7497 build_tree_list (NULL_TREE,
7498 arg));
7499 return build_function_call_expr (powfn, arglist);
7503 /* Optimize tan(x)*cos(x) as sin(x). */
7504 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7505 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7506 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7507 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7508 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7509 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7510 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7511 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7513 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7515 if (sinfn != NULL_TREE)
7516 return build_function_call_expr (sinfn,
7517 TREE_OPERAND (arg0, 1));
7520 /* Optimize x*pow(x,c) as pow(x,c+1). */
7521 if (fcode1 == BUILT_IN_POW
7522 || fcode1 == BUILT_IN_POWF
7523 || fcode1 == BUILT_IN_POWL)
7525 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7526 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7527 1)));
7528 if (TREE_CODE (arg11) == REAL_CST
7529 && ! TREE_CONSTANT_OVERFLOW (arg11)
7530 && operand_equal_p (arg0, arg10, 0))
7532 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7533 REAL_VALUE_TYPE c;
7534 tree arg, arglist;
7536 c = TREE_REAL_CST (arg11);
7537 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7538 arg = build_real (type, c);
7539 arglist = build_tree_list (NULL_TREE, arg);
7540 arglist = tree_cons (NULL_TREE, arg0, arglist);
7541 return build_function_call_expr (powfn, arglist);
7545 /* Optimize pow(x,c)*x as pow(x,c+1). */
7546 if (fcode0 == BUILT_IN_POW
7547 || fcode0 == BUILT_IN_POWF
7548 || fcode0 == BUILT_IN_POWL)
7550 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7551 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7552 1)));
7553 if (TREE_CODE (arg01) == REAL_CST
7554 && ! TREE_CONSTANT_OVERFLOW (arg01)
7555 && operand_equal_p (arg1, arg00, 0))
7557 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7558 REAL_VALUE_TYPE c;
7559 tree arg, arglist;
7561 c = TREE_REAL_CST (arg01);
7562 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7563 arg = build_real (type, c);
7564 arglist = build_tree_list (NULL_TREE, arg);
7565 arglist = tree_cons (NULL_TREE, arg1, arglist);
7566 return build_function_call_expr (powfn, arglist);
7570 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7571 if (! optimize_size
7572 && operand_equal_p (arg0, arg1, 0))
7574 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7576 if (powfn)
7578 tree arg = build_real (type, dconst2);
7579 tree arglist = build_tree_list (NULL_TREE, arg);
7580 arglist = tree_cons (NULL_TREE, arg0, arglist);
7581 return build_function_call_expr (powfn, arglist);
7586 goto associate;
7588 case BIT_IOR_EXPR:
7589 bit_ior:
7590 if (integer_all_onesp (arg1))
7591 return omit_one_operand (type, arg1, arg0);
7592 if (integer_zerop (arg1))
7593 return non_lvalue (fold_convert (type, arg0));
7594 if (operand_equal_p (arg0, arg1, 0))
7595 return non_lvalue (fold_convert (type, arg0));
7597 /* ~X | X is -1. */
7598 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7599 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7601 t1 = build_int_cst (type, -1);
7602 t1 = force_fit_type (t1, 0, false, false);
7603 return omit_one_operand (type, t1, arg1);
7606 /* X | ~X is -1. */
7607 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7608 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7610 t1 = build_int_cst (type, -1);
7611 t1 = force_fit_type (t1, 0, false, false);
7612 return omit_one_operand (type, t1, arg0);
7615 t1 = distribute_bit_expr (code, type, arg0, arg1);
7616 if (t1 != NULL_TREE)
7617 return t1;
7619 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7621 This results in more efficient code for machines without a NAND
7622 instruction. Combine will canonicalize to the first form
7623 which will allow use of NAND instructions provided by the
7624 backend if they exist. */
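/* This is De Morgan's law: ~a | ~b == ~(a & b).  */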
7625 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7626 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7628 return fold (build1 (BIT_NOT_EXPR, type,
7629 build2 (BIT_AND_EXPR, type,
7630 TREE_OPERAND (arg0, 0),
7631 TREE_OPERAND (arg1, 0))));
7634 /* See if this can be simplified into a rotate first. If that
7635 is unsuccessful continue in the association code. */
7636 goto bit_rotate;
7638 case BIT_XOR_EXPR:
7639 if (integer_zerop (arg1))
7640 return non_lvalue (fold_convert (type, arg0));
7641 if (integer_all_onesp (arg1))
7642 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7643 if (operand_equal_p (arg0, arg1, 0))
7644 return omit_one_operand (type, integer_zero_node, arg0);
7646 /* ~X ^ X is -1. */
7647 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7648 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7650 t1 = build_int_cst (type, -1);
7651 t1 = force_fit_type (t1, 0, false, false);
7652 return omit_one_operand (type, t1, arg1);
7655 /* X ^ ~X is -1. */
7656 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7657 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7659 t1 = build_int_cst (type, -1);
7660 t1 = force_fit_type (t1, 0, false, false);
7661 return omit_one_operand (type, t1, arg0);
7664 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7665 with a constant, and the two constants have no bits in common,
7666 we should treat this as a BIT_IOR_EXPR since this may produce more
7667 simplifications. */
7668 if (TREE_CODE (arg0) == BIT_AND_EXPR
7669 && TREE_CODE (arg1) == BIT_AND_EXPR
7670 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7671 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7672 && integer_zerop (const_binop (BIT_AND_EXPR,
7673 TREE_OPERAND (arg0, 1),
7674 TREE_OPERAND (arg1, 1), 0)))
7676 code = BIT_IOR_EXPR;
7677 goto bit_ior;
7680 /* See if this can be simplified into a rotate first. If that
7681 is unsuccessful continue in the association code. */
7682 goto bit_rotate;
7684 case BIT_AND_EXPR:
7685 if (integer_all_onesp (arg1))
7686 return non_lvalue (fold_convert (type, arg0));
7687 if (integer_zerop (arg1))
7688 return omit_one_operand (type, arg1, arg0);
7689 if (operand_equal_p (arg0, arg1, 0))
7690 return non_lvalue (fold_convert (type, arg0));
7692 /* ~X & X is always zero. */
7693 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7694 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7695 return omit_one_operand (type, integer_zero_node, arg1);
7697 /* X & ~X is always zero. */
7698 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7699 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7700 return omit_one_operand (type, integer_zero_node, arg0);
7702 t1 = distribute_bit_expr (code, type, arg0, arg1);
7703 if (t1 != NULL_TREE)
7704 return t1;
7705 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
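/* For instance, with c of type unsigned char, prec is 8 and the
   mask 0377 (0xff) covers every bit the zero-extension can set,
   so the AND is a no-op and (int) c is returned directly.  */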
7706 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7707 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7709 unsigned int prec
7710 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7712 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7713 && (~TREE_INT_CST_LOW (arg1)
7714 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7715 return fold_convert (type, TREE_OPERAND (arg0, 0));
7718 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7720 This results in more efficient code for machines without a NOR
7721 instruction. Combine will canonicalize to the first form
7722 which will allow use of NOR instructions provided by the
7723 backend if they exist. */
7724 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7725 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7727 return fold (build1 (BIT_NOT_EXPR, type,
7728 build2 (BIT_IOR_EXPR, type,
7729 TREE_OPERAND (arg0, 0),
7730 TREE_OPERAND (arg1, 0))));
7733 goto associate;
7735 case RDIV_EXPR:
7736 /* Don't touch a floating-point divide by zero unless the mode
7737 of the constant can represent infinity. */
7738 if (TREE_CODE (arg1) == REAL_CST
7739 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7740 && real_zerop (arg1))
7741 return t;
7743 /* (-A) / (-B) -> A / B */
7744 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7745 return fold (build2 (RDIV_EXPR, type,
7746 TREE_OPERAND (arg0, 0),
7747 negate_expr (arg1)));
7748 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7749 return fold (build2 (RDIV_EXPR, type,
7750 negate_expr (arg0),
7751 TREE_OPERAND (arg1, 0)));
7753 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7754 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7755 && real_onep (arg1))
7756 return non_lvalue (fold_convert (type, arg0));
7758 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7759 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7760 && real_minus_onep (arg1))
7761 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7763 /* If ARG1 is a constant, we can convert this to a multiply by the
7764 reciprocal. This does not have the same rounding properties,
7765 so only do this if -funsafe-math-optimizations. We can actually
7766 always safely do it if ARG1 is a power of two, but it's hard to
7767 tell if it is or not in a portable manner. */
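/* For instance, x/4.0 may always become x*0.25, because 0.25 is
   exactly representable in binary floating point; x/3.0 -> x*(1.0/3.0)
   changes the rounding and therefore requires
   -funsafe-math-optimizations.  */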
7768 if (TREE_CODE (arg1) == REAL_CST)
7770 if (flag_unsafe_math_optimizations
7771 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7772 arg1, 0)))
7773 return fold (build2 (MULT_EXPR, type, arg0, tem));
7774 /* Find the reciprocal if optimizing and the result is exact. */
7775 if (optimize)
7777 REAL_VALUE_TYPE r;
7778 r = TREE_REAL_CST (arg1);
7779 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
7781 tem = build_real (type, r);
7782 return fold (build2 (MULT_EXPR, type, arg0, tem));
7786 /* Convert A/B/C to A/(B*C). */
7787 if (flag_unsafe_math_optimizations
7788 && TREE_CODE (arg0) == RDIV_EXPR)
7789 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7790 fold (build2 (MULT_EXPR, type,
7791 TREE_OPERAND (arg0, 1), arg1))));
7793 /* Convert A/(B/C) to (A/B)*C. */
7794 if (flag_unsafe_math_optimizations
7795 && TREE_CODE (arg1) == RDIV_EXPR)
7796 return fold (build2 (MULT_EXPR, type,
7797 fold (build2 (RDIV_EXPR, type, arg0,
7798 TREE_OPERAND (arg1, 0))),
7799 TREE_OPERAND (arg1, 1)));
7801 /* Convert C1/(X*C2) into (C1/C2)/X. */
7802 if (flag_unsafe_math_optimizations
7803 && TREE_CODE (arg1) == MULT_EXPR
7804 && TREE_CODE (arg0) == REAL_CST
7805 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7807 tree tem = const_binop (RDIV_EXPR, arg0,
7808 TREE_OPERAND (arg1, 1), 0);
7809 if (tem)
7810 return fold (build2 (RDIV_EXPR, type, tem,
7811 TREE_OPERAND (arg1, 0)));
7814 if (flag_unsafe_math_optimizations)
7816 enum built_in_function fcode = builtin_mathfn_code (arg1);
7817 /* Optimize x/expN(y) into x*expN(-y). */
7818 if (BUILTIN_EXPONENT_P (fcode))
7820 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7821 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7822 tree arglist = build_tree_list (NULL_TREE,
7823 fold_convert (type, arg));
7824 arg1 = build_function_call_expr (expfn, arglist);
7825 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7828 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7829 if (fcode == BUILT_IN_POW
7830 || fcode == BUILT_IN_POWF
7831 || fcode == BUILT_IN_POWL)
7833 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7834 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7835 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7836 tree neg11 = fold_convert (type, negate_expr (arg11));
7837 tree arglist = tree_cons (NULL_TREE, arg10,
7838 build_tree_list (NULL_TREE, neg11));
7839 arg1 = build_function_call_expr (powfn, arglist);
7840 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7844 if (flag_unsafe_math_optimizations)
7846 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7847 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7849 /* Optimize sin(x)/cos(x) as tan(x). */
7850 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7851 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7852 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7853 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7854 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7856 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7858 if (tanfn != NULL_TREE)
7859 return build_function_call_expr (tanfn,
7860 TREE_OPERAND (arg0, 1));
7863 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7864 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7865 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7866 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7867 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7868 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7870 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7872 if (tanfn != NULL_TREE)
7874 tree tmp = TREE_OPERAND (arg0, 1);
7875 tmp = build_function_call_expr (tanfn, tmp);
7876 return fold (build2 (RDIV_EXPR, type,
7877 build_real (type, dconst1), tmp));
7881 /* Optimize pow(x,c)/x as pow(x,c-1). */
7882 if (fcode0 == BUILT_IN_POW
7883 || fcode0 == BUILT_IN_POWF
7884 || fcode0 == BUILT_IN_POWL)
7886 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7887 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7888 if (TREE_CODE (arg01) == REAL_CST
7889 && ! TREE_CONSTANT_OVERFLOW (arg01)
7890 && operand_equal_p (arg1, arg00, 0))
7892 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7893 REAL_VALUE_TYPE c;
7894 tree arg, arglist;
7896 c = TREE_REAL_CST (arg01);
7897 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7898 arg = build_real (type, c);
7899 arglist = build_tree_list (NULL_TREE, arg);
7900 arglist = tree_cons (NULL_TREE, arg1, arglist);
7901 return build_function_call_expr (powfn, arglist);
7905 goto binary;
7907 case TRUNC_DIV_EXPR:
7908 case ROUND_DIV_EXPR:
7909 case FLOOR_DIV_EXPR:
7910 case CEIL_DIV_EXPR:
7911 case EXACT_DIV_EXPR:
7912 if (integer_onep (arg1))
7913 return non_lvalue (fold_convert (type, arg0));
7914 if (integer_zerop (arg1))
7915 return t;
7916 /* X / -1 is -X. */
7917 if (!TYPE_UNSIGNED (type)
7918 && TREE_CODE (arg1) == INTEGER_CST
7919 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7920 && TREE_INT_CST_HIGH (arg1) == -1)
7921 return fold_convert (type, negate_expr (arg0));
7923 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7924 operation, EXACT_DIV_EXPR.
7926 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7927 At one time others generated faster code, but it's not clear that they do
7928 after the last round of changes to the DIV code in expmed.c. */
7929 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7930 && multiple_of_p (type, arg0, arg1))
7931 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7933 if (TREE_CODE (arg1) == INTEGER_CST
7934 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7935 code, NULL_TREE)))
7936 return fold_convert (type, tem);
7938 goto binary;
7940 case CEIL_MOD_EXPR:
7941 case FLOOR_MOD_EXPR:
7942 case ROUND_MOD_EXPR:
7943 case TRUNC_MOD_EXPR:
7944 /* X % 1 is always zero, but be sure to preserve any side
7945 effects in X. */
7946 if (integer_onep (arg1))
7947 return omit_one_operand (type, integer_zero_node, arg0);
7949 /* For X % 0, return X % 0 unchanged so that we can get the
7950 proper warnings and errors. */
7951 if (integer_zerop (arg1))
7952 return t;
7954 /* 0 % X is always zero, but be sure to preserve any side
7955 effects in X. Place this after checking for X == 0. */
7956 if (integer_zerop (arg0))
7957 return omit_one_operand (type, integer_zero_node, arg1);
7959 /* X % -1 is zero. */
7960 if (!TYPE_UNSIGNED (type)
7961 && TREE_CODE (arg1) == INTEGER_CST
7962 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7963 && TREE_INT_CST_HIGH (arg1) == -1)
7964 return omit_one_operand (type, integer_zero_node, arg0);
7966 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
7967 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
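/* For instance, for unsigned X, X % 8 becomes X & 7.  The mask is
   C - 1, built below as a low/high word pair so that it is also
   correct when the type is wider than HOST_WIDE_INT.  */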
7968 if (code == TRUNC_MOD_EXPR
7969 && TYPE_UNSIGNED (type)
7970 && integer_pow2p (arg1))
7972 unsigned HOST_WIDE_INT high, low;
7973 tree mask;
7974 int l;
7976 l = tree_log2 (arg1);
7977 if (l >= HOST_BITS_PER_WIDE_INT)
7979 high = ((unsigned HOST_WIDE_INT) 1
7980 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
7981 low = -1;
7983 else
7985 high = 0;
7986 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
7989 mask = build_int_cst_wide (type, low, high);
7990 return fold (build2 (BIT_AND_EXPR, type,
7991 fold_convert (type, arg0), mask));
7994 /* X % -C is the same as X % C. */
7995 if (code == TRUNC_MOD_EXPR
7996 && !TYPE_UNSIGNED (type)
7997 && TREE_CODE (arg1) == INTEGER_CST
7998 && TREE_INT_CST_HIGH (arg1) < 0
7999 && !flag_trapv
8000 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8001 && !sign_bit_p (arg1, arg1))
8002 return fold (build2 (code, type, fold_convert (type, arg0),
8003 fold_convert (type, negate_expr (arg1))));
8005 /* X % -Y is the same as X % Y. */
8006 if (code == TRUNC_MOD_EXPR
8007 && !TYPE_UNSIGNED (type)
8008 && TREE_CODE (arg1) == NEGATE_EXPR
8009 && !flag_trapv)
8010 return fold (build2 (code, type, fold_convert (type, arg0),
8011 fold_convert (type, TREE_OPERAND (arg1, 0))));
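/* For instance, X % -16 folds to X % 16: with truncating division
   the remainder takes the sign of X, so the divisor's sign never
   matters.  C == INT_MIN is excluded above because negating it
   would overflow.  */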
8013 if (TREE_CODE (arg1) == INTEGER_CST
8014 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
8015 code, NULL_TREE)))
8016 return fold_convert (type, tem);
8018 goto binary;
8020 case LROTATE_EXPR:
8021 case RROTATE_EXPR:
8022 if (integer_all_onesp (arg0))
8023 return omit_one_operand (type, arg0, arg1);
8024 goto shift;
8026 case RSHIFT_EXPR:
8027 /* Optimize -1 >> x for arithmetic right shifts. */
8028 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8029 return omit_one_operand (type, arg0, arg1);
8030 /* ... fall through ... */
8032 case LSHIFT_EXPR:
8033 shift:
8034 if (integer_zerop (arg1))
8035 return non_lvalue (fold_convert (type, arg0));
8036 if (integer_zerop (arg0))
8037 return omit_one_operand (type, arg0, arg1);
8039 /* Since a negative shift count is not well-defined,
8040 don't try to compute it in the compiler. */
8041 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8042 return t;
8043 /* Rewrite an LROTATE_EXPR by a constant into an
8044 RROTATE_EXPR by a new constant. */
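/* For instance, rotating a 32-bit value left by 5 becomes rotating
   it right by 27, so later code only has to handle one rotate
   direction.  */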
8045 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8047 tree tem = build_int_cst (NULL_TREE,
8048 GET_MODE_BITSIZE (TYPE_MODE (type)));
8049 tem = fold_convert (TREE_TYPE (arg1), tem);
8050 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8051 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
8054 /* If we have a rotate of a bit operation with the rotate count and
8055 the second operand of the bit operation both constant,
8056 permute the two operations. */
8057 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8058 && (TREE_CODE (arg0) == BIT_AND_EXPR
8059 || TREE_CODE (arg0) == BIT_IOR_EXPR
8060 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8061 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8062 return fold (build2 (TREE_CODE (arg0), type,
8063 fold (build2 (code, type,
8064 TREE_OPERAND (arg0, 0), arg1)),
8065 fold (build2 (code, type,
8066 TREE_OPERAND (arg0, 1), arg1))));
8068 /* Two consecutive rotates adding up to the width of the mode can
8069 be ignored. */
8070 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8071 && TREE_CODE (arg0) == RROTATE_EXPR
8072 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8073 && TREE_INT_CST_HIGH (arg1) == 0
8074 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8075 && ((TREE_INT_CST_LOW (arg1)
8076 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8077 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8078 return TREE_OPERAND (arg0, 0);
8080 goto binary;
8082 case MIN_EXPR:
8083 if (operand_equal_p (arg0, arg1, 0))
8084 return omit_one_operand (type, arg0, arg1);
8085 if (INTEGRAL_TYPE_P (type)
8086 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8087 return omit_one_operand (type, arg1, arg0);
8088 goto associate;
8090 case MAX_EXPR:
8091 if (operand_equal_p (arg0, arg1, 0))
8092 return omit_one_operand (type, arg0, arg1);
8093 if (INTEGRAL_TYPE_P (type)
8094 && TYPE_MAX_VALUE (type)
8095 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8096 return omit_one_operand (type, arg1, arg0);
8097 goto associate;
8099 case TRUTH_NOT_EXPR:
8100 /* The argument to invert_truthvalue must have Boolean type. */
8101 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8102 arg0 = fold_convert (boolean_type_node, arg0);
8104 /* Note that the operand of this must be an int
8105 and its value must be 0 or 1.
8106 ("true" is a fixed value perhaps depending on the language,
8107 but we don't handle values other than 1 correctly yet.) */
8108 tem = invert_truthvalue (arg0);
8109 /* Avoid infinite recursion. */
8110 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
8111 return t;
8112 return fold_convert (type, tem);
8114 case TRUTH_ANDIF_EXPR:
8115 /* Note that the operands of this must be ints
8116 and their values must be 0 or 1.
8117 ("true" is a fixed value perhaps depending on the language.) */
8118 /* If first arg is constant zero, return it. */
8119 if (integer_zerop (arg0))
8120 return fold_convert (type, arg0);
8121 case TRUTH_AND_EXPR:
8122 /* If either arg is constant true, drop it. */
8123 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8124 return non_lvalue (fold_convert (type, arg1));
8125 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8126 /* Preserve sequence points. */
8127 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8128 return non_lvalue (fold_convert (type, arg0));
8129 /* If second arg is constant zero, result is zero, but first arg
8130 must be evaluated. */
8131 if (integer_zerop (arg1))
8132 return omit_one_operand (type, arg1, arg0);
8133 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8134 case will be handled here. */
8135 if (integer_zerop (arg0))
8136 return omit_one_operand (type, arg0, arg1);
8138 /* !X && X is always false. */
8139 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8140 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8141 return omit_one_operand (type, integer_zero_node, arg1);
8142 /* X && !X is always false. */
8143 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8144 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8145 return omit_one_operand (type, integer_zero_node, arg0);
8147 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8148 means A >= Y && A != MAX, but in this case we know that
8149 A < X <= MAX. */
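/* (That is, A < X already rules out A == MAX, so A + 1 cannot wrap
   and the A != MAX half of the expansion may be dropped.)  */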
8151 if (!TREE_SIDE_EFFECTS (arg0)
8152 && !TREE_SIDE_EFFECTS (arg1))
8154 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8155 if (tem)
8156 return fold (build2 (code, type, tem, arg1));
8158 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8159 if (tem)
8160 return fold (build2 (code, type, arg0, tem));
8163 truth_andor:
8164 /* We only do these simplifications if we are optimizing. */
8165 if (!optimize)
8166 return t;
8168 /* Check for things like (A || B) && (A || C). We can convert this
8169 to A || (B && C). Note that either operator can be any of the four
8170 truth and/or operations and the transformation will still be
8171 valid. Also note that we only care about order for the
8172 ANDIF and ORIF operators. If B contains side effects, this
8173 might change the truth-value of A. */
8174 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8175 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8176 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8177 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8178 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8179 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8181 tree a00 = TREE_OPERAND (arg0, 0);
8182 tree a01 = TREE_OPERAND (arg0, 1);
8183 tree a10 = TREE_OPERAND (arg1, 0);
8184 tree a11 = TREE_OPERAND (arg1, 1);
8185 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8186 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8187 && (code == TRUTH_AND_EXPR
8188 || code == TRUTH_OR_EXPR));
8190 if (operand_equal_p (a00, a10, 0))
8191 return fold (build2 (TREE_CODE (arg0), type, a00,
8192 fold (build2 (code, type, a01, a11))));
8193 else if (commutative && operand_equal_p (a00, a11, 0))
8194 return fold (build2 (TREE_CODE (arg0), type, a00,
8195 fold (build2 (code, type, a01, a10))));
8196 else if (commutative && operand_equal_p (a01, a10, 0))
8197 return fold (build2 (TREE_CODE (arg0), type, a01,
8198 fold (build2 (code, type, a00, a11))));
8200 /* This case is tricky because we must either have commutative
8201 operators or else A10 must not have side-effects. */
8203 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8204 && operand_equal_p (a01, a11, 0))
8205 return fold (build2 (TREE_CODE (arg0), type,
8206 fold (build2 (code, type, a00, a10)),
8207 a01));
8210 /* See if we can build a range comparison. */
8211 if (0 != (tem = fold_range_test (t)))
8212 return tem;
8214 /* Check for the possibility of merging component references. If our
8215 lhs is another similar operation, try to merge its rhs with our
8216 rhs. Then try to merge our lhs and rhs. */
8217 if (TREE_CODE (arg0) == code
8218 && 0 != (tem = fold_truthop (code, type,
8219 TREE_OPERAND (arg0, 1), arg1)))
8220 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8222 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8223 return tem;
8225 return t;
8227 case TRUTH_ORIF_EXPR:
8228 /* Note that the operands of this must be ints
8229 and their values must be 0 or 1.
8230 ("true" is a fixed value perhaps depending on the language.) */
8231 /* If first arg is constant true, return it. */
8232 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8233 return fold_convert (type, arg0);
8234 case TRUTH_OR_EXPR:
8235 /* If either arg is constant zero, drop it. */
8236 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8237 return non_lvalue (fold_convert (type, arg1));
8238 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8239 /* Preserve sequence points. */
8240 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8241 return non_lvalue (fold_convert (type, arg0));
8242 /* If second arg is constant true, result is true, but we must
8243 evaluate first arg. */
8244 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8245 return omit_one_operand (type, arg1, arg0);
8246 /* Likewise for first arg, but note this only occurs here for
8247 TRUTH_OR_EXPR. */
8248 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8249 return omit_one_operand (type, arg0, arg1);
8251 /* !X || X is always true. */
8252 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8253 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8254 return omit_one_operand (type, integer_one_node, arg1);
8255 /* X || !X is always true. */
8256 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8257 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8258 return omit_one_operand (type, integer_one_node, arg0);
8260 goto truth_andor;
8262 case TRUTH_XOR_EXPR:
8263 /* If the second arg is constant zero, drop it. */
8264 if (integer_zerop (arg1))
8265 return non_lvalue (fold_convert (type, arg0));
8266 /* If the second arg is constant true, this is a logical inversion. */
8267 if (integer_onep (arg1))
8268 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
8269 /* Identical arguments cancel to zero. */
8270 if (operand_equal_p (arg0, arg1, 0))
8271 return omit_one_operand (type, integer_zero_node, arg0);
8273 /* !X ^ X is always true. */
8274 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8275 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8276 return omit_one_operand (type, integer_one_node, arg1);
8278 /* X ^ !X is always true. */
8279 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8280 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8281 return omit_one_operand (type, integer_one_node, arg0);
8283 return t;
8285 case EQ_EXPR:
8286 case NE_EXPR:
8287 case LT_EXPR:
8288 case GT_EXPR:
8289 case LE_EXPR:
8290 case GE_EXPR:
8291 /* If one arg is a real or integer constant, put it last. */
8292 if (tree_swap_operands_p (arg0, arg1, true))
8293 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
8295 /* If this is an equality comparison of the address of a non-weak
8296 object against zero, then we know the result. */
8297 if ((code == EQ_EXPR || code == NE_EXPR)
8298 && TREE_CODE (arg0) == ADDR_EXPR
8299 && DECL_P (TREE_OPERAND (arg0, 0))
8300 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8301 && integer_zerop (arg1))
8302 return constant_boolean_node (code != EQ_EXPR, type);
8304 /* If this is an equality comparison of the address of two non-weak,
8305 unaliased symbols neither of which are extern (since we do not
8306 have access to attributes for externs), then we know the result. */
8307 if ((code == EQ_EXPR || code == NE_EXPR)
8308 && TREE_CODE (arg0) == ADDR_EXPR
8309 && DECL_P (TREE_OPERAND (arg0, 0))
8310 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8311 && ! lookup_attribute ("alias",
8312 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8313 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8314 && TREE_CODE (arg1) == ADDR_EXPR
8315 && DECL_P (TREE_OPERAND (arg1, 0))
8316 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8317 && ! lookup_attribute ("alias",
8318 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8319 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8320 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
8321 ? code == EQ_EXPR : code != EQ_EXPR,
8322 type);
8324 /* If this is a comparison of two exprs that look like an
8325 ARRAY_REF of the same object, then we can fold this to a
8326 comparison of the two offsets. */
8327 if (COMPARISON_CLASS_P (t))
8329 tree base0, offset0, base1, offset1;
8331 if (extract_array_ref (arg0, &base0, &offset0)
8332 && extract_array_ref (arg1, &base1, &offset1)
8333 && operand_equal_p (base0, base1, 0))
8335 if (offset0 == NULL_TREE
8336 && offset1 == NULL_TREE)
8338 offset0 = integer_zero_node;
8339 offset1 = integer_zero_node;
8341 else if (offset0 == NULL_TREE)
8342 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8343 else if (offset1 == NULL_TREE)
8344 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8346 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
8347 return fold (build2 (code, type, offset0, offset1));
8351 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8353 tree targ0 = strip_float_extensions (arg0);
8354 tree targ1 = strip_float_extensions (arg1);
8355 tree newtype = TREE_TYPE (targ0);
8357 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8358 newtype = TREE_TYPE (targ1);
8360 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8361 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8362 return fold (build2 (code, type, fold_convert (newtype, targ0),
8363 fold_convert (newtype, targ1)));
8365 /* (-a) CMP (-b) -> b CMP a */
8366 if (TREE_CODE (arg0) == NEGATE_EXPR
8367 && TREE_CODE (arg1) == NEGATE_EXPR)
8368 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
8369 TREE_OPERAND (arg0, 0)));
8371 if (TREE_CODE (arg1) == REAL_CST)
8373 REAL_VALUE_TYPE cst;
8374 cst = TREE_REAL_CST (arg1);
8376 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8377 if (TREE_CODE (arg0) == NEGATE_EXPR)
8378 return
8379 fold (build2 (swap_tree_comparison (code), type,
8380 TREE_OPERAND (arg0, 0),
8381 build_real (TREE_TYPE (arg1),
8382 REAL_VALUE_NEGATE (cst))));
8384 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8385 /* a CMP (-0) -> a CMP 0 */
8386 if (REAL_VALUE_MINUS_ZERO (cst))
8387 return fold (build2 (code, type, arg0,
8388 build_real (TREE_TYPE (arg1), dconst0)));
8390 /* x != NaN is always true, other ops are always false. */
8391 if (REAL_VALUE_ISNAN (cst)
8392 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8394 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8395 return omit_one_operand (type, tem, arg0);
8398 /* Fold comparisons against infinity. */
8399 if (REAL_VALUE_ISINF (cst))
8401 tem = fold_inf_compare (code, type, arg0, arg1);
8402 if (tem != NULL_TREE)
8403 return tem;
8407 /* If this is a comparison of a real constant with a PLUS_EXPR
8408 or a MINUS_EXPR of a real constant, we can convert it into a
8409 comparison with a revised real constant as long as no overflow
8410 occurs when unsafe_math_optimizations are enabled. */
8411 if (flag_unsafe_math_optimizations
8412 && TREE_CODE (arg1) == REAL_CST
8413 && (TREE_CODE (arg0) == PLUS_EXPR
8414 || TREE_CODE (arg0) == MINUS_EXPR)
8415 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8416 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8417 ? MINUS_EXPR : PLUS_EXPR,
8418 arg1, TREE_OPERAND (arg0, 1), 0))
8419 && ! TREE_CONSTANT_OVERFLOW (tem))
8420 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8422 /* Likewise, we can simplify a comparison of a real constant with
8423 a MINUS_EXPR whose first operand is also a real constant, i.e.
8424 (c1 - x) < c2 becomes x > c1-c2. */
8425 if (flag_unsafe_math_optimizations
8426 && TREE_CODE (arg1) == REAL_CST
8427 && TREE_CODE (arg0) == MINUS_EXPR
8428 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8429 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8430 arg1, 0))
8431 && ! TREE_CONSTANT_OVERFLOW (tem))
8432 return fold (build2 (swap_tree_comparison (code), type,
8433 TREE_OPERAND (arg0, 1), tem));
8435 /* Fold comparisons against built-in math functions. */
8436 if (TREE_CODE (arg1) == REAL_CST
8437 && flag_unsafe_math_optimizations
8438 && ! flag_errno_math)
8440 enum built_in_function fcode = builtin_mathfn_code (arg0);
8442 if (fcode != END_BUILTINS)
8444 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8445 if (tem != NULL_TREE)
8446 return tem;
8451 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8452 if (TREE_CONSTANT (arg1)
8453 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8454 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8455 /* This optimization is invalid for ordered comparisons
8456 if CONST+INCR overflows or if foo+incr might overflow.
8457 It is likewise invalid for floating point due to rounding.
8458 For pointer types we assume overflow doesn't happen. */
8459 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8460 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8461 && (code == EQ_EXPR || code == NE_EXPR))))
8463 tree varop, newconst;
8465 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8467 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
8468 arg1, TREE_OPERAND (arg0, 1)));
8469 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8470 TREE_OPERAND (arg0, 0),
8471 TREE_OPERAND (arg0, 1));
8473 else
8475 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
8476 arg1, TREE_OPERAND (arg0, 1)));
8477 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8478 TREE_OPERAND (arg0, 0),
8479 TREE_OPERAND (arg0, 1));
8483 /* If VAROP is a reference to a bitfield, we must mask
8484 the constant by the width of the field. */
8485 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8486 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8487 && host_integerp (DECL_SIZE (TREE_OPERAND
8488 (TREE_OPERAND (varop, 0), 1)), 1))
8490 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8491 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8492 tree folded_compare, shift;
8494 /* First check whether the comparison would come out
8495 always the same. If we didn't do this check, the
8496 masking would change the meaning. */
8497 folded_compare = fold (build2 (code, type,
8498 TREE_OPERAND (varop, 0), arg1));
8499 if (integer_zerop (folded_compare)
8500 || integer_onep (folded_compare))
8501 return omit_one_operand (type, folded_compare, varop);
8503 shift = build_int_cst (NULL_TREE,
8504 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8505 shift = fold_convert (TREE_TYPE (varop), shift);
8506 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8507 newconst, shift));
8508 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8509 newconst, shift));
8512 return fold (build2 (code, type, varop, newconst));
8515 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8516 This transformation affects the cases which are handled in later
8517 optimizations involving comparisons with non-negative constants. */
8518 if (TREE_CODE (arg1) == INTEGER_CST
8519 && TREE_CODE (arg0) != INTEGER_CST
8520 && tree_int_cst_sgn (arg1) > 0)
8522 switch (code)
8524 case GE_EXPR:
8525 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8526 return fold (build2 (GT_EXPR, type, arg0, arg1));
8528 case LT_EXPR:
8529 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8530 return fold (build2 (LE_EXPR, type, arg0, arg1));
8532 default:
8533 break;
8537 /* Comparisons with the highest or lowest possible integer of
8538 the specified size will have known values.
8540 This is quite similar to fold_relational_hi_lo; however,
8541 attempts to share the code have been nothing but trouble. */
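/* For instance, for unsigned char X: X > 255 is always 0, X <= 255
   is always 1, X >= 255 becomes X == 255 and X < 255 becomes
   X != 255; the switches below produce exactly these shapes for
   the extreme values of each type.  */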
8543 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8545 if (TREE_CODE (arg1) == INTEGER_CST
8546 && ! TREE_CONSTANT_OVERFLOW (arg1)
8547 && width <= 2 * HOST_BITS_PER_WIDE_INT
8548 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8549 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8551 HOST_WIDE_INT signed_max_hi;
8552 unsigned HOST_WIDE_INT signed_max_lo;
8553 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
8555 if (width <= HOST_BITS_PER_WIDE_INT)
8557 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
8558 - 1;
8559 signed_max_hi = 0;
8560 max_hi = 0;
8562 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8564 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8565 min_lo = 0;
8566 min_hi = 0;
8568 else
8570 max_lo = signed_max_lo;
8571 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8572 min_hi = -1;
8575 else
8577 width -= HOST_BITS_PER_WIDE_INT;
8578 signed_max_lo = -1;
8579 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
8580 - 1;
8581 max_lo = -1;
8582 min_lo = 0;
8584 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8586 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8587 min_hi = 0;
8589 else
8591 max_hi = signed_max_hi;
8592 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8596 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
8597 && TREE_INT_CST_LOW (arg1) == max_lo)
8598 switch (code)
8600 case GT_EXPR:
8601 return omit_one_operand (type, integer_zero_node, arg0);
8603 case GE_EXPR:
8604 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8606 case LE_EXPR:
8607 return omit_one_operand (type, integer_one_node, arg0);
8609 case LT_EXPR:
8610 return fold (build2 (NE_EXPR, type, arg0, arg1));
8612 /* The GE_EXPR and LT_EXPR cases above are not normally
8613 reached because of previous transformations. */
8615 default:
8616 break;
8618 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
8619 == max_hi
8620 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
8621 switch (code)
8623 case GT_EXPR:
8624 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8625 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8626 case LE_EXPR:
8627 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8628 return fold (build2 (NE_EXPR, type, arg0, arg1));
8629 default:
8630 break;
8632 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
8633 == min_hi
8634 && TREE_INT_CST_LOW (arg1) == min_lo)
8635 switch (code)
8637 case LT_EXPR:
8638 return omit_one_operand (type, integer_zero_node, arg0);
8640 case LE_EXPR:
8641 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8643 case GE_EXPR:
8644 return omit_one_operand (type, integer_one_node, arg0);
8646 case GT_EXPR:
8647 return fold (build2 (NE_EXPR, type, arg0, arg1));
8649 default:
8650 break;
8652 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
8653 == min_hi
8654 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
8655 switch (code)
8657 case GE_EXPR:
8658 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8659 return fold (build2 (NE_EXPR, type, arg0, arg1));
8660 case LT_EXPR:
8661 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8662 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8663 default:
8664 break;
8667 else if (!in_gimple_form
8668 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
8669 && TREE_INT_CST_LOW (arg1) == signed_max_lo
8670 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8671 /* signed_type does not work on pointer types. */
8672 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8674 /* The following case also applies to X < signed_max+1
8675 and X >= signed_max+1 because of previous transformations. */
8676 if (code == LE_EXPR || code == GT_EXPR)
8678 tree st0, st1;
8679 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8680 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8681 return fold
8682 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
8683 type, fold_convert (st0, arg0),
8684 fold_convert (st1, integer_zero_node)));
8690 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8691 a MINUS_EXPR of a constant, we can convert it into a comparison with
8692 a revised constant as long as no overflow occurs. */
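/* For instance, X + 3 == 7 becomes X == 4.  This is safe for EQ and
   NE even with wrap-around, since adding a constant merely permutes
   the values of the type; ordered comparisons get no such rewrite
   because of overflow.  */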
8693 if ((code == EQ_EXPR || code == NE_EXPR)
8694 && TREE_CODE (arg1) == INTEGER_CST
8695 && (TREE_CODE (arg0) == PLUS_EXPR
8696 || TREE_CODE (arg0) == MINUS_EXPR)
8697 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8698 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8699 ? MINUS_EXPR : PLUS_EXPR,
8700 arg1, TREE_OPERAND (arg0, 1), 0))
8701 && ! TREE_CONSTANT_OVERFLOW (tem))
8702 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8704 /* Similarly for a NEGATE_EXPR. */
8705 else if ((code == EQ_EXPR || code == NE_EXPR)
8706 && TREE_CODE (arg0) == NEGATE_EXPR
8707 && TREE_CODE (arg1) == INTEGER_CST
8708 && 0 != (tem = negate_expr (arg1))
8709 && TREE_CODE (tem) == INTEGER_CST
8710 && ! TREE_CONSTANT_OVERFLOW (tem))
8711 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8713 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8714 for !=. Don't do this for ordered comparisons due to overflow. */
8715 else if ((code == NE_EXPR || code == EQ_EXPR)
8716 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8717 return fold (build2 (code, type,
8718 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8720 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8721 && TREE_CODE (arg0) == NOP_EXPR)
8723 /* If we are widening one operand of an integer comparison,
8724 see if the other operand is similarly being widened. Perhaps we
8725 can do the comparison in the narrower type. */
8726 tem = fold_widened_comparison (code, type, arg0, arg1);
8727 if (tem)
8728 return tem;
8730 /* Or if we are changing signedness. */
8731 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8732 if (tem)
8733 return tem;
8736 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8737 constant, we can simplify it. */
8738 else if (TREE_CODE (arg1) == INTEGER_CST
8739 && (TREE_CODE (arg0) == MIN_EXPR
8740 || TREE_CODE (arg0) == MAX_EXPR)
8741 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8742 return optimize_minmax_comparison (t);
8744 /* If we are comparing an ABS_EXPR with a constant, we can
8745 convert all the cases into explicit comparisons, but they may
8746 well not be faster than doing the ABS and one comparison.
8747 But ABS (X) <= C is a range comparison, which becomes a subtraction
8748 and a comparison, and is probably faster. */
8749 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8750 && TREE_CODE (arg0) == ABS_EXPR
8751 && ! TREE_SIDE_EFFECTS (arg0)
8752 && (0 != (tem = negate_expr (arg1)))
8753 && TREE_CODE (tem) == INTEGER_CST
8754 && ! TREE_CONSTANT_OVERFLOW (tem))
8755 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8756 build2 (GE_EXPR, type,
8757 TREE_OPERAND (arg0, 0), tem),
8758 build2 (LE_EXPR, type,
8759 TREE_OPERAND (arg0, 0), arg1)));
8761 /* If this is an EQ or NE comparison with zero and ARG0 is
8762 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8763 two operations, but the latter can be done in one less insn
8764 on machines that have only two-operand insns or on which a
8765 constant cannot be the first operand. */
8766 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8767 && TREE_CODE (arg0) == BIT_AND_EXPR)
8769 tree arg00 = TREE_OPERAND (arg0, 0);
8770 tree arg01 = TREE_OPERAND (arg0, 1);
8771 if (TREE_CODE (arg00) == LSHIFT_EXPR
8772 && integer_onep (TREE_OPERAND (arg00, 0)))
8773 return
8774 fold (build2 (code, type,
8775 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8776 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8777 arg01, TREE_OPERAND (arg00, 1)),
8778 fold_convert (TREE_TYPE (arg0),
8779 integer_one_node)),
8780 arg1));
8781 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8782 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8783 return
8784 fold (build2 (code, type,
8785 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8786 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8787 arg00, TREE_OPERAND (arg01, 1)),
8788 fold_convert (TREE_TYPE (arg0),
8789 integer_one_node)),
8790 arg1));
8793 /* If this is an NE or EQ comparison of zero against the result of a
8794 signed MOD operation whose second operand is a power of 2, make
8795 the MOD operation unsigned since it is simpler and equivalent. */
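/* For instance, (int) X % 4 == 0 becomes (unsigned int) X % 4 == 0:
   both are zero exactly when the low two bits of X are clear, and
   the unsigned form can then be reduced to a mask test by the
   TRUNC_MOD_EXPR fold above.  */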
8796 if ((code == NE_EXPR || code == EQ_EXPR)
8797 && integer_zerop (arg1)
8798 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8799 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8800 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8801 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8802 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8803 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8805 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8806 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
8807 fold_convert (newtype,
8808 TREE_OPERAND (arg0, 0)),
8809 fold_convert (newtype,
8810 TREE_OPERAND (arg0, 1))));
8812 return fold (build2 (code, type, newmod,
8813 fold_convert (newtype, arg1)));
8816 /* If this is an NE comparison of zero with an AND of one, remove the
8817 comparison since the AND will give the correct value. */
8818 if (code == NE_EXPR && integer_zerop (arg1)
8819 && TREE_CODE (arg0) == BIT_AND_EXPR
8820 && integer_onep (TREE_OPERAND (arg0, 1)))
8821 return fold_convert (type, arg0);
8823 /* If we have (A & C) == C where C is a power of 2, convert this into
8824 (A & C) != 0. Similarly for NE_EXPR. */
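/* For instance, (A & 8) == 8 becomes (A & 8) != 0: the AND can only
   yield 0 or 8, so comparing against the mask is the same as
   testing for a nonzero result.  */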
8825 if ((code == EQ_EXPR || code == NE_EXPR)
8826 && TREE_CODE (arg0) == BIT_AND_EXPR
8827 && integer_pow2p (TREE_OPERAND (arg0, 1))
8828 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8829 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8830 arg0, fold_convert (TREE_TYPE (arg0),
8831 integer_zero_node)));
8833 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8834 2, then fold the expression into shifts and logical operations. */
8835 tem = fold_single_bit_test (code, arg0, arg1, type);
8836 if (tem)
8837 return tem;
8839 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8840 Similarly for NE_EXPR. */
8841 if ((code == EQ_EXPR || code == NE_EXPR)
8842 && TREE_CODE (arg0) == BIT_AND_EXPR
8843 && TREE_CODE (arg1) == INTEGER_CST
8844 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8846 tree notc = fold (build1 (BIT_NOT_EXPR,
8847 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8848 TREE_OPERAND (arg0, 1)));
8849 tree dandnotc = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8850 arg1, notc));
8851 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8852 if (integer_nonzerop (dandnotc))
8853 return omit_one_operand (type, rslt, arg0);
8856 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8857 Similarly for NE_EXPR. */
8858 if ((code == EQ_EXPR || code == NE_EXPR)
8859 && TREE_CODE (arg0) == BIT_IOR_EXPR
8860 && TREE_CODE (arg1) == INTEGER_CST
8861 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8863 tree notd = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8864 tree candnotd = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8865 TREE_OPERAND (arg0, 1), notd));
8866 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8867 if (integer_nonzerop (candnotd))
8868 return omit_one_operand (type, rslt, arg0);
8871 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8872 and similarly for >= into !=. */
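/* For instance, for unsigned x, x < (1 << y) becomes (x >> y) == 0
   and x >= (1 << y) becomes (x >> y) != 0. */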
8873 if ((code == LT_EXPR || code == GE_EXPR)
8874 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8875 && TREE_CODE (arg1) == LSHIFT_EXPR
8876 && integer_onep (TREE_OPERAND (arg1, 0)))
8877 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8878 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8879 TREE_OPERAND (arg1, 1)),
8880 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8882 else if ((code == LT_EXPR || code == GE_EXPR)
8883 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8884 && (TREE_CODE (arg1) == NOP_EXPR
8885 || TREE_CODE (arg1) == CONVERT_EXPR)
8886 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8887 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8888 return
8889 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8890 fold_convert (TREE_TYPE (arg0),
8891 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8892 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8893 1))),
8894 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8896 /* Simplify comparison of something with itself. (For IEEE
8897 floating-point, we can only do some of these simplifications.) */
8898 if (operand_equal_p (arg0, arg1, 0))
8900 switch (code)
8902 case EQ_EXPR:
8903 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8904 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8905 return constant_boolean_node (1, type);
8906 break;
8908 case GE_EXPR:
8909 case LE_EXPR:
8910 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8911 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8912 return constant_boolean_node (1, type);
8913 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8915 case NE_EXPR:
8916 /* For NE, we can only do this simplification if the operands are
8917 integral or we don't honor IEEE floating-point NaNs. */
8918 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8919 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8920 break;
8921 /* ... fall through ... */
8922 case GT_EXPR:
8923 case LT_EXPR:
8924 return constant_boolean_node (0, type);
8925 default:
8926 gcc_unreachable ();
8930 /* If we are comparing an expression that just has comparisons
8931 of two integer values, arithmetic expressions of those comparisons,
8932 and constants, we can simplify it. There are only three cases
8933 to check: the two values can either be equal, the first can be
8934 greater, or the second can be greater. Fold the expression for
8935 those three values. Since each value must be 0 or 1, we have
8936 eight possibilities, each of which corresponds to the constant 0
8937 or 1 or one of the six possible comparisons.
8939 This handles common cases like (a > b) == 0 but also handles
8940 expressions like ((x > y) - (y > x)) > 0, which supposedly
8941 occur in macroized code. */
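/* For instance, ((x > y) - (y > x)) > 0 yields 1, 0 and 0 for the
   high, equal and low cases respectively, giving mask 4 below, so
   the whole expression folds to x > y. */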
8943 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8945 tree cval1 = 0, cval2 = 0;
8946 int save_p = 0;
8948 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8949 /* Don't handle degenerate cases here; they should already
8950 have been handled anyway. */
8951 && cval1 != 0 && cval2 != 0
8952 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8953 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8954 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8955 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8956 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8957 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8958 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8960 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8961 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8963 /* We can't just pass T to eval_subst in case cval1 or cval2
8964 was the same as ARG1. */
8966 tree high_result
8967 = fold (build2 (code, type,
8968 eval_subst (arg0, cval1, maxval,
8969 cval2, minval),
8970 arg1));
8971 tree equal_result
8972 = fold (build2 (code, type,
8973 eval_subst (arg0, cval1, maxval,
8974 cval2, maxval),
8975 arg1));
8976 tree low_result
8977 = fold (build2 (code, type,
8978 eval_subst (arg0, cval1, minval,
8979 cval2, maxval),
8980 arg1));
8982 /* All three of these results should be 0 or 1. Confirm they
8983 are. Then use those values to select the proper code
8984 to use. */
8986 if ((integer_zerop (high_result)
8987 || integer_onep (high_result))
8988 && (integer_zerop (equal_result)
8989 || integer_onep (equal_result))
8990 && (integer_zerop (low_result)
8991 || integer_onep (low_result)))
8993 /* Make a 3-bit mask with the high-order bit being the
8994 value for `>', the next for `=', and the low for `<'. */
8995 switch ((integer_onep (high_result) * 4)
8996 + (integer_onep (equal_result) * 2)
8997 + integer_onep (low_result))
8999 case 0:
9000 /* Always false. */
9001 return omit_one_operand (type, integer_zero_node, arg0);
9002 case 1:
9003 code = LT_EXPR;
9004 break;
9005 case 2:
9006 code = EQ_EXPR;
9007 break;
9008 case 3:
9009 code = LE_EXPR;
9010 break;
9011 case 4:
9012 code = GT_EXPR;
9013 break;
9014 case 5:
9015 code = NE_EXPR;
9016 break;
9017 case 6:
9018 code = GE_EXPR;
9019 break;
9020 case 7:
9021 /* Always true. */
9022 return omit_one_operand (type, integer_one_node, arg0);
9025 tem = build2 (code, type, cval1, cval2);
9026 if (save_p)
9027 return save_expr (tem);
9028 else
9029 return fold (tem);
9034 /* If this is a comparison of a field, we may be able to simplify it. */
9035 if (((TREE_CODE (arg0) == COMPONENT_REF
9036 && lang_hooks.can_use_bit_fields_p ())
9037 || TREE_CODE (arg0) == BIT_FIELD_REF)
9038 && (code == EQ_EXPR || code == NE_EXPR)
9039 /* Handle the constant case even without -O
9040 to make sure the warnings are given. */
9041 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9043 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9044 if (t1)
9045 return t1;
9048 /* If this is a comparison of complex values and either or both sides
9049 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
9050 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
9051 This may prevent needless evaluations. */
9052 if ((code == EQ_EXPR || code == NE_EXPR)
9053 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
9054 && (TREE_CODE (arg0) == COMPLEX_EXPR
9055 || TREE_CODE (arg1) == COMPLEX_EXPR
9056 || TREE_CODE (arg0) == COMPLEX_CST
9057 || TREE_CODE (arg1) == COMPLEX_CST))
9059 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
9060 tree real0, imag0, real1, imag1;
9062 arg0 = save_expr (arg0);
9063 arg1 = save_expr (arg1);
9064 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
9065 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
9066 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
9067 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
9069 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
9070 : TRUTH_ORIF_EXPR),
9071 type,
9072 fold (build2 (code, type, real0, real1)),
9073 fold (build2 (code, type, imag0, imag1))));
9076 /* Optimize comparisons of strlen vs zero to a compare of the
9077 first character of the string vs zero. To wit,
9078 strlen(ptr) == 0 => *ptr == 0
9079 strlen(ptr) != 0 => *ptr != 0
9080 Other cases should reduce to one of these two (or a constant)
9081 due to the return value of strlen being unsigned. */
9082 if ((code == EQ_EXPR || code == NE_EXPR)
9083 && integer_zerop (arg1)
9084 && TREE_CODE (arg0) == CALL_EXPR)
9086 tree fndecl = get_callee_fndecl (arg0);
9087 tree arglist;
9089 if (fndecl
9090 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9091 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9092 && (arglist = TREE_OPERAND (arg0, 1))
9093 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9094 && ! TREE_CHAIN (arglist))
9095 return fold (build2 (code, type,
9096 build1 (INDIRECT_REF, char_type_node,
9097 TREE_VALUE (arglist)),
9098 fold_convert (char_type_node,
9099 integer_zero_node)));
9102 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9103 into a single range test. */
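/* For instance, with truncating division, x / 4 == 2 holds exactly
   when 8 <= x && x <= 11, a single range test. */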
9104 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9105 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9106 && TREE_CODE (arg1) == INTEGER_CST
9107 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9108 && !integer_zerop (TREE_OPERAND (arg0, 1))
9109 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9110 && !TREE_OVERFLOW (arg1))
9112 t1 = fold_div_compare (code, type, arg0, arg1);
9113 if (t1 != NULL_TREE)
9114 return t1;
9117 if ((code == EQ_EXPR || code == NE_EXPR)
9118 && !TREE_SIDE_EFFECTS (arg0)
9119 && integer_zerop (arg1)
9120 && tree_expr_nonzero_p (arg0))
9121 return constant_boolean_node (code == NE_EXPR, type);
9123 t1 = fold_relational_const (code, type, arg0, arg1);
9124 return t1 == NULL_TREE ? t : t1;
9126 case UNORDERED_EXPR:
9127 case ORDERED_EXPR:
9128 case UNLT_EXPR:
9129 case UNLE_EXPR:
9130 case UNGT_EXPR:
9131 case UNGE_EXPR:
9132 case UNEQ_EXPR:
9133 case LTGT_EXPR:
9134 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9136 t1 = fold_relational_const (code, type, arg0, arg1);
9137 if (t1 != NULL_TREE)
9138 return t1;
9141 /* If the first operand is NaN, the result is constant. */
9142 if (TREE_CODE (arg0) == REAL_CST
9143 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9144 && (code != LTGT_EXPR || ! flag_trapping_math))
9146 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9147 ? integer_zero_node
9148 : integer_one_node;
9149 return omit_one_operand (type, t1, arg1);
9152 /* If the second operand is NaN, the result is constant. */
9153 if (TREE_CODE (arg1) == REAL_CST
9154 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9155 && (code != LTGT_EXPR || ! flag_trapping_math))
9157 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9158 ? integer_zero_node
9159 : integer_one_node;
9160 return omit_one_operand (type, t1, arg0);
9163 /* Simplify unordered comparison of something with itself. */
9164 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9165 && operand_equal_p (arg0, arg1, 0))
9166 return constant_boolean_node (1, type);
9168 if (code == LTGT_EXPR
9169 && !flag_trapping_math
9170 && operand_equal_p (arg0, arg1, 0))
9171 return constant_boolean_node (0, type);
9173 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9175 tree targ0 = strip_float_extensions (arg0);
9176 tree targ1 = strip_float_extensions (arg1);
9177 tree newtype = TREE_TYPE (targ0);
9179 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9180 newtype = TREE_TYPE (targ1);
9182 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9183 return fold (build2 (code, type, fold_convert (newtype, targ0),
9184 fold_convert (newtype, targ1)));
9187 return t;
9189 case COND_EXPR:
9190 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9191 so all simple results must be passed through pedantic_non_lvalue. */
9192 if (TREE_CODE (arg0) == INTEGER_CST)
9194 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
9195 /* Only optimize constant conditions when the selected branch
9196 has the same type as the COND_EXPR. This avoids optimizing
9197 away "c ? x : throw", where the throw has a void type. */
9198 if (! VOID_TYPE_P (TREE_TYPE (tem))
9199 || VOID_TYPE_P (type))
9200 return pedantic_non_lvalue (tem);
9201 return t;
9203 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
9204 return pedantic_omit_one_operand (type, arg1, arg0);
9206 /* If we have A op B ? A : C, we may be able to convert this to a
9207 simpler expression, depending on the operation and the values
9208 of B and C. Signed zeros prevent all of these transformations,
9209 for reasons given above each one.
9211 Also try swapping the arguments and inverting the conditional. */
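/* For instance, a > b ? a : b can become MAX_EXPR (a, b), and,
   via the swapped/inverted form, a < 0 ? -a : a can become
   ABS_EXPR (a), when signed zeros need not be honored. */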
9212 if (COMPARISON_CLASS_P (arg0)
9213 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9214 arg1, TREE_OPERAND (arg0, 1))
9215 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9217 tem = fold_cond_expr_with_comparison (type, arg0,
9218 TREE_OPERAND (t, 1),
9219 TREE_OPERAND (t, 2));
9220 if (tem)
9221 return tem;
9224 if (COMPARISON_CLASS_P (arg0)
9225 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9226 TREE_OPERAND (t, 2),
9227 TREE_OPERAND (arg0, 1))
9228 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
9230 tem = invert_truthvalue (arg0);
9231 if (COMPARISON_CLASS_P (tem))
9233 tem = fold_cond_expr_with_comparison (type, tem,
9234 TREE_OPERAND (t, 2),
9235 TREE_OPERAND (t, 1));
9236 if (tem)
9237 return tem;
9241 /* If the second operand is simpler than the third, swap them
9242 since that produces better jump optimization results. */
9243 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
9244 TREE_OPERAND (t, 2), false))
9246 /* See if this can be inverted. If it can't, possibly because
9247 it was a floating-point inequality comparison, don't do
9248 anything. */
9249 tem = invert_truthvalue (arg0);
9251 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9252 return fold (build3 (code, type, tem,
9253 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
9256 /* Convert A ? 1 : 0 to simply A. */
9257 if (integer_onep (TREE_OPERAND (t, 1))
9258 && integer_zerop (TREE_OPERAND (t, 2))
9259 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
9260 call to fold will try to move the conversion inside
9261 a COND, which will recurse. In that case, the COND_EXPR
9262 is probably the best choice, so leave it alone. */
9263 && type == TREE_TYPE (arg0))
9264 return pedantic_non_lvalue (arg0);
9266 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
9267 over COND_EXPR in cases such as floating point comparisons. */
9268 if (integer_zerop (TREE_OPERAND (t, 1))
9269 && integer_onep (TREE_OPERAND (t, 2))
9270 && truth_value_p (TREE_CODE (arg0)))
9271 return pedantic_non_lvalue (fold_convert (type,
9272 invert_truthvalue (arg0)));
9274 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
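/* For instance, assuming 32-bit int, a < 0 ? INT_MIN : 0 folds to
   a & INT_MIN. */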
9275 if (TREE_CODE (arg0) == LT_EXPR
9276 && integer_zerop (TREE_OPERAND (arg0, 1))
9277 && integer_zerop (TREE_OPERAND (t, 2))
9278 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
9279 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
9280 TREE_TYPE (tem), tem, arg1)));
9282 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
9283 already handled above. */
9284 if (TREE_CODE (arg0) == BIT_AND_EXPR
9285 && integer_onep (TREE_OPERAND (arg0, 1))
9286 && integer_zerop (TREE_OPERAND (t, 2))
9287 && integer_pow2p (arg1))
9289 tree tem = TREE_OPERAND (arg0, 0);
9290 STRIP_NOPS (tem);
9291 if (TREE_CODE (tem) == RSHIFT_EXPR
9292 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9293 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
9294 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
9295 return fold (build2 (BIT_AND_EXPR, type,
9296 TREE_OPERAND (tem, 0), arg1));
9299 /* A & N ? N : 0 is simply A & N if N is a power of two. This
9300 is probably obsolete because the first operand should be a
9301 truth value (that's why we have the two cases above), but let's
9302 leave it in until we can confirm this for all front-ends. */
9303 if (integer_zerop (TREE_OPERAND (t, 2))
9304 && TREE_CODE (arg0) == NE_EXPR
9305 && integer_zerop (TREE_OPERAND (arg0, 1))
9306 && integer_pow2p (arg1)
9307 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
9308 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
9309 arg1, OEP_ONLY_CONST))
9310 return pedantic_non_lvalue (fold_convert (type,
9311 TREE_OPERAND (arg0, 0)));
9313 /* Convert A ? B : 0 into A && B if A and B are truth values. */
9314 if (integer_zerop (TREE_OPERAND (t, 2))
9315 && truth_value_p (TREE_CODE (arg0))
9316 && truth_value_p (TREE_CODE (arg1)))
9317 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
9319 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
9320 if (integer_onep (TREE_OPERAND (t, 2))
9321 && truth_value_p (TREE_CODE (arg0))
9322 && truth_value_p (TREE_CODE (arg1)))
9324 /* Only perform transformation if ARG0 is easily inverted. */
9325 tem = invert_truthvalue (arg0);
9326 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9327 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
9330 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
9331 if (integer_zerop (arg1)
9332 && truth_value_p (TREE_CODE (arg0))
9333 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9335 /* Only perform transformation if ARG0 is easily inverted. */
9336 tem = invert_truthvalue (arg0);
9337 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9338 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
9339 TREE_OPERAND (t, 2)));
9342 /* Convert A ? 1 : B into A || B if A and B are truth values. */
9343 if (integer_onep (arg1)
9344 && truth_value_p (TREE_CODE (arg0))
9345 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9346 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
9347 TREE_OPERAND (t, 2)));
9349 return t;
9351 case COMPOUND_EXPR:
9352 /* When pedantic, a compound expression can be neither an lvalue
9353 nor an integer constant expression. */
9354 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9355 return t;
9356 /* Don't let (0, 0) be a null pointer constant. */
9357 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9358 : fold_convert (type, arg1);
9359 return pedantic_non_lvalue (tem);
9361 case COMPLEX_EXPR:
9362 if (wins)
9363 return build_complex (type, arg0, arg1);
9364 return t;
9366 case REALPART_EXPR:
9367 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9368 return t;
9369 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9370 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
9371 TREE_OPERAND (arg0, 1));
9372 else if (TREE_CODE (arg0) == COMPLEX_CST)
9373 return TREE_REALPART (arg0);
9374 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9375 return fold (build2 (TREE_CODE (arg0), type,
9376 fold (build1 (REALPART_EXPR, type,
9377 TREE_OPERAND (arg0, 0))),
9378 fold (build1 (REALPART_EXPR, type,
9379 TREE_OPERAND (arg0, 1)))));
9380 return t;
9382 case IMAGPART_EXPR:
9383 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9384 return fold_convert (type, integer_zero_node);
9385 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9386 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
9387 TREE_OPERAND (arg0, 0));
9388 else if (TREE_CODE (arg0) == COMPLEX_CST)
9389 return TREE_IMAGPART (arg0);
9390 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9391 return fold (build2 (TREE_CODE (arg0), type,
9392 fold (build1 (IMAGPART_EXPR, type,
9393 TREE_OPERAND (arg0, 0))),
9394 fold (build1 (IMAGPART_EXPR, type,
9395 TREE_OPERAND (arg0, 1)))));
9396 return t;
9398 case CALL_EXPR:
9399 /* Check for a built-in function. */
9400 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
9401 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
9402 == FUNCTION_DECL)
9403 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
9405 tree tmp = fold_builtin (t, false);
9406 if (tmp)
9407 return tmp;
9409 return t;
9411 default:
9412 return t;
9413 } /* switch (code) */
9416 #ifdef ENABLE_FOLD_CHECKING
9417 #undef fold
9419 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
9420 static void fold_check_failed (tree, tree);
9421 void print_fold_checksum (tree);
9423 /* When --enable-checking=fold is in effect, compute a digest of expr
9424 before and after the actual fold call, to verify that fold did not
9425 accidentally change the original expr. */
9427 tree
9428 fold (tree expr)
9430 tree ret;
9431 struct md5_ctx ctx;
9432 unsigned char checksum_before[16], checksum_after[16];
9433 htab_t ht;
9435 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9436 md5_init_ctx (&ctx);
9437 fold_checksum_tree (expr, &ctx, ht);
9438 md5_finish_ctx (&ctx, checksum_before);
9439 htab_empty (ht);
9441 ret = fold_1 (expr);
9443 md5_init_ctx (&ctx);
9444 fold_checksum_tree (expr, &ctx, ht);
9445 md5_finish_ctx (&ctx, checksum_after);
9446 htab_delete (ht);
9448 if (memcmp (checksum_before, checksum_after, 16))
9449 fold_check_failed (expr, ret);
9451 return ret;
9454 void
9455 print_fold_checksum (tree expr)
9457 struct md5_ctx ctx;
9458 unsigned char checksum[16], cnt;
9459 htab_t ht;
9461 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9462 md5_init_ctx (&ctx);
9463 fold_checksum_tree (expr, &ctx, ht);
9464 md5_finish_ctx (&ctx, checksum);
9465 htab_delete (ht);
9466 for (cnt = 0; cnt < 16; ++cnt)
9467 fprintf (stderr, "%02x", checksum[cnt]);
9468 putc ('\n', stderr);
9471 static void
9472 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
9474 internal_error ("fold check: original tree changed by fold");
9477 static void
9478 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
9480 void **slot;
9481 enum tree_code code;
9482 char buf[sizeof (struct tree_decl)];
9483 int i, len;
9485 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
9486 <= sizeof (struct tree_decl))
9487 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
9488 if (expr == NULL)
9489 return;
9490 slot = htab_find_slot (ht, expr, INSERT);
9491 if (*slot != NULL)
9492 return;
9493 *slot = expr;
9494 code = TREE_CODE (expr);
9495 if (TREE_CODE_CLASS (code) == tcc_declaration
9496 && DECL_ASSEMBLER_NAME_SET_P (expr))
9498 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9499 memcpy (buf, expr, tree_size (expr));
9500 expr = (tree) buf;
9501 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9503 else if (TREE_CODE_CLASS (code) == tcc_type
9504 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
9505 || TYPE_CACHED_VALUES_P (expr)))
9507 /* Allow these fields to be modified. */
9508 memcpy (buf, expr, tree_size (expr));
9509 expr = (tree) buf;
9510 TYPE_POINTER_TO (expr) = NULL;
9511 TYPE_REFERENCE_TO (expr) = NULL;
9512 TYPE_CACHED_VALUES_P (expr) = 0;
9513 TYPE_CACHED_VALUES (expr) = NULL;
9515 md5_process_bytes (expr, tree_size (expr), ctx);
9516 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
9517 if (TREE_CODE_CLASS (code) != tcc_type
9518 && TREE_CODE_CLASS (code) != tcc_declaration)
9519 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
9520 switch (TREE_CODE_CLASS (code))
9522 case tcc_constant:
9523 switch (code)
9525 case STRING_CST:
9526 md5_process_bytes (TREE_STRING_POINTER (expr),
9527 TREE_STRING_LENGTH (expr), ctx);
9528 break;
9529 case COMPLEX_CST:
9530 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
9531 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
9532 break;
9533 case VECTOR_CST:
9534 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
9535 break;
9536 default:
9537 break;
9539 break;
9540 case tcc_exceptional:
9541 switch (code)
9543 case TREE_LIST:
9544 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
9545 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
9546 break;
9547 case TREE_VEC:
9548 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
9549 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
9550 break;
9551 default:
9552 break;
9554 break;
9555 case tcc_expression:
9556 case tcc_reference:
9557 case tcc_comparison:
9558 case tcc_unary:
9559 case tcc_binary:
9560 case tcc_statement:
9561 len = TREE_CODE_LENGTH (code);
9562 for (i = 0; i < len; ++i)
9563 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
9564 break;
9565 case tcc_declaration:
9566 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
9567 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
9568 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
9569 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
9570 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
9571 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
9572 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
9573 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
9574 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
9575 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
9576 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
9577 break;
9578 case tcc_type:
9579 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9580 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9581 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9582 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9583 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9584 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
9585 if (INTEGRAL_TYPE_P (expr)
9586 || SCALAR_FLOAT_TYPE_P (expr))
9588 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9589 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9591 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9592 if (TREE_CODE (expr) == RECORD_TYPE
9593 || TREE_CODE (expr) == UNION_TYPE
9594 || TREE_CODE (expr) == QUAL_UNION_TYPE)
9595 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9596 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9597 break;
9598 default:
9599 break;
9603 #endif
9605 /* Perform constant folding and related simplification of initializer
9606 expression EXPR. This behaves identically to "fold" but ignores
9607 potential run-time traps and exceptions that fold must preserve. */
9609 tree
9610 fold_initializer (tree expr)
9612 int saved_signaling_nans = flag_signaling_nans;
9613 int saved_trapping_math = flag_trapping_math;
9614 int saved_rounding_math = flag_rounding_math;
9615 int saved_trapv = flag_trapv;
9616 tree result;
9618 flag_signaling_nans = 0;
9619 flag_trapping_math = 0;
9620 flag_rounding_math = 0;
9621 flag_trapv = 0;
9623 result = fold (expr);
9625 flag_signaling_nans = saved_signaling_nans;
9626 flag_trapping_math = saved_trapping_math;
9627 flag_rounding_math = saved_rounding_math;
9628 flag_trapv = saved_trapv;
9630 return result;
9633 /* Determine if the first argument is a multiple of the second argument.
9634 Return 0 if it is not, or if we cannot easily determine it to be.
9636 An example of the sort of thing we care about (at this point; this routine
9637 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9638 fold cases do now) is discovering that
9640 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9642 is a multiple of
9644 SAVE_EXPR (J * 8)
9646 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9648 This code also handles discovering that
9650 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9652 is a multiple of 8 so we don't have to worry about dealing with a
9653 possible remainder.
9655 Note that we *look* inside a SAVE_EXPR only to determine how it was
9656 calculated; it is not safe for fold to do much of anything else with the
9657 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9658 at run time. For example, the latter example above *cannot* be implemented
9659 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9660 evaluation time of the original SAVE_EXPR is not necessarily the same at
9661 the time the new expression is evaluated. The only optimization of this
9662 sort that would be valid is changing
9664 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9666 divided by 8 to
9668 SAVE_EXPR (I) * SAVE_EXPR (J)
9670 (where the same SAVE_EXPR (J) is used in the original and the
9671 transformed version). */
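/* For instance, multiple_of_p returns 1 for TOP = J * 8 + 16 and
   BOTTOM = 8: both the MULT_EXPR and the constant addend are
   multiples of 8. */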
9673 static int
9674 multiple_of_p (tree type, tree top, tree bottom)
9676 if (operand_equal_p (top, bottom, 0))
9677 return 1;
9679 if (TREE_CODE (type) != INTEGER_TYPE)
9680 return 0;
9682 switch (TREE_CODE (top))
9684 case BIT_AND_EXPR:
9685 /* A bitwise AND provides a power-of-two multiple. If the mask is
9686 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
9687 if (!integer_pow2p (bottom))
9688 return 0;
9689 /* FALLTHRU */
9691 case MULT_EXPR:
9692 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9693 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9695 case PLUS_EXPR:
9696 case MINUS_EXPR:
9697 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9698 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9700 case LSHIFT_EXPR:
9701 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
9703 tree op1, t1;
9705 op1 = TREE_OPERAND (top, 1);
9706 /* const_binop may not detect overflow correctly,
9707 so check for it explicitly here. */
9708 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9709 > TREE_INT_CST_LOW (op1)
9710 && TREE_INT_CST_HIGH (op1) == 0
9711 && 0 != (t1 = fold_convert (type,
9712 const_binop (LSHIFT_EXPR,
9713 size_one_node,
9714 op1, 0)))
9715 && ! TREE_OVERFLOW (t1))
9716 return multiple_of_p (type, t1, bottom);
9718 return 0;
9720 case NOP_EXPR:
9721 /* Can't handle conversions from non-integral or wider integral type. */
9722 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9723 || (TYPE_PRECISION (type)
9724 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9725 return 0;
9727 /* ... fall through ... */
9729 case SAVE_EXPR:
9730 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
9732 case INTEGER_CST:
9733 if (TREE_CODE (bottom) != INTEGER_CST
9734 || (TYPE_UNSIGNED (type)
9735 && (tree_int_cst_sgn (top) < 0
9736 || tree_int_cst_sgn (bottom) < 0)))
9737 return 0;
9738 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9739 top, bottom, 0));
9741 default:
9742 return 0;
9746 /* Return true if `t' is known to be non-negative. */
9748 int
9749 tree_expr_nonnegative_p (tree t)
9751 switch (TREE_CODE (t))
9753 case ABS_EXPR:
9754 return 1;
9756 case INTEGER_CST:
9757 return tree_int_cst_sgn (t) >= 0;
9759 case REAL_CST:
9760 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
9762 case PLUS_EXPR:
9763 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9764 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9765 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9767 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9768 both unsigned and at least 2 bits shorter than the result. */
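/* For instance, assuming 16-bit unsigned short and 32-bit int,
   (int) us1 + (int) us2 is at most 2 * 65535 < 2^17, and since
   17 < 32 the sum cannot become negative. */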
9769 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9770 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9771 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9773 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9774 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9775 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9776 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9778 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9779 TYPE_PRECISION (inner2)) + 1;
9780 return prec < TYPE_PRECISION (TREE_TYPE (t));
9783 break;
9785 case MULT_EXPR:
9786 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9788 /* x * x for floating point x is always non-negative. */
9789 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9790 return 1;
9791 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9792 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9795 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9796 both unsigned and their combined precision is less than the result's. */
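/* For instance, assuming 8-bit unsigned char and 32-bit int,
   (int) uc1 * (int) uc2 is at most 255 * 255 < 2^16, and since
   16 < 32 the product cannot become negative. */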
9797 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9798 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9799 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9801 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9802 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9803 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9804 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9805 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9806 < TYPE_PRECISION (TREE_TYPE (t));
9808 return 0;
9810 case TRUNC_DIV_EXPR:
9811 case CEIL_DIV_EXPR:
9812 case FLOOR_DIV_EXPR:
9813 case ROUND_DIV_EXPR:
9814 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9815 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9817 case TRUNC_MOD_EXPR:
9818 case CEIL_MOD_EXPR:
9819 case FLOOR_MOD_EXPR:
9820 case ROUND_MOD_EXPR:
9821 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9823 case RDIV_EXPR:
9824 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9825 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9827 case BIT_AND_EXPR:
9828 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9829 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9830 case BIT_IOR_EXPR:
9831 case BIT_XOR_EXPR:
9832 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9833 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9835 case NOP_EXPR:
9837 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9838 tree outer_type = TREE_TYPE (t);
9840 if (TREE_CODE (outer_type) == REAL_TYPE)
9842 if (TREE_CODE (inner_type) == REAL_TYPE)
9843 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9844 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9846 if (TYPE_UNSIGNED (inner_type))
9847 return 1;
9848 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9851 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9853 if (TREE_CODE (inner_type) == REAL_TYPE)
9854 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9855 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9856 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9857 && TYPE_UNSIGNED (inner_type);
9860 break;
9862 case COND_EXPR:
9863 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9864 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9865 case COMPOUND_EXPR:
9866 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9867 case MIN_EXPR:
9868 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9869 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9870 case MAX_EXPR:
9871 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9872 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9873 case MODIFY_EXPR:
9874 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9875 case BIND_EXPR:
9876 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9877 case SAVE_EXPR:
9878 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9879 case NON_LVALUE_EXPR:
9880 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9881 case FLOAT_EXPR:
9882 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9884 case TARGET_EXPR:
9886 tree temp = TARGET_EXPR_SLOT (t);
9887 t = TARGET_EXPR_INITIAL (t);
9889 /* If the initializer is non-void, then it's a normal expression
9890 that will be assigned to the slot. */
9891 if (!VOID_TYPE_P (t))
9892 return tree_expr_nonnegative_p (t);
9894 /* Otherwise, the initializer sets the slot in some way. One common
9895 way is an assignment statement at the end of the initializer. */
9896 while (1)
9898 if (TREE_CODE (t) == BIND_EXPR)
9899 t = expr_last (BIND_EXPR_BODY (t));
9900 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9901 || TREE_CODE (t) == TRY_CATCH_EXPR)
9902 t = expr_last (TREE_OPERAND (t, 0));
9903 else if (TREE_CODE (t) == STATEMENT_LIST)
9904 t = expr_last (t);
9905 else
9906 break;
9908 if (TREE_CODE (t) == MODIFY_EXPR
9909 && TREE_OPERAND (t, 0) == temp)
9910 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9912 return 0;
9915 case CALL_EXPR:
9917 tree fndecl = get_callee_fndecl (t);
9918 tree arglist = TREE_OPERAND (t, 1);
9919 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
9920 switch (DECL_FUNCTION_CODE (fndecl))
9922 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9923 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9924 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9925 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9927 CASE_BUILTIN_F (BUILT_IN_ACOS)
9928 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9929 CASE_BUILTIN_F (BUILT_IN_CABS)
9930 CASE_BUILTIN_F (BUILT_IN_COSH)
9931 CASE_BUILTIN_F (BUILT_IN_ERFC)
9932 CASE_BUILTIN_F (BUILT_IN_EXP)
9933 CASE_BUILTIN_F (BUILT_IN_EXP10)
9934 CASE_BUILTIN_F (BUILT_IN_EXP2)
9935 CASE_BUILTIN_F (BUILT_IN_FABS)
9936 CASE_BUILTIN_F (BUILT_IN_FDIM)
9937 CASE_BUILTIN_F (BUILT_IN_FREXP)
9938 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9939 CASE_BUILTIN_F (BUILT_IN_POW10)
9940 CASE_BUILTIN_I (BUILT_IN_FFS)
9941 CASE_BUILTIN_I (BUILT_IN_PARITY)
9942 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9943 /* Always true. */
9944 return 1;
9946 CASE_BUILTIN_F (BUILT_IN_SQRT)
9947 /* sqrt(-0.0) is -0.0. */
9948 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9949 return 1;
9950 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9952 CASE_BUILTIN_F (BUILT_IN_ASINH)
9953 CASE_BUILTIN_F (BUILT_IN_ATAN)
9954 CASE_BUILTIN_F (BUILT_IN_ATANH)
9955 CASE_BUILTIN_F (BUILT_IN_CBRT)
9956 CASE_BUILTIN_F (BUILT_IN_CEIL)
9957 CASE_BUILTIN_F (BUILT_IN_ERF)
9958 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9959 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9960 CASE_BUILTIN_F (BUILT_IN_FMOD)
9961 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9962 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9963 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9964 CASE_BUILTIN_F (BUILT_IN_LRINT)
9965 CASE_BUILTIN_F (BUILT_IN_LROUND)
9966 CASE_BUILTIN_F (BUILT_IN_MODF)
9967 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9968 CASE_BUILTIN_F (BUILT_IN_POW)
9969 CASE_BUILTIN_F (BUILT_IN_RINT)
9970 CASE_BUILTIN_F (BUILT_IN_ROUND)
9971 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9972 CASE_BUILTIN_F (BUILT_IN_SINH)
9973 CASE_BUILTIN_F (BUILT_IN_TANH)
9974 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9975 /* True if the 1st argument is nonnegative. */
9976 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9978 CASE_BUILTIN_F (BUILT_IN_FMAX)
9979 /* True if the 1st OR 2nd arguments are nonnegative. */
9980 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9981 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9983 CASE_BUILTIN_F (BUILT_IN_FMIN)
9984 /* True if the 1st AND 2nd arguments are nonnegative. */
9985 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9986 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9988 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9989 /* True if the 2nd argument is nonnegative. */
9990 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9992 default:
9993 break;
9994 #undef CASE_BUILTIN_F
9995 #undef CASE_BUILTIN_I
9999 /* ... fall through ... */
10001 default:
10002 if (truth_value_p (TREE_CODE (t)))
10003 /* Truth values evaluate to 0 or 1, which is nonnegative. */
10004 return 1;
10007 /* We don't know the sign of `t', so be conservative and return false. */
10008 return 0;
10011 /* Return true when T is an address and is known to be nonzero.
10012 For floating point we further ensure that T is not denormal.
10013 Similar logic is present in nonzero_address in rtlanal.h. */
10015 static bool
10016 tree_expr_nonzero_p (tree t)
10018 tree type = TREE_TYPE (t);
10020 /* Doing something useful for floating point would need more work. */
10021 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10022 return false;
10024 switch (TREE_CODE (t))
10026 case ABS_EXPR:
10027 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10028 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10030 case INTEGER_CST:
10031 /* We used to test for !integer_zerop here. This does not work correctly
10032 if TREE_CONSTANT_OVERFLOW (t) is set. */
10033 return (TREE_INT_CST_LOW (t) != 0
10034 || TREE_INT_CST_HIGH (t) != 0);
10036 case PLUS_EXPR:
10037 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10039 /* In the presence of negative values it is hard
10040 to say anything definite. */
10041 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10042 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10043 return false;
10044 /* One of the operands must be positive and the other non-negative. */
10045 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10046 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10048 break;
10050 case MULT_EXPR:
10051 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10053 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10054 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10056 break;
10058 case NOP_EXPR:
10060 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10061 tree outer_type = TREE_TYPE (t);
10063 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
10064 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
10066 break;
10068 case ADDR_EXPR:
10070 tree base = get_base_address (TREE_OPERAND (t, 0));
10072 if (!base)
10073 return false;
10075 /* Weak declarations may link to NULL. */
10076 if (DECL_P (base))
10077 return !DECL_WEAK (base);
10079 /* Constants are never weak. */
10080 if (CONSTANT_CLASS_P (base))
10081 return true;
10083 return false;
10086 case COND_EXPR:
10087 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10088 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
10090 case MIN_EXPR:
10091 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10092 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10094 case MAX_EXPR:
10095 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
10097 /* When both operands are nonzero, then MAX must be too. */
10098 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
10099 return true;
10101 /* MAX where operand 0 is positive is positive. */
10102 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10104 /* MAX where operand 1 is positive is positive. */
10105 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10106 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10107 return true;
10108 break;
10110 case COMPOUND_EXPR:
10111 case MODIFY_EXPR:
10112 case BIND_EXPR:
10113 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
10115 case SAVE_EXPR:
10116 case NON_LVALUE_EXPR:
10117 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10119 case BIT_IOR_EXPR:
10120 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10121 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10123 default:
10124 break;
10126 return false;
10129 /* See if we are applying CODE, a relational comparison, to the highest
10130 or lowest possible integer of TYPE. If so, then the result is a
10131 compile-time constant. */
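/* For instance, for unsigned char x, x <= 255 folds to constant 1
   and x > 255 folds to constant 0, while x >= 255 is rewritten as
   x == 255. */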
10133 static tree
10134 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
10135 tree *op1_p)
10137 tree op0 = *op0_p;
10138 tree op1 = *op1_p;
10139 enum tree_code code = *code_p;
10140 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
10142 if (TREE_CODE (op1) == INTEGER_CST
10143 && ! TREE_CONSTANT_OVERFLOW (op1)
10144 && width <= HOST_BITS_PER_WIDE_INT
10145 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
10146 || POINTER_TYPE_P (TREE_TYPE (op1))))
10148 unsigned HOST_WIDE_INT signed_max;
10149 unsigned HOST_WIDE_INT max, min;
10151 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
10153 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
10155 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10156 min = 0;
10158 else
10160 max = signed_max;
10161 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10164 if (TREE_INT_CST_HIGH (op1) == 0
10165 && TREE_INT_CST_LOW (op1) == max)
10166 switch (code)
10168 case GT_EXPR:
10169 return omit_one_operand (type, integer_zero_node, op0);
10171 case GE_EXPR:
10172 *code_p = EQ_EXPR;
10173 break;
10174 case LE_EXPR:
10175 return omit_one_operand (type, integer_one_node, op0);
10177 case LT_EXPR:
10178 *code_p = NE_EXPR;
10179 break;
10181 /* The GE_EXPR and LT_EXPR cases above are not normally
10182 reached because of previous transformations. */
10184 default:
10185 break;
10187 else if (TREE_INT_CST_HIGH (op1) == 0
10188 && TREE_INT_CST_LOW (op1) == max - 1)
10189 switch (code)
10191 case GT_EXPR:
10192 *code_p = EQ_EXPR;
10193 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10194 break;
10195 case LE_EXPR:
10196 *code_p = NE_EXPR;
10197 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10198 break;
10199 default:
10200 break;
10202 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10203 && TREE_INT_CST_LOW (op1) == min)
10204 switch (code)
10206 case LT_EXPR:
10207 return omit_one_operand (type, integer_zero_node, op0);
10209 case LE_EXPR:
10210 *code_p = EQ_EXPR;
10211 break;
10213 case GE_EXPR:
10214 return omit_one_operand (type, integer_one_node, op0);
10216 case GT_EXPR:
10217 *code_p = NE_EXPR;
10218 break;
10220 default:
10221 break;
10223 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10224 && TREE_INT_CST_LOW (op1) == min + 1)
10225 switch (code)
10227 case GE_EXPR:
10228 *code_p = NE_EXPR;
10229 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10230 break;
10231 case LT_EXPR:
10232 *code_p = EQ_EXPR;
10233 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10234 break;
10235 default:
10236 break;
10239 else if (TREE_INT_CST_HIGH (op1) == 0
10240 && TREE_INT_CST_LOW (op1) == signed_max
10241 && TYPE_UNSIGNED (TREE_TYPE (op1))
10242 /* signed_type does not work on pointer types. */
10243 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
10245 /* The following case also applies to X < signed_max+1
10246 and X >= signed_max+1 because of previous transformations. */
10247 if (code == LE_EXPR || code == GT_EXPR)
10249 tree st0, st1, exp, retval;
10250 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
10251 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
10253 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10254 type,
10255 fold_convert (st0, op0),
10256 fold_convert (st1, integer_zero_node));
10258 retval = fold_binary_to_constant (TREE_CODE (exp),
10259 TREE_TYPE (exp),
10260 TREE_OPERAND (exp, 0),
10261 TREE_OPERAND (exp, 1));
10263 /* If we are in gimple form, then returning EXP would create
10264 non-gimple expressions. Clearing it is safe and ensures
10265 we do not allow a non-gimple expression to escape. */
10266 if (in_gimple_form)
10267 exp = NULL;
10269 return (retval ? retval : exp);
10274 return NULL_TREE;
10278 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
10279 attempt to fold the expression to a constant without modifying TYPE,
10280 OP0 or OP1.
10282 If the expression can be simplified to a constant, then return
10283 the constant. If the expression cannot be simplified to a
10284 constant, then return NULL_TREE.
10286 Note this is primarily designed to be called after gimplification
10287 of the tree structures and when at least one operand is a constant.
10288 As a result of those simplifying assumptions this routine is far
10289 simpler than the generic fold routine. */
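/* For instance, fold_binary_to_constant (PLUS_EXPR, integer_type_node,
   c1, c2) with two INTEGER_CST operands yields their constant sum;
   when no constant simplification applies it yields NULL_TREE. */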
10291 tree
10292 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
10294 int wins = 1;
10295 tree subop0;
10296 tree subop1;
10297 tree tem;
10299 /* If this is a commutative operation, and OP0 is a constant, move it
10300 to OP1 to reduce the number of tests below. */
10301 if (commutative_tree_code (code)
10302 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
10304 tem = op0;
10305 op0 = op1;
10306 op1 = tem;
10309 /* If either operand is a complex type, extract its real component. */
10310 if (TREE_CODE (op0) == COMPLEX_CST)
10311 subop0 = TREE_REALPART (op0);
10312 else
10313 subop0 = op0;
10315 if (TREE_CODE (op1) == COMPLEX_CST)
10316 subop1 = TREE_REALPART (op1);
10317 else
10318 subop1 = op1;
10320 /* Note if either argument is not a real or integer constant.
10321 With a few exceptions, simplification is limited to cases
10322 where both arguments are constants. */
10323 if ((TREE_CODE (subop0) != INTEGER_CST
10324 && TREE_CODE (subop0) != REAL_CST)
10325 || (TREE_CODE (subop1) != INTEGER_CST
10326 && TREE_CODE (subop1) != REAL_CST))
10327 wins = 0;
10329 switch (code)
10331 case PLUS_EXPR:
10332 /* (plus (address) (const_int)) is a constant. */
10333 if (TREE_CODE (op0) == PLUS_EXPR
10334 && TREE_CODE (op1) == INTEGER_CST
10335 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
10336 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
10337 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
10338 == ADDR_EXPR)))
10339 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
10341 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
10342 const_binop (PLUS_EXPR, op1,
10343 TREE_OPERAND (op0, 1), 0));
10345 case BIT_XOR_EXPR:
10347 binary:
10348 if (!wins)
10349 return NULL_TREE;
10351 /* Both arguments are constants. Simplify. */
10352 tem = const_binop (code, op0, op1, 0);
10353 if (tem != NULL_TREE)
10355 /* The return value should always have the same type as
10356 the original expression. */
10357 if (TREE_TYPE (tem) != type)
10358 tem = fold_convert (type, tem);
10360 return tem;
10362 return NULL_TREE;
10364 case MINUS_EXPR:
10365 /* Fold &x - &x. This can happen from &x.foo - &x.
10366 This is unsafe for certain floats even in non-IEEE formats.
10367 In IEEE, it is unsafe because it does wrong for NaNs.
10368 Also note that operand_equal_p is always false if an
10369 operand is volatile. */
10370 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
10371 return fold_convert (type, integer_zero_node);
10373 goto binary;
10375 case MULT_EXPR:
10376 case BIT_AND_EXPR:
10377 /* Special case multiplication or bitwise AND where one argument
10378 is zero. */
10379 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
10380 return omit_one_operand (type, op1, op0);
10381 else
10382 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
10383 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
10384 && real_zerop (op1))
10385 return omit_one_operand (type, op1, op0);
10387 goto binary;
10389 case BIT_IOR_EXPR:
10390 /* Special case when we know the result will be all ones. */
10391 if (integer_all_onesp (op1))
10392 return omit_one_operand (type, op1, op0);
10394 goto binary;
10396 case TRUNC_DIV_EXPR:
10397 case ROUND_DIV_EXPR:
10398 case FLOOR_DIV_EXPR:
10399 case CEIL_DIV_EXPR:
10400 case EXACT_DIV_EXPR:
10401 case TRUNC_MOD_EXPR:
10402 case ROUND_MOD_EXPR:
10403 case FLOOR_MOD_EXPR:
10404 case CEIL_MOD_EXPR:
10405 case RDIV_EXPR:
10406 /* Division by zero is undefined. */
10407 if (integer_zerop (op1))
10408 return NULL_TREE;
10410 if (TREE_CODE (op1) == REAL_CST
10411 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
10412 && real_zerop (op1))
10413 return NULL_TREE;
10415 goto binary;
10417 case MIN_EXPR:
10418 if (INTEGRAL_TYPE_P (type)
10419 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10420 return omit_one_operand (type, op1, op0);
10422 goto binary;
10424 case MAX_EXPR:
10425 if (INTEGRAL_TYPE_P (type)
10426 && TYPE_MAX_VALUE (type)
10427 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10428 return omit_one_operand (type, op1, op0);
10430 goto binary;
10432 case RSHIFT_EXPR:
10433 /* Optimize -1 >> x for arithmetic right shifts. */
10434 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
10435 return omit_one_operand (type, op0, op1);
10436 /* ... fall through ... */
10438 case LSHIFT_EXPR:
10439 if (integer_zerop (op0))
10440 return omit_one_operand (type, op0, op1);
10442 /* Since a negative shift count is not well-defined, don't
10443 try to compute it in the compiler. */
10444 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
10445 return NULL_TREE;
10447 goto binary;
10449 case LROTATE_EXPR:
10450 case RROTATE_EXPR:
10451 /* -1 rotated either direction by any amount is still -1. */
10452 if (integer_all_onesp (op0))
10453 return omit_one_operand (type, op0, op1);
10455 /* 0 rotated either direction by any amount is still zero. */
10456 if (integer_zerop (op0))
10457 return omit_one_operand (type, op0, op1);
10459 goto binary;
10461 case COMPLEX_EXPR:
10462 if (wins)
10463 return build_complex (type, op0, op1);
10464 return NULL_TREE;
10466 case LT_EXPR:
10467 case LE_EXPR:
10468 case GT_EXPR:
10469 case GE_EXPR:
10470 case EQ_EXPR:
10471 case NE_EXPR:
10472 /* If one arg is a real or integer constant, put it last. */
10473 if ((TREE_CODE (op0) == INTEGER_CST
10474 && TREE_CODE (op1) != INTEGER_CST)
10475 || (TREE_CODE (op0) == REAL_CST
10476 && TREE_CODE (op1) != REAL_CST))
10478 tree temp;
10480 temp = op0;
10481 op0 = op1;
10482 op1 = temp;
10483 code = swap_tree_comparison (code);
10486 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10487 This transformation affects the cases which are handled in later
10488 optimizations involving comparisons with non-negative constants. */
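/* For instance, x >= 5 becomes x > 4 and x < 5 becomes x <= 4,
   canonicalizing toward the forms fold_relational_hi_lo expects. */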
10489 if (TREE_CODE (op1) == INTEGER_CST
10490 && TREE_CODE (op0) != INTEGER_CST
10491 && tree_int_cst_sgn (op1) > 0)
10493 switch (code)
10495 case GE_EXPR:
10496 code = GT_EXPR;
10497 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10498 break;
10500 case LT_EXPR:
10501 code = LE_EXPR;
10502 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10503 break;
10505 default:
10506 break;
10510 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10511 if (tem)
10512 return tem;
10514 /* Fall through. */
10516 case ORDERED_EXPR:
10517 case UNORDERED_EXPR:
10518 case UNLT_EXPR:
10519 case UNLE_EXPR:
10520 case UNGT_EXPR:
10521 case UNGE_EXPR:
10522 case UNEQ_EXPR:
10523 case LTGT_EXPR:
10524 if (!wins)
10525 return NULL_TREE;
10527 return fold_relational_const (code, type, op0, op1);
10529 case RANGE_EXPR:
10530 /* This could probably be handled. */
10531 return NULL_TREE;
10533 case TRUTH_AND_EXPR:
10534 /* If second arg is constant zero, result is zero, but first arg
10535 must be evaluated. */
10536 if (integer_zerop (op1))
10537 return omit_one_operand (type, op1, op0);
10538 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10539 case will be handled here. */
10540 if (integer_zerop (op0))
10541 return omit_one_operand (type, op0, op1);
10542 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10543 return constant_boolean_node (true, type);
10544 return NULL_TREE;
10546 case TRUTH_OR_EXPR:
10547 /* If second arg is constant true, result is true, but we must
10548 evaluate first arg. */
10549 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10550 return omit_one_operand (type, op1, op0);
10551 /* Likewise for first arg, but note this only occurs here for
10552 TRUTH_OR_EXPR. */
10553 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10554 return omit_one_operand (type, op0, op1);
10555 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10556 return constant_boolean_node (false, type);
10557 return NULL_TREE;
10559 case TRUTH_XOR_EXPR:
10560 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10562 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10563 return constant_boolean_node (x, type);
10565 return NULL_TREE;
10567 default:
10568 return NULL_TREE;
10572 /* Given the components of a unary expression CODE, TYPE and OP0,
10573 attempt to fold the expression to a constant without modifying
10574 TYPE or OP0.
10576 If the expression can be simplified to a constant, then return
10577 the constant. If the expression cannot be simplified to a
10578 constant, then return NULL_TREE.
10580 Note this is primarily designed to be called after gimplification
10581 of the tree structures and when op0 is a constant. As a result
10582 of those simplifying assumptions this routine is far simpler than
10583 the generic fold routine. */
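/* For instance, fold_unary_to_constant (NEGATE_EXPR, integer_type_node,
   c) for an INTEGER_CST c yields the negated constant, and NULL_TREE
   when OP0 is not a suitable constant. */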
10585 tree
10586 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
10588 /* Make sure we have a suitable constant argument. */
10589 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
10591 tree subop;
10593 if (TREE_CODE (op0) == COMPLEX_CST)
10594 subop = TREE_REALPART (op0);
10595 else
10596 subop = op0;
10598 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
10599 return NULL_TREE;
10602 switch (code)
10604 case NOP_EXPR:
10605 case FLOAT_EXPR:
10606 case CONVERT_EXPR:
10607 case FIX_TRUNC_EXPR:
10608 case FIX_FLOOR_EXPR:
10609 case FIX_CEIL_EXPR:
10610 return fold_convert_const (code, type, op0);
10612 case NEGATE_EXPR:
10613 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10614 return fold_negate_const (op0, type);
10615 else
10616 return NULL_TREE;
10618 case ABS_EXPR:
10619 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10620 return fold_abs_const (op0, type);
10621 else
10622 return NULL_TREE;
10624 case BIT_NOT_EXPR:
10625 if (TREE_CODE (op0) == INTEGER_CST)
10626 return fold_not_const (op0, type);
10627 else
10628 return NULL_TREE;
10630 case REALPART_EXPR:
10631 if (TREE_CODE (op0) == COMPLEX_CST)
10632 return TREE_REALPART (op0);
10633 else
10634 return NULL_TREE;
10636 case IMAGPART_EXPR:
10637 if (TREE_CODE (op0) == COMPLEX_CST)
10638 return TREE_IMAGPART (op0);
10639 else
10640 return NULL_TREE;
10642 case CONJ_EXPR:
10643 if (TREE_CODE (op0) == COMPLEX_CST
10644 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10645 return build_complex (type, TREE_REALPART (op0),
10646 negate_expr (TREE_IMAGPART (op0)));
10647 return NULL_TREE;
10649 default:
10650 return NULL_TREE;
10654 /* If EXP represents a reference to an element in a constant string
10655 (either via pointer arithmetic or array indexing), return the
10656 tree representing the value accessed, otherwise return NULL. */
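/* For instance, "abc"[1] and *("abc" + 1) both fold to the
   character constant 'b'. */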
10658 tree
10659 fold_read_from_constant_string (tree exp)
10661 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10663 tree exp1 = TREE_OPERAND (exp, 0);
10664 tree index;
10665 tree string;
10667 if (TREE_CODE (exp) == INDIRECT_REF)
10668 string = string_constant (exp1, &index);
10669 else
10670 {
10671 tree low_bound = array_ref_low_bound (exp);
10672 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10674 /* Optimize the special case of a zero lower bound.
10675
10676 We convert the low_bound to sizetype to avoid some problems
10677 with constant folding. (E.g. suppose the lower bound is 1,
10678 and its mode is QI. Without the conversion, (ARRAY
10679 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10680 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
10681 if (! integer_zerop (low_bound))
10682 index = size_diffop (index, fold_convert (sizetype, low_bound));
10684 string = exp1;
10685 }
10687 if (string
10688 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10689 && TREE_CODE (string) == STRING_CST
10690 && TREE_CODE (index) == INTEGER_CST
10691 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10692 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10693 == MODE_INT)
10694 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10695 return fold_convert (TREE_TYPE (exp),
10696 build_int_cst (NULL_TREE,
10697 (TREE_STRING_POINTER (string)
10698 [TREE_INT_CST_LOW (index)])));
10699 }
10700 return NULL;
10701 }
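/* Editor's note: a hedged example, not part of the original source.
   Given the C fragment
     static const char s[] = "abc";
     ... s[1] ...
   the ARRAY_REF passes every guard above (constant index 1 below
   TREE_STRING_LENGTH, one-byte MODE_INT element), so the read folds
   to the character constant 'b'.  A non-constant or out-of-range
   index makes the function return NULL instead.  */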
10703 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10704 an integer constant or real constant.
10706 TYPE is the type of the result. */
10708 static tree
10709 fold_negate_const (tree arg0, tree type)
10710 {
10711 tree t = NULL_TREE;
10713 switch (TREE_CODE (arg0))
10714 {
10715 case INTEGER_CST:
10716 {
10717 unsigned HOST_WIDE_INT low;
10718 HOST_WIDE_INT high;
10719 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10720 TREE_INT_CST_HIGH (arg0),
10721 &low, &high);
10722 t = build_int_cst_wide (type, low, high);
10723 t = force_fit_type (t, 1,
10724 (overflow | TREE_OVERFLOW (arg0))
10725 && !TYPE_UNSIGNED (type),
10726 TREE_CONSTANT_OVERFLOW (arg0));
10727 break;
10728 }
10730 case REAL_CST:
10731 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10732 break;
10734 default:
10735 gcc_unreachable ();
10736 }
10738 return t;
10739 }
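/* Editor's note: a hedged example, not part of the original source.
   neg_double forms the twos-complement negation of the (low, high)
   pair, so 5 folds to -5.  The interesting case is the most negative
   value of a signed type: negating INT_MIN does not fit, neg_double
   reports overflow, and force_fit_type then sets TREE_OVERFLOW on
   the result.  */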
10741 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10742 an integer constant or real constant.
10744 TYPE is the type of the result. */
10746 tree
10747 fold_abs_const (tree arg0, tree type)
10748 {
10749 tree t = NULL_TREE;
10751 switch (TREE_CODE (arg0))
10752 {
10753 case INTEGER_CST:
10754 /* If the value is unsigned, then the absolute value is
10755 the same as the ordinary value. */
10756 if (TYPE_UNSIGNED (type))
10757 t = arg0;
10758 /* Similarly, if the value is non-negative. */
10759 else if (INT_CST_LT (integer_minus_one_node, arg0))
10760 t = arg0;
10761 /* If the value is negative, then the absolute value is
10762 its negation. */
10763 else
10764 {
10765 unsigned HOST_WIDE_INT low;
10766 HOST_WIDE_INT high;
10767 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10768 TREE_INT_CST_HIGH (arg0),
10769 &low, &high);
10770 t = build_int_cst_wide (type, low, high);
10771 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
10772 TREE_CONSTANT_OVERFLOW (arg0));
10773 }
10774 break;
10776 case REAL_CST:
10777 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10778 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10779 else
10780 t = arg0;
10781 break;
10783 default:
10784 gcc_unreachable ();
10785 }
10787 return t;
10788 }
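/* Editor's note: a hedged example, not part of the original source,
   of the three INTEGER_CST paths above for a 32-bit signed type:
     abs (7)       -> 7  (already non-negative, returned unchanged)
     abs (-7)      -> 7  (negated via neg_double)
     abs (INT_MIN) -> INT_MIN with TREE_OVERFLOW set, because the
                      negation does not fit in the type.  */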
10790 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10791 constant. TYPE is the type of the result. */
10793 static tree
10794 fold_not_const (tree arg0, tree type)
10795 {
10796 tree t = NULL_TREE;
10798 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
10800 t = build_int_cst_wide (type,
10801 ~ TREE_INT_CST_LOW (arg0),
10802 ~ TREE_INT_CST_HIGH (arg0));
10803 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
10804 TREE_CONSTANT_OVERFLOW (arg0));
10806 return t;
10807 }
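/* Editor's note: a hedged example, not part of the original source.
   fold_not_const complements both halves of the double-int and lets
   force_fit_type truncate to the result type: for an 8-bit unsigned
   type, ~0x0F is 0x...F0 before fitting and 0xF0 after.  */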
10809 /* Given CODE, a relational operator, the target type, TYPE and two
10810 constant operands OP0 and OP1, return the result of the
10811 relational operation. If the result is not a compile time
10812 constant, then return NULL_TREE. */
10814 static tree
10815 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
10816 {
10817 int result, invert;
10819 /* From here on, the only cases we handle are when the result is
10820 known to be a constant. */
10822 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10823 {
10824 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
10825 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
10827 /* Handle the cases where either operand is a NaN. */
10828 if (real_isnan (c0) || real_isnan (c1))
10829 {
10830 switch (code)
10831 {
10832 case EQ_EXPR:
10833 case ORDERED_EXPR:
10834 result = 0;
10835 break;
10837 case NE_EXPR:
10838 case UNORDERED_EXPR:
10839 case UNLT_EXPR:
10840 case UNLE_EXPR:
10841 case UNGT_EXPR:
10842 case UNGE_EXPR:
10843 case UNEQ_EXPR:
10844 result = 1;
10845 break;
10847 case LT_EXPR:
10848 case LE_EXPR:
10849 case GT_EXPR:
10850 case GE_EXPR:
10851 case LTGT_EXPR:
10852 if (flag_trapping_math)
10853 return NULL_TREE;
10854 result = 0;
10855 break;
10857 default:
10858 gcc_unreachable ();
10859 }
10861 return constant_boolean_node (result, type);
10862 }
10864 return constant_boolean_node (real_compare (code, c0, c1), type);
10865 }
10867 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
10869 To compute GT, swap the arguments and do LT.
10870 To compute GE, do LT and invert the result.
10871 To compute LE, swap the arguments, do LT and invert the result.
10872 To compute NE, do EQ and invert the result.
10874 Therefore, the code below must handle only EQ and LT. */
10876 if (code == LE_EXPR || code == GT_EXPR)
10877 {
10878 tree tem = op0;
10879 op0 = op1;
10880 op1 = tem;
10881 code = swap_tree_comparison (code);
10882 }
10884 /* Note that it is safe to invert for real values here because we
10885 have already handled the one case in which it matters. */
10887 invert = 0;
10888 if (code == NE_EXPR || code == GE_EXPR)
10889 {
10890 invert = 1;
10891 code = invert_tree_comparison (code, false);
10892 }
10894 /* Compute a result for LT or EQ if args permit;
10895 otherwise return NULL_TREE. */
10896 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10897 {
10898 if (code == EQ_EXPR)
10899 result = tree_int_cst_equal (op0, op1);
10900 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
10901 result = INT_CST_LT_UNSIGNED (op0, op1);
10902 else
10903 result = INT_CST_LT (op0, op1);
10904 }
10905 else
10906 return NULL_TREE;
10908 if (invert)
10909 result ^= 1;
10910 return constant_boolean_node (result, type);
10911 }
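/* Editor's note: a hedged illustration, not part of the original
   source, of the reductions above on integer constants:
     3 >  2 -> swap to 2 < 3                -> true
     3 >= 2 -> invert (3 < 2), i.e. !false  -> true
     3 != 2 -> invert (3 == 2), i.e. !false -> true
   For reals, NaN < 1.0 folds to false only when flag_trapping_math
   is clear; otherwise the comparison is left for run time.  */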
10913 /* Build a cleanup point expression containing EXPR, with type TYPE.
10914 Don't build a cleanup point expression for an EXPR which doesn't
10915 have side effects. */
10917 tree
10918 fold_build_cleanup_point_expr (tree type, tree expr)
10919 {
10920 /* If the expression does not have side effects then we don't have to wrap
10921 it with a cleanup point expression. */
10922 if (!TREE_SIDE_EFFECTS (expr))
10923 return expr;
10925 /* If the expression is a return, check whether the expression inside
10926 the return, or the right-hand side of the MODIFY_EXPR it contains,
10927 has no side effects. If either has none, we don't need to wrap the
10928 expression in a cleanup point expression. Note we don't check the
10929 left-hand side of the MODIFY_EXPR because it should always be the return decl. */
10930 if (TREE_CODE (expr) == RETURN_EXPR)
10931 {
10932 tree op = TREE_OPERAND (expr, 0);
10933 if (!op || !TREE_SIDE_EFFECTS (op))
10934 return expr;
10935 op = TREE_OPERAND (op, 1);
10936 if (!TREE_SIDE_EFFECTS (op))
10937 return expr;
10938 }
10940 return build1 (CLEANUP_POINT_EXPR, type, expr);
10941 }
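/* Editor's note: a hedged usage sketch, not part of the original
   source.  A call such as
     fold_build_cleanup_point_expr (void_type_node, expr)
   hands back EXPR untouched when EXPR has no side effects, and
   likewise for a RETURN_EXPR whose operand (or the right-hand side
   of its MODIFY_EXPR) is side-effect free; only genuinely
   side-effecting trees get wrapped in a CLEANUP_POINT_EXPR.  */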
10943 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10944 avoid confusing the gimplify process. */
10946 tree
10947 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10948 {
10949 /* The size of the object is not relevant when talking about its address. */
10950 if (TREE_CODE (t) == WITH_SIZE_EXPR)
10951 t = TREE_OPERAND (t, 0);
10953 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
10954 if (TREE_CODE (t) == INDIRECT_REF
10955 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
10956 {
10957 t = TREE_OPERAND (t, 0);
10958 if (TREE_TYPE (t) != ptrtype)
10959 t = build1 (NOP_EXPR, ptrtype, t);
10960 }
10961 else
10962 {
10963 tree base = t;
10965 while (handled_component_p (base))
10966 base = TREE_OPERAND (base, 0);
10967 if (DECL_P (base))
10968 TREE_ADDRESSABLE (base) = 1;
10970 t = build1 (ADDR_EXPR, ptrtype, t);
10971 }
10973 return t;
10974 }
10976 tree
10977 build_fold_addr_expr (tree t)
10978 {
10979 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
10980 }
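/* Editor's note: a hedged example, not part of the original source.
   Taking the address of an indirection cancels out: for a pointer P,
     build_fold_addr_expr (*P) -> P  (possibly through a NOP_EXPR
   cast to the requested pointer type), while for an ordinary lvalue
   the base declaration is marked TREE_ADDRESSABLE and a plain
   ADDR_EXPR is built.  */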
10982 /* Builds an expression for an indirection through T, simplifying some
10983 cases. */
10985 tree
10986 build_fold_indirect_ref (tree t)
10987 {
10988 tree type = TREE_TYPE (TREE_TYPE (t));
10989 tree sub = t;
10990 tree subtype;
10992 STRIP_NOPS (sub);
10993 if (TREE_CODE (sub) == ADDR_EXPR)
10994 {
10995 tree op = TREE_OPERAND (sub, 0);
10996 tree optype = TREE_TYPE (op);
10997 /* *&p => p */
10998 if (lang_hooks.types_compatible_p (type, optype))
10999 return op;
11000 /* *(foo *)&fooarray => fooarray[0] */
11001 else if (TREE_CODE (optype) == ARRAY_TYPE
11002 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
11003 return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
11004 }
11006 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11007 subtype = TREE_TYPE (sub);
11008 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11009 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
11010 {
11011 sub = build_fold_indirect_ref (sub);
11012 return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
11013 }
11015 return build1 (INDIRECT_REF, type, t);
11016 }
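/* Editor's note: a hedged summary, not part of the original source,
   of the simplifications above in source terms:
     *&p               -> p
     *(foo *)&fooarray -> fooarray[0]
     *(foo *)fooarrptr -> (*fooarrptr)[0]
   Anything else becomes an explicit INDIRECT_REF.  */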
11018 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11019 whose result is ignored. The type of the returned tree need not be
11020 the same as the original expression. */
11022 tree
11023 fold_ignored_result (tree t)
11024 {
11025 if (!TREE_SIDE_EFFECTS (t))
11026 return integer_zero_node;
11028 for (;;)
11029 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11030 {
11031 case tcc_unary:
11032 t = TREE_OPERAND (t, 0);
11033 break;
11035 case tcc_binary:
11036 case tcc_comparison:
11037 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11038 t = TREE_OPERAND (t, 0);
11039 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11040 t = TREE_OPERAND (t, 1);
11041 else
11042 return t;
11043 break;
11045 case tcc_expression:
11046 switch (TREE_CODE (t))
11047 {
11048 case COMPOUND_EXPR:
11049 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11050 return t;
11051 t = TREE_OPERAND (t, 0);
11052 break;
11054 case COND_EXPR:
11055 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11056 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11057 return t;
11058 t = TREE_OPERAND (t, 0);
11059 break;
11061 default:
11062 return t;
11063 }
11064 break;
11066 default:
11067 return t;
11068 }
11069 }
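/* Editor's note: a hedged example, not part of the original source.
   With the result unused, fold_ignored_result ((a + f ()) * 2) peels
   the side-effect-free multiplication and addition and reduces to the
   call f (); a tree with no side effects at all collapses to
   integer_zero_node.  */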
11071 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11072 This can only be applied to objects of a sizetype. */
11074 tree
11075 round_up (tree value, int divisor)
11076 {
11077 tree div = NULL_TREE;
11079 gcc_assert (divisor > 0);
11080 if (divisor == 1)
11081 return value;
11083 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11084 have to do anything. Only do this check when VALUE is not a
11085 constant, because for a constant the check is more expensive than
11086 simply performing the rounding. */
11087 if (TREE_CODE (value) != INTEGER_CST)
11088 {
11089 div = build_int_cst (TREE_TYPE (value), divisor);
11091 if (multiple_of_p (TREE_TYPE (value), value, div))
11092 return value;
11093 }
11095 /* If divisor is a power of two, simplify this to bit manipulation. */
11096 if (divisor == (divisor & -divisor))
11097 {
11098 tree t;
11100 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11101 value = size_binop (PLUS_EXPR, value, t);
11102 t = build_int_cst (TREE_TYPE (value), -divisor);
11103 value = size_binop (BIT_AND_EXPR, value, t);
11104 }
11105 else
11106 {
11107 if (!div)
11108 div = build_int_cst (TREE_TYPE (value), divisor);
11109 value = size_binop (CEIL_DIV_EXPR, value, div);
11110 value = size_binop (MULT_EXPR, value, div);
11111 }
11113 return value;
11114 }
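/* Editor's note: a hedged worked example, not part of the original
   source, of the two strategies in round_up:
     round_up (13, 8) -> (13 + 7) & -8      -> 16  (power of two)
     round_up (13, 6) -> ceil (13 / 6) * 6  -> 18  (general case)
   A VALUE already known to be a multiple is returned unchanged.  */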
11116 /* Likewise, but round down. */
11118 tree
11119 round_down (tree value, int divisor)
11120 {
11121 tree div = NULL_TREE;
11123 gcc_assert (divisor > 0);
11124 if (divisor == 1)
11125 return value;
11127 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11128 have to do anything. Only do this check when VALUE is not a
11129 constant, because for a constant the check is more expensive than
11130 simply performing the rounding. */
11131 if (TREE_CODE (value) != INTEGER_CST)
11132 {
11133 div = build_int_cst (TREE_TYPE (value), divisor);
11135 if (multiple_of_p (TREE_TYPE (value), value, div))
11136 return value;
11137 }
11139 /* If divisor is a power of two, simplify this to bit manipulation. */
11140 if (divisor == (divisor & -divisor))
11141 {
11142 tree t;
11144 t = build_int_cst (TREE_TYPE (value), -divisor);
11145 value = size_binop (BIT_AND_EXPR, value, t);
11146 }
11147 else
11148 {
11149 if (!div)
11150 div = build_int_cst (TREE_TYPE (value), divisor);
11151 value = size_binop (FLOOR_DIV_EXPR, value, div);
11152 value = size_binop (MULT_EXPR, value, div);
11153 }
11155 return value;
11156 }
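/* Editor's note: likewise for round_down (a hedged example, not part
   of the original source):
     round_down (13, 8) -> 13 & -8            -> 8
     round_down (13, 6) -> floor (13 / 6) * 6 -> 12  */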
11158 /* Returns the pointer to the base of the object addressed by EXP and
11159 extracts the information about the offset of the access, storing it
11160 in PBITPOS and POFFSET. */
11162 static tree
11163 split_address_to_core_and_offset (tree exp,
11164 HOST_WIDE_INT *pbitpos, tree *poffset)
11165 {
11166 tree core;
11167 enum machine_mode mode;
11168 int unsignedp, volatilep;
11169 HOST_WIDE_INT bitsize;
11171 if (TREE_CODE (exp) == ADDR_EXPR)
11172 {
11173 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11174 poffset, &mode, &unsignedp, &volatilep,
11175 false);
11177 if (TREE_CODE (core) == INDIRECT_REF)
11178 core = TREE_OPERAND (core, 0);
11179 }
11180 else
11181 {
11182 core = exp;
11183 *pbitpos = 0;
11184 *poffset = NULL_TREE;
11185 }
11187 return core;
11188 }
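/* Editor's note: a hedged example, not part of the original source.
   For EXP = &s.f, with field F at byte offset 4 in S, the function
   returns the base object S with *PBITPOS = 32 and *POFFSET =
   NULL_TREE; for a pointer value that is not an ADDR_EXPR it returns
   the pointer itself with a zero offset.  */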
11190 /* Returns true if addresses of E1 and E2 differ by a constant, false
11191 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11193 bool
11194 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11195 {
11196 tree core1, core2;
11197 HOST_WIDE_INT bitpos1, bitpos2;
11198 tree toffset1, toffset2, tdiff, type;
11200 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11201 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11203 if (bitpos1 % BITS_PER_UNIT != 0
11204 || bitpos2 % BITS_PER_UNIT != 0
11205 || !operand_equal_p (core1, core2, 0))
11206 return false;
11208 if (toffset1 && toffset2)
11209 {
11210 type = TREE_TYPE (toffset1);
11211 if (type != TREE_TYPE (toffset2))
11212 toffset2 = fold_convert (type, toffset2);
11214 tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
11215 if (!host_integerp (tdiff, 0))
11216 return false;
11218 *diff = tree_low_cst (tdiff, 0);
11219 }
11220 else if (toffset1 || toffset2)
11221 {
11222 /* If only one of the offsets is non-constant, the difference cannot
11223 be a constant. */
11224 return false;
11225 }
11226 else
11227 *diff = 0;
11229 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11230 return true;
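/* Editor's note: a hedged example, not part of the original source,
   for the function above.  With int A[10] (4-byte int),
     ptr_difference_const (&A[3], &A[1], &d)
   finds the common core A, byte offsets 12 and 4, stores the byte
   difference d = 8 and returns true; if either address involves a
   variable offset it returns false.  */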