/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as combining them with
   AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (tree, enum tree_code,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_relational_hi_lo (enum tree_code *, const tree,
                                   tree *, tree *);
static bool tree_expr_nonzero_p (tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
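
/* As a worked example (assuming a 32-bit HOST_WIDE_INT): adding
   a = b = 0x7fffffff gives sum = 0xfffffffe, which is negative even
   though both addends are positive.  Here ~(a ^ b) has the sign bit
   set (the signs of A and B agree) and (a ^ sum) also has the sign
   bit set (the signs of A and SUM differ), so their AND is negative
   and the macro reports the overflow.  */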

/* Doing constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of each original word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
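
/* For instance, assuming HOST_BITS_PER_WIDE_INT == 32, BASE is 0x10000
   and the word 0x12345678 splits as

     LOWPART (0x12345678)  == 0x5678
     HIGHPART (0x12345678) == 0x1234

   so that 0x5678 + 0x1234 * 0x10000 reconstructs the original value.
   Keeping each half-word in a full HOST_WIDE_INT leaves headroom for
   carries during the digit-by-digit arithmetic below.  */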

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
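
/* The two functions are inverses; a round trip such as (values here
   are only illustrative)

     HOST_WIDE_INT w[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;
     encode (w, 0x12345678, 0xabcd);
     decode (w, &lo, &hi);

   recovers lo == 0x12345678 and hi == 0xabcd.  */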

/* T is an INTEGER_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if,
        CONST_OVERFLOWED is nonzero,
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half?  */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half?  */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
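
/* For example (types and values here are illustrative): forcing the
   value 0x1ff into an 8-bit unsigned type clears every bit above bit 7,
   yielding 0xff.  Since the value changed, a new node is built, and
   with OVERFLOWABLE < 0 ("any overflow counts") the TREE_OVERFLOW and
   TREE_CONSTANT_OVERFLOW flags are set on the copy as well.  */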

/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
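
/* The carry out of the low word is recovered portably by the
   comparison (l < l1): the unsigned sum wrapped exactly when the
   result is smaller than an operand.  A sketch, assuming 32-bit
   HOST_WIDE_INT pieces:

     unsigned HOST_WIDE_INT lv;
     HOST_WIDE_INT hv;
     add_double (0xffffffff, 0, 1, 0, &lv, &hv);

   leaves lv == 0, hv == 1 and returns 0 (no signed overflow).  */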

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[k] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);        /* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}

/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero the extra 5th (scaling) element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1; */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
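
/* A small worked example of the rounding modes: dividing -7 by 2
   gives a trial quotient of -3 with remainder -1, so

     TRUNC_DIV_EXPR  yields quo == -3, rem == -1  (round toward zero)
     FLOOR_DIV_EXPR  yields quo == -4, rem ==  1  (toward -infinity)
     CEIL_DIV_EXPR   yields quo == -3, rem == -1  (toward +infinity)
     ROUND_DIV_EXPR  yields quo == -4, rem ==  1  (-3.5 is a halfway
                     case; ties are rounded away from zero here).  */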

/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
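
/* These are exactly the odd functions among the listed built-ins:
   sin (-x) == -sin (x), for example, so -sin (x) may be folded to
   sin (-x).  An even function such as cos, where cos (-x) == cos (x),
   would not qualify and is deliberately absent from the list.  */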

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
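
/* The single value that cannot be negated is the most negative one,
   e.g. -128 in a signed 8-bit type: its negation, +128, does not fit,
   which is why the function compares against 1 << (prec - 1).  */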

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 0)));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 1)));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                           TREE_OPERAND (t, 1),
                                           TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               TREE_OPERAND (t, 0),
                                               negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               negate_expr (tem),
                                               TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
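
/* For instance, splitting the tree for (x - 3) with CODE == PLUS_EXPR
   and NEGATE_P == 0 returns x as the variable part, leaves *CONP null,
   and stores the literal 3 in *MINUS_LITP, recording that the literal
   was subtracted rather than added.  */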

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold (build2 (code, type, fold_convert (type, t1),
                       fold_convert (type, t2)));
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
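
/* A typical use (a sketch; any two INTEGER_CST operands of the same
   type will do):

     tree seven = build_int_cst (integer_type_node, 7);
     tree five = build_int_cst (integer_type_node, 5);
     tree twelve = int_const_binop (PLUS_EXPR, seven, five, 0);

   With NOTRUNC == 0 the result is passed through force_fit_type, so
   adding 1 to the most positive value of a signed type wraps to the
   most negative value and comes back with TREE_OVERFLOW set.  */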

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set.  */

      if (flag_rounding_math
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build2 (code, type, arg0, arg1));
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
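
/* For constant operands the subtraction is always arranged so the
   unsigned MINUS_EXPR cannot wrap: with sizetype arguments 4 and 12,
   size_diffop computes 0 - (12 - 4) rather than 4 - 12 directly,
   returning the ssizetype constant -8.  */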

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }

  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
1877 /* Convert expression ARG to type TYPE. Used by the middle-end for
1878 simple conversions in preference to calling the front-end's convert. */
1880 tree
1881 fold_convert (tree type, tree arg)
1883 tree orig = TREE_TYPE (arg);
1884 tree tem;
1886 if (type == orig)
1887 return arg;
1889 if (TREE_CODE (arg) == ERROR_MARK
1890 || TREE_CODE (type) == ERROR_MARK
1891 || TREE_CODE (orig) == ERROR_MARK)
1892 return error_mark_node;
1894 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1895 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1896 TYPE_MAIN_VARIANT (orig)))
1897 return fold (build1 (NOP_EXPR, type, arg));
1899 switch (TREE_CODE (type))
1901 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1902 case POINTER_TYPE: case REFERENCE_TYPE:
1903 case OFFSET_TYPE:
1904 if (TREE_CODE (arg) == INTEGER_CST)
1906 tem = fold_convert_const (NOP_EXPR, type, arg);
1907 if (tem != NULL_TREE)
1908 return tem;
1910 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1911 || TREE_CODE (orig) == OFFSET_TYPE)
1912 return fold (build1 (NOP_EXPR, type, arg));
1913 if (TREE_CODE (orig) == COMPLEX_TYPE)
1915 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1916 return fold_convert (type, tem);
1918 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1919 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1920 return fold (build1 (NOP_EXPR, type, arg));
1922 case REAL_TYPE:
1923 if (TREE_CODE (arg) == INTEGER_CST)
1925 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1926 if (tem != NULL_TREE)
1927 return tem;
1929 else if (TREE_CODE (arg) == REAL_CST)
1931 tem = fold_convert_const (NOP_EXPR, type, arg);
1932 if (tem != NULL_TREE)
1933 return tem;
1936 switch (TREE_CODE (orig))
1938 case INTEGER_TYPE: case CHAR_TYPE:
1939 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1940 case POINTER_TYPE: case REFERENCE_TYPE:
1941 return fold (build1 (FLOAT_EXPR, type, arg));
1943 case REAL_TYPE:
1944 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1945 type, arg));
1947 case COMPLEX_TYPE:
1948 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1949 return fold_convert (type, tem);
1951 default:
1952 gcc_unreachable ();
1955 case COMPLEX_TYPE:
1956 switch (TREE_CODE (orig))
1958 case INTEGER_TYPE: case CHAR_TYPE:
1959 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1960 case POINTER_TYPE: case REFERENCE_TYPE:
1961 case REAL_TYPE:
1962 return build2 (COMPLEX_EXPR, type,
1963 fold_convert (TREE_TYPE (type), arg),
1964 fold_convert (TREE_TYPE (type), integer_zero_node));
1965 case COMPLEX_TYPE:
1967 tree rpart, ipart;
1969 if (TREE_CODE (arg) == COMPLEX_EXPR)
1971 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1972 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1973 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1976 arg = save_expr (arg);
1977 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1978 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1979 rpart = fold_convert (TREE_TYPE (type), rpart);
1980 ipart = fold_convert (TREE_TYPE (type), ipart);
1981 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1984 default:
1985 gcc_unreachable ();
1988 case VECTOR_TYPE:
1989 if (integer_zerop (arg))
1990 return build_zero_vector (type);
1991 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1992 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1993 || TREE_CODE (orig) == VECTOR_TYPE);
1994 return fold (build1 (NOP_EXPR, type, arg));
1996 case VOID_TYPE:
1997 return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
1999 default:
2000 gcc_unreachable ();
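/* Illustration -- a stand-alone sketch, not part of fold-const.c.
   The COMPLEX_TYPE handling above lowers a complex-to-scalar
   conversion to REALPART_EXPR followed by an ordinary conversion,
   which is also C's own rule: converting _Complex to a real type
   keeps the real part and discards the imaginary part.  */

#include <assert.h>
#include <complex.h>

int
main (void)
{
  double _Complex z = 3.0 + 4.0 * I;

  double d = (double) z;   /* the REALPART_EXPR step          */
  int i = (int) z;         /* real part, then real-to-integer */

  assert (d == 3.0 && i == 3);
  return 0;
}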
2004 /* Return an expr equal to X but certainly not valid as an lvalue. */
2006 tree
2007 non_lvalue (tree x)
2009 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2010 us. */
2011 if (in_gimple_form)
2012 return x;
2014 /* We only need to wrap lvalue tree codes. */
2015 switch (TREE_CODE (x))
2017 case VAR_DECL:
2018 case PARM_DECL:
2019 case RESULT_DECL:
2020 case LABEL_DECL:
2021 case FUNCTION_DECL:
2022 case SSA_NAME:
2024 case COMPONENT_REF:
2025 case INDIRECT_REF:
2026 case ALIGN_INDIRECT_REF:
2027 case MISALIGNED_INDIRECT_REF:
2028 case ARRAY_REF:
2029 case ARRAY_RANGE_REF:
2030 case BIT_FIELD_REF:
2031 case OBJ_TYPE_REF:
2033 case REALPART_EXPR:
2034 case IMAGPART_EXPR:
2035 case PREINCREMENT_EXPR:
2036 case PREDECREMENT_EXPR:
2037 case SAVE_EXPR:
2038 case TRY_CATCH_EXPR:
2039 case WITH_CLEANUP_EXPR:
2040 case COMPOUND_EXPR:
2041 case MODIFY_EXPR:
2042 case TARGET_EXPR:
2043 case COND_EXPR:
2044 case BIND_EXPR:
2045 case MIN_EXPR:
2046 case MAX_EXPR:
2047 break;
2049 default:
2050 /* Assume the worst for front-end tree codes. */
2051 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2052 break;
2053 return x;
2055 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2058 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2059 Zero means allow extended lvalues. */
2061 int pedantic_lvalues;
2063 /* When pedantic, return an expr equal to X but certainly not valid as a
2064 pedantic lvalue. Otherwise, return X. */
2066 static tree
2067 pedantic_non_lvalue (tree x)
2069 if (pedantic_lvalues)
2070 return non_lvalue (x);
2071 else
2072 return x;
2075 /* Given a tree comparison code, return the code that is the logical inverse
2076 of the given code. It is not safe to do this for floating-point
2077 comparisons, except for NE_EXPR and EQ_EXPR, so we take a HONOR_NANS
2078 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2080 static enum tree_code
2081 invert_tree_comparison (enum tree_code code, bool honor_nans)
2083 if (honor_nans && flag_trapping_math)
2084 return ERROR_MARK;
2086 switch (code)
2088 case EQ_EXPR:
2089 return NE_EXPR;
2090 case NE_EXPR:
2091 return EQ_EXPR;
2092 case GT_EXPR:
2093 return honor_nans ? UNLE_EXPR : LE_EXPR;
2094 case GE_EXPR:
2095 return honor_nans ? UNLT_EXPR : LT_EXPR;
2096 case LT_EXPR:
2097 return honor_nans ? UNGE_EXPR : GE_EXPR;
2098 case LE_EXPR:
2099 return honor_nans ? UNGT_EXPR : GT_EXPR;
2100 case LTGT_EXPR:
2101 return UNEQ_EXPR;
2102 case UNEQ_EXPR:
2103 return LTGT_EXPR;
2104 case UNGT_EXPR:
2105 return LE_EXPR;
2106 case UNGE_EXPR:
2107 return LT_EXPR;
2108 case UNLT_EXPR:
2109 return GE_EXPR;
2110 case UNLE_EXPR:
2111 return GT_EXPR;
2112 case ORDERED_EXPR:
2113 return UNORDERED_EXPR;
2114 case UNORDERED_EXPR:
2115 return ORDERED_EXPR;
2116 default:
2117 gcc_unreachable ();
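/* Illustration -- a stand-alone sketch, not part of fold-const.c.
   Why inverting a floating-point comparison needs HONOR_NANS: with a
   NaN operand both x < y and x >= y are false, so GE_EXPR is not the
   logical inverse of LT_EXPR; the inverse is UNGE_EXPR (spelled here
   with C99's !isless).  */

#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = NAN, y = 1.0;

  assert (!(x < y));                    /* LT_EXPR is false on a NaN ...   */
  assert (!(x >= y));                   /* ... but so is GE_EXPR,          */
  assert (!(x < y) != (x >= y));        /* so GE is not the inverse of LT. */
  assert (!(x < y) == !isless (x, y));  /* UNGE_EXPR is.                   */
  return 0;
}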
2121 /* Similar, but return the comparison that results if the operands are
2122 swapped. This is safe for floating-point. */
2124 enum tree_code
2125 swap_tree_comparison (enum tree_code code)
2127 switch (code)
2129 case EQ_EXPR:
2130 case NE_EXPR:
2131 return code;
2132 case GT_EXPR:
2133 return LT_EXPR;
2134 case GE_EXPR:
2135 return LE_EXPR;
2136 case LT_EXPR:
2137 return GT_EXPR;
2138 case LE_EXPR:
2139 return GE_EXPR;
2140 default:
2141 gcc_unreachable ();
2146 /* Convert a comparison tree code from an enum tree_code representation
2147 into a compcode bit-based encoding. This function is the inverse of
2148 compcode_to_comparison. */
2150 static enum comparison_code
2151 comparison_to_compcode (enum tree_code code)
2153 switch (code)
2155 case LT_EXPR:
2156 return COMPCODE_LT;
2157 case EQ_EXPR:
2158 return COMPCODE_EQ;
2159 case LE_EXPR:
2160 return COMPCODE_LE;
2161 case GT_EXPR:
2162 return COMPCODE_GT;
2163 case NE_EXPR:
2164 return COMPCODE_NE;
2165 case GE_EXPR:
2166 return COMPCODE_GE;
2167 case ORDERED_EXPR:
2168 return COMPCODE_ORD;
2169 case UNORDERED_EXPR:
2170 return COMPCODE_UNORD;
2171 case UNLT_EXPR:
2172 return COMPCODE_UNLT;
2173 case UNEQ_EXPR:
2174 return COMPCODE_UNEQ;
2175 case UNLE_EXPR:
2176 return COMPCODE_UNLE;
2177 case UNGT_EXPR:
2178 return COMPCODE_UNGT;
2179 case LTGT_EXPR:
2180 return COMPCODE_LTGT;
2181 case UNGE_EXPR:
2182 return COMPCODE_UNGE;
2183 default:
2184 gcc_unreachable ();
2188 /* Convert a compcode bit-based encoding of a comparison operator back
2189 to GCC's enum tree_code representation. This function is the
2190 inverse of comparison_to_compcode. */
2192 static enum tree_code
2193 compcode_to_comparison (enum comparison_code code)
2195 switch (code)
2197 case COMPCODE_LT:
2198 return LT_EXPR;
2199 case COMPCODE_EQ:
2200 return EQ_EXPR;
2201 case COMPCODE_LE:
2202 return LE_EXPR;
2203 case COMPCODE_GT:
2204 return GT_EXPR;
2205 case COMPCODE_NE:
2206 return NE_EXPR;
2207 case COMPCODE_GE:
2208 return GE_EXPR;
2209 case COMPCODE_ORD:
2210 return ORDERED_EXPR;
2211 case COMPCODE_UNORD:
2212 return UNORDERED_EXPR;
2213 case COMPCODE_UNLT:
2214 return UNLT_EXPR;
2215 case COMPCODE_UNEQ:
2216 return UNEQ_EXPR;
2217 case COMPCODE_UNLE:
2218 return UNLE_EXPR;
2219 case COMPCODE_UNGT:
2220 return UNGT_EXPR;
2221 case COMPCODE_LTGT:
2222 return LTGT_EXPR;
2223 case COMPCODE_UNGE:
2224 return UNGE_EXPR;
2225 default:
2226 gcc_unreachable ();
2230 /* Return a tree for the comparison which is the combination of
2231 doing the AND or OR (depending on CODE) of the two operations LCODE
2232 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2233 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2234 if this makes the transformation invalid. */
2236 tree
2237 combine_comparisons (enum tree_code code, enum tree_code lcode,
2238 enum tree_code rcode, tree truth_type,
2239 tree ll_arg, tree lr_arg)
2241 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2242 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2243 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2244 enum comparison_code compcode;
2246 switch (code)
2248 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2249 compcode = lcompcode & rcompcode;
2250 break;
2252 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2253 compcode = lcompcode | rcompcode;
2254 break;
2256 default:
2257 return NULL_TREE;
2260 if (!honor_nans)
2262 /* Eliminate unordered comparisons, as well as LTGT and ORD
2263 which are not used unless the mode has NaNs. */
2264 compcode &= ~COMPCODE_UNORD;
2265 if (compcode == COMPCODE_LTGT)
2266 compcode = COMPCODE_NE;
2267 else if (compcode == COMPCODE_ORD)
2268 compcode = COMPCODE_TRUE;
2270 else if (flag_trapping_math)
2272 /* Check that the original operation and the optimized ones will trap
2273 under the same condition. */
2274 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2275 && (lcompcode != COMPCODE_EQ)
2276 && (lcompcode != COMPCODE_ORD);
2277 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2278 && (rcompcode != COMPCODE_EQ)
2279 && (rcompcode != COMPCODE_ORD);
2280 bool trap = (compcode & COMPCODE_UNORD) == 0
2281 && (compcode != COMPCODE_EQ)
2282 && (compcode != COMPCODE_ORD);
2284 /* In a short-circuited boolean expression the LHS might be
2285 such that the RHS, if evaluated, will never trap. For
2286 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2287 if neither x nor y is NaN. (This is a mixed blessing: for
2288 example, the expression above will never trap, hence
2289 optimizing it to x < y would be invalid). */
2290 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2291 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2292 rtrap = false;
2294 /* If the comparison was short-circuited, and only the RHS
2295 trapped, we may now generate a spurious trap. */
2296 if (rtrap && !ltrap
2297 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2298 return NULL_TREE;
2300 /* If we changed the conditions that cause a trap, we lose. */
2301 if ((ltrap || rtrap) != trap)
2302 return NULL_TREE;
2305 if (compcode == COMPCODE_TRUE)
2306 return constant_boolean_node (true, truth_type);
2307 else if (compcode == COMPCODE_FALSE)
2308 return constant_boolean_node (false, truth_type);
2309 else
2310 return fold (build2 (compcode_to_comparison (compcode),
2311 truth_type, ll_arg, lr_arg));
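/* Illustration -- a stand-alone sketch, not part of fold-const.c;
   the names below are invented for the example.  The compcode
   encoding turns combining comparisons into bit arithmetic: bit 0 is
   "less", bit 1 "equal", bit 2 "greater" (bit 3, not modeled here,
   is "unordered").  AND-ing two predicates on the same operands is
   AND-ing their masks, e.g. LE & GE == (1|2) & (2|4) == 2 == EQ.  */

#include <assert.h>

enum cc { CC_LT = 1, CC_EQ = 2, CC_GT = 4 };

/* Evaluate the encoded comparison MASK on ordered integers X, Y.  */
static int
cc_eval (int mask, int x, int y)
{
  return ((x < y ? CC_LT : x > y ? CC_GT : CC_EQ) & mask) != 0;
}

int
main (void)
{
  int x, y;

  for (x = -2; x <= 2; x++)
    for (y = -2; y <= 2; y++)
      {
        /* (x <= y) && (x >= y) combines to the AND of the masks,
           which is exactly CC_EQ.  */
        assert ((cc_eval (CC_LT | CC_EQ, x, y)
                 && cc_eval (CC_EQ | CC_GT, x, y))
                == cc_eval ((CC_LT | CC_EQ) & (CC_EQ | CC_GT), x, y));

        /* (x < y) || (x == y) combines to the OR of the masks.  */
        assert ((cc_eval (CC_LT, x, y) || cc_eval (CC_EQ, x, y))
                == cc_eval (CC_LT | CC_EQ, x, y));
      }
  return 0;
}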
2314 /* Return nonzero if CODE is a tree code that represents a truth value. */
2316 static int
2317 truth_value_p (enum tree_code code)
2319 return (TREE_CODE_CLASS (code) == tcc_comparison
2320 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2321 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2322 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2325 /* Return nonzero if two operands (typically of the same tree node)
2326 are necessarily equal. If either argument has side-effects this
2327 function returns zero. FLAGS modifies behavior as follows:
2329 If OEP_ONLY_CONST is set, only return nonzero for constants.
2330 This function tests whether the operands are indistinguishable;
2331 it does not test whether they are equal using C's == operation.
2332 The distinction is important for IEEE floating point, because
2333 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2334 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2336 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2337 even though it may hold multiple values during a function.
2338 This is because a GCC tree node guarantees that nothing else is
2339 executed between the evaluation of its "operands" (which may often
2340 be evaluated in arbitrary order). Hence if the operands themselves
2341 have no side effects, the VAR_DECLs, PARM_DECLs, etc. must hold the
2342 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2343 unset means assuming isochronic (or instantaneous) tree equivalence.
2344 Unless comparing arbitrary expression trees, such as from different
2345 statements, this flag can usually be left unset.
2347 If OEP_PURE_SAME is set, then pure functions with identical arguments
2348 are considered the same. It is used when the caller has other ways
2349 to ensure that global memory is unchanged in between. */
2351 int
2352 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2354 /* If either is ERROR_MARK, they aren't equal. */
2355 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2356 return 0;
2358 /* If the two types don't have the same signedness, then we can't consider
2359 them equal. We must check this before the STRIP_NOPS calls
2360 because they may change the signedness of the arguments. */
2361 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2362 return 0;
2364 STRIP_NOPS (arg0);
2365 STRIP_NOPS (arg1);
2367 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2368 /* This is needed for conversions and for COMPONENT_REF.
2369 Might as well play it safe and always test this. */
2370 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2371 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2372 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2373 return 0;
2375 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2376 We don't care about side effects in that case because the SAVE_EXPR
2377 takes care of that for us. In all other cases, two expressions are
2378 equal if they have no side effects. If we have two identical
2379 expressions with side effects that should be treated the same due
2380 to the only side effects being identical SAVE_EXPR's, that will
2381 be detected in the recursive calls below. */
2382 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2383 && (TREE_CODE (arg0) == SAVE_EXPR
2384 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2385 return 1;
2387 /* Next handle constant cases, those for which we can return 1 even
2388 if ONLY_CONST is set. */
2389 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2390 switch (TREE_CODE (arg0))
2392 case INTEGER_CST:
2393 return (! TREE_CONSTANT_OVERFLOW (arg0)
2394 && ! TREE_CONSTANT_OVERFLOW (arg1)
2395 && tree_int_cst_equal (arg0, arg1));
2397 case REAL_CST:
2398 return (! TREE_CONSTANT_OVERFLOW (arg0)
2399 && ! TREE_CONSTANT_OVERFLOW (arg1)
2400 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2401 TREE_REAL_CST (arg1)));
2403 case VECTOR_CST:
2405 tree v1, v2;
2407 if (TREE_CONSTANT_OVERFLOW (arg0)
2408 || TREE_CONSTANT_OVERFLOW (arg1))
2409 return 0;
2411 v1 = TREE_VECTOR_CST_ELTS (arg0);
2412 v2 = TREE_VECTOR_CST_ELTS (arg1);
2413 while (v1 && v2)
2415 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2416 flags))
2417 return 0;
2418 v1 = TREE_CHAIN (v1);
2419 v2 = TREE_CHAIN (v2);
2422 return 1;
2425 case COMPLEX_CST:
2426 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2427 flags)
2428 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2429 flags));
2431 case STRING_CST:
2432 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2433 && ! memcmp (TREE_STRING_POINTER (arg0),
2434 TREE_STRING_POINTER (arg1),
2435 TREE_STRING_LENGTH (arg0)));
2437 case ADDR_EXPR:
2438 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2439 0);
2440 default:
2441 break;
2444 if (flags & OEP_ONLY_CONST)
2445 return 0;
2447 /* Define macros to test an operand from arg0 and arg1 for equality and a
2448 variant that allows null and views null as being different from any
2449 non-null value. In the latter case, if either is null, both
2450 must be; otherwise, do the normal comparison. */
2451 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2452 TREE_OPERAND (arg1, N), flags)
2454 #define OP_SAME_WITH_NULL(N) \
2455 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2456 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2458 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2460 case tcc_unary:
2461 /* Two conversions are equal only if signedness and modes match. */
2462 switch (TREE_CODE (arg0))
2464 case NOP_EXPR:
2465 case CONVERT_EXPR:
2466 case FIX_CEIL_EXPR:
2467 case FIX_TRUNC_EXPR:
2468 case FIX_FLOOR_EXPR:
2469 case FIX_ROUND_EXPR:
2470 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2471 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2472 return 0;
2473 break;
2474 default:
2475 break;
2478 return OP_SAME (0);
2481 case tcc_comparison:
2482 case tcc_binary:
2483 if (OP_SAME (0) && OP_SAME (1))
2484 return 1;
2486 /* For commutative ops, allow the other order. */
2487 return (commutative_tree_code (TREE_CODE (arg0))
2488 && operand_equal_p (TREE_OPERAND (arg0, 0),
2489 TREE_OPERAND (arg1, 1), flags)
2490 && operand_equal_p (TREE_OPERAND (arg0, 1),
2491 TREE_OPERAND (arg1, 0), flags));
2493 case tcc_reference:
2494 /* If either of the pointer (or reference) expressions we are
2495 dereferencing contains a side effect, these cannot be equal. */
2496 if (TREE_SIDE_EFFECTS (arg0)
2497 || TREE_SIDE_EFFECTS (arg1))
2498 return 0;
2500 switch (TREE_CODE (arg0))
2502 case INDIRECT_REF:
2503 case ALIGN_INDIRECT_REF:
2504 case MISALIGNED_INDIRECT_REF:
2505 case REALPART_EXPR:
2506 case IMAGPART_EXPR:
2507 return OP_SAME (0);
2509 case ARRAY_REF:
2510 case ARRAY_RANGE_REF:
2511 /* Operands 2 and 3 may be null. */
2512 return (OP_SAME (0)
2513 && OP_SAME (1)
2514 && OP_SAME_WITH_NULL (2)
2515 && OP_SAME_WITH_NULL (3));
2517 case COMPONENT_REF:
2518 /* Handle operand 2 the same as for ARRAY_REF. */
2519 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2521 case BIT_FIELD_REF:
2522 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2524 default:
2525 return 0;
2528 case tcc_expression:
2529 switch (TREE_CODE (arg0))
2531 case ADDR_EXPR:
2532 case TRUTH_NOT_EXPR:
2533 return OP_SAME (0);
2535 case TRUTH_ANDIF_EXPR:
2536 case TRUTH_ORIF_EXPR:
2537 return OP_SAME (0) && OP_SAME (1);
2539 case TRUTH_AND_EXPR:
2540 case TRUTH_OR_EXPR:
2541 case TRUTH_XOR_EXPR:
2542 if (OP_SAME (0) && OP_SAME (1))
2543 return 1;
2545 /* Otherwise take into account this is a commutative operation. */
2546 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2547 TREE_OPERAND (arg1, 1), flags)
2548 && operand_equal_p (TREE_OPERAND (arg0, 1),
2549 TREE_OPERAND (arg1, 0), flags));
2551 case CALL_EXPR:
2552 /* If the CALL_EXPRs call different functions, then they
2553 clearly cannot be equal. */
2554 if (!OP_SAME (0))
2555 return 0;
2558 unsigned int cef = call_expr_flags (arg0);
2559 if (flags & OEP_PURE_SAME)
2560 cef &= ECF_CONST | ECF_PURE;
2561 else
2562 cef &= ECF_CONST;
2563 if (!cef)
2564 return 0;
2567 /* Now see if all the arguments are the same. operand_equal_p
2568 does not handle TREE_LIST, so we walk the operands here
2569 feeding them to operand_equal_p. */
2570 arg0 = TREE_OPERAND (arg0, 1);
2571 arg1 = TREE_OPERAND (arg1, 1);
2572 while (arg0 && arg1)
2574 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2575 flags))
2576 return 0;
2578 arg0 = TREE_CHAIN (arg0);
2579 arg1 = TREE_CHAIN (arg1);
2582 /* If we get here and both argument lists are exhausted
2583 then the CALL_EXPRs are equal. */
2584 return ! (arg0 || arg1);
2586 default:
2587 return 0;
2590 case tcc_declaration:
2591 /* Consider __builtin_sqrt equal to sqrt. */
2592 return (TREE_CODE (arg0) == FUNCTION_DECL
2593 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2594 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2595 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2597 default:
2598 return 0;
2601 #undef OP_SAME
2602 #undef OP_SAME_WITH_NULL
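/* Illustration -- a stand-alone sketch, not part of fold-const.c.
   Why the constant cases above use bit-level identity
   (REAL_VALUES_IDENTICAL) rather than C's "==": -0.0 and 0.0 compare
   equal yet are distinguishable, while a NaN compares unequal to a
   NaN copied from it.  */

#include <assert.h>
#include <math.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0, qnan = NAN, qnan2 = qnan;

  assert (pz == nz);                      /* "==" says equal ...          */
  assert (signbit (nz) && !signbit (pz)); /* ... yet the values are
                                             distinguishable.             */

  assert (qnan != qnan2);                 /* "==" says unequal, although
                                             the copy is (on common
                                             targets) bit-identical.      */
  return 0;
}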
2605 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2606 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2608 When in doubt, return 0. */
2610 static int
2611 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2613 int unsignedp1, unsignedpo;
2614 tree primarg0, primarg1, primother;
2615 unsigned int correct_width;
2617 if (operand_equal_p (arg0, arg1, 0))
2618 return 1;
2620 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2621 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2622 return 0;
2624 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2625 and see if the inner values are the same. This removes any
2626 signedness comparison, which doesn't matter here. */
2627 primarg0 = arg0, primarg1 = arg1;
2628 STRIP_NOPS (primarg0);
2629 STRIP_NOPS (primarg1);
2630 if (operand_equal_p (primarg0, primarg1, 0))
2631 return 1;
2633 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2634 actual comparison operand, ARG0.
2636 First throw away any conversions to wider types
2637 already present in the operands. */
2639 primarg1 = get_narrower (arg1, &unsignedp1);
2640 primother = get_narrower (other, &unsignedpo);
2642 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2643 if (unsignedp1 == unsignedpo
2644 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2645 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2647 tree type = TREE_TYPE (arg0);
2649 /* Make sure shorter operand is extended the right way
2650 to match the longer operand. */
2651 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2652 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2654 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2655 return 1;
2658 return 0;
2661 /* See if ARG is an expression that is either a comparison or is performing
2662 arithmetic on comparisons. The comparisons must only be comparing
2663 two different values, which will be stored in *CVAL1 and *CVAL2; if
2664 they are nonzero it means that some operands have already been found.
2665 No variables may be used anywhere else in the expression except in the
2666 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2667 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2669 If this is true, return 1. Otherwise, return zero. */
2671 static int
2672 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2674 enum tree_code code = TREE_CODE (arg);
2675 enum tree_code_class class = TREE_CODE_CLASS (code);
2677 /* We can handle some of the tcc_expression cases here. */
2678 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2679 class = tcc_unary;
2680 else if (class == tcc_expression
2681 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2682 || code == COMPOUND_EXPR))
2683 class = tcc_binary;
2685 else if (class == tcc_expression && code == SAVE_EXPR
2686 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2688 /* If we've already found a CVAL1 or CVAL2, this expression is
2689 too complex to handle. */
2690 if (*cval1 || *cval2)
2691 return 0;
2693 class = tcc_unary;
2694 *save_p = 1;
2697 switch (class)
2699 case tcc_unary:
2700 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2702 case tcc_binary:
2703 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2704 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2705 cval1, cval2, save_p));
2707 case tcc_constant:
2708 return 1;
2710 case tcc_expression:
2711 if (code == COND_EXPR)
2712 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2713 cval1, cval2, save_p)
2714 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2715 cval1, cval2, save_p)
2716 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2717 cval1, cval2, save_p));
2718 return 0;
2720 case tcc_comparison:
2721 /* First see if we can handle the first operand, then the second. For
2722 the second operand, we know *CVAL1 can't be zero. It must be that
2723 one side of the comparison is each of the values; test for the
2724 case where this isn't true by failing if the two operands
2725 are the same. */
2727 if (operand_equal_p (TREE_OPERAND (arg, 0),
2728 TREE_OPERAND (arg, 1), 0))
2729 return 0;
2731 if (*cval1 == 0)
2732 *cval1 = TREE_OPERAND (arg, 0);
2733 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2734 ;
2735 else if (*cval2 == 0)
2736 *cval2 = TREE_OPERAND (arg, 0);
2737 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2738 ;
2739 else
2740 return 0;
2742 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2743 ;
2744 else if (*cval2 == 0)
2745 *cval2 = TREE_OPERAND (arg, 1);
2746 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2747 ;
2748 else
2749 return 0;
2751 return 1;
2753 default:
2754 return 0;
2758 /* ARG is a tree that is known to contain just arithmetic operations and
2759 comparisons. Evaluate the operations in the tree substituting NEW0 for
2760 any occurrence of OLD0 as an operand of a comparison and likewise for
2761 NEW1 and OLD1. */
2763 static tree
2764 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2766 tree type = TREE_TYPE (arg);
2767 enum tree_code code = TREE_CODE (arg);
2768 enum tree_code_class class = TREE_CODE_CLASS (code);
2770 /* We can handle some of the tcc_expression cases here. */
2771 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2772 class = tcc_unary;
2773 else if (class == tcc_expression
2774 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2775 class = tcc_binary;
2777 switch (class)
2779 case tcc_unary:
2780 return fold (build1 (code, type,
2781 eval_subst (TREE_OPERAND (arg, 0),
2782 old0, new0, old1, new1)));
2784 case tcc_binary:
2785 return fold (build2 (code, type,
2786 eval_subst (TREE_OPERAND (arg, 0),
2787 old0, new0, old1, new1),
2788 eval_subst (TREE_OPERAND (arg, 1),
2789 old0, new0, old1, new1)));
2791 case tcc_expression:
2792 switch (code)
2794 case SAVE_EXPR:
2795 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2797 case COMPOUND_EXPR:
2798 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2800 case COND_EXPR:
2801 return fold (build3 (code, type,
2802 eval_subst (TREE_OPERAND (arg, 0),
2803 old0, new0, old1, new1),
2804 eval_subst (TREE_OPERAND (arg, 1),
2805 old0, new0, old1, new1),
2806 eval_subst (TREE_OPERAND (arg, 2),
2807 old0, new0, old1, new1)));
2808 default:
2809 break;
2811 /* Fall through - ??? */
2813 case tcc_comparison:
2815 tree arg0 = TREE_OPERAND (arg, 0);
2816 tree arg1 = TREE_OPERAND (arg, 1);
2818 /* We need to check both for exact equality and tree equality. The
2819 former will be true if the operand has a side-effect. In that
2820 case, we know the operand occurred exactly once. */
2822 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2823 arg0 = new0;
2824 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2825 arg0 = new1;
2827 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2828 arg1 = new0;
2829 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2830 arg1 = new1;
2832 return fold (build2 (code, type, arg0, arg1));
2835 default:
2836 return arg;
2840 /* Return a tree for the case when the result of an expression is RESULT
2841 converted to TYPE and OMITTED was previously an operand of the expression
2842 but is now not needed (e.g., we folded OMITTED * 0).
2844 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2845 the conversion of RESULT to TYPE. */
2847 tree
2848 omit_one_operand (tree type, tree result, tree omitted)
2850 tree t = fold_convert (type, result);
2852 if (TREE_SIDE_EFFECTS (omitted))
2853 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2855 return non_lvalue (t);
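/* Illustration -- a stand-alone sketch, not part of fold-const.c;
   the function f and its call counter are invented for the example.
   When a fold such as "x * 0 -> 0" discards an operand with side
   effects, omit_one_operand keeps the evaluation alive in a
   COMPOUND_EXPR, i.e. what the comma operator expresses in C.  */

#include <assert.h>

static int calls;

static int
f (void)
{
  calls++;
  return 42;
}

int
main (void)
{
  /* Conceptually what the folder produces for "f () * 0": the call
     is still evaluated, the value is the constant.  */
  int r = (f (), 0);

  assert (r == 0 && calls == 1);
  return 0;
}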
2858 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2860 static tree
2861 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2863 tree t = fold_convert (type, result);
2865 if (TREE_SIDE_EFFECTS (omitted))
2866 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2868 return pedantic_non_lvalue (t);
2871 /* Return a tree for the case when the result of an expression is RESULT
2872 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2873 of the expression but are now not needed.
2875 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2876 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2877 evaluated before OMITTED2. Otherwise, if neither has side effects,
2878 just do the conversion of RESULT to TYPE. */
2880 tree
2881 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2883 tree t = fold_convert (type, result);
2885 if (TREE_SIDE_EFFECTS (omitted2))
2886 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2887 if (TREE_SIDE_EFFECTS (omitted1))
2888 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2890 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2894 /* Return a simplified tree node for the truth-negation of ARG. This
2895 never alters ARG itself. We assume that ARG is an operation that
2896 returns a truth value (0 or 1).
2898 FIXME: one would think we would fold the result, but it causes
2899 problems with the dominator optimizer. */
2900 tree
2901 invert_truthvalue (tree arg)
2903 tree type = TREE_TYPE (arg);
2904 enum tree_code code = TREE_CODE (arg);
2906 if (code == ERROR_MARK)
2907 return arg;
2909 /* If this is a comparison, we can simply invert it, except for
2910 floating-point non-equality comparisons, in which case we just
2911 enclose a TRUTH_NOT_EXPR around what we have. */
2913 if (TREE_CODE_CLASS (code) == tcc_comparison)
2915 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2916 if (FLOAT_TYPE_P (op_type)
2917 && flag_trapping_math
2918 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2919 && code != NE_EXPR && code != EQ_EXPR)
2920 return build1 (TRUTH_NOT_EXPR, type, arg);
2921 else
2923 code = invert_tree_comparison (code,
2924 HONOR_NANS (TYPE_MODE (op_type)));
2925 if (code == ERROR_MARK)
2926 return build1 (TRUTH_NOT_EXPR, type, arg);
2927 else
2928 return build2 (code, type,
2929 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2933 switch (code)
2935 case INTEGER_CST:
2936 return fold_convert (type,
2937 build_int_cst (NULL_TREE, integer_zerop (arg)));
2939 case TRUTH_AND_EXPR:
2940 return build2 (TRUTH_OR_EXPR, type,
2941 invert_truthvalue (TREE_OPERAND (arg, 0)),
2942 invert_truthvalue (TREE_OPERAND (arg, 1)));
2944 case TRUTH_OR_EXPR:
2945 return build2 (TRUTH_AND_EXPR, type,
2946 invert_truthvalue (TREE_OPERAND (arg, 0)),
2947 invert_truthvalue (TREE_OPERAND (arg, 1)));
2949 case TRUTH_XOR_EXPR:
2950 /* Here we can invert either operand. We invert the first operand
2951 unless the second operand is a TRUTH_NOT_EXPR in which case our
2952 result is the XOR of the first operand with the inside of the
2953 negation of the second operand. */
2955 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2956 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2957 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2958 else
2959 return build2 (TRUTH_XOR_EXPR, type,
2960 invert_truthvalue (TREE_OPERAND (arg, 0)),
2961 TREE_OPERAND (arg, 1));
2963 case TRUTH_ANDIF_EXPR:
2964 return build2 (TRUTH_ORIF_EXPR, type,
2965 invert_truthvalue (TREE_OPERAND (arg, 0)),
2966 invert_truthvalue (TREE_OPERAND (arg, 1)));
2968 case TRUTH_ORIF_EXPR:
2969 return build2 (TRUTH_ANDIF_EXPR, type,
2970 invert_truthvalue (TREE_OPERAND (arg, 0)),
2971 invert_truthvalue (TREE_OPERAND (arg, 1)));
2973 case TRUTH_NOT_EXPR:
2974 return TREE_OPERAND (arg, 0);
2976 case COND_EXPR:
2977 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2978 invert_truthvalue (TREE_OPERAND (arg, 1)),
2979 invert_truthvalue (TREE_OPERAND (arg, 2)));
2981 case COMPOUND_EXPR:
2982 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2983 invert_truthvalue (TREE_OPERAND (arg, 1)));
2985 case NON_LVALUE_EXPR:
2986 return invert_truthvalue (TREE_OPERAND (arg, 0));
2988 case NOP_EXPR:
2989 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2990 break;
2992 case CONVERT_EXPR:
2993 case FLOAT_EXPR:
2994 return build1 (TREE_CODE (arg), type,
2995 invert_truthvalue (TREE_OPERAND (arg, 0)));
2997 case BIT_AND_EXPR:
2998 if (!integer_onep (TREE_OPERAND (arg, 1)))
2999 break;
3000 return build2 (EQ_EXPR, type, arg,
3001 fold_convert (type, integer_zero_node));
3003 case SAVE_EXPR:
3004 return build1 (TRUTH_NOT_EXPR, type, arg);
3006 case CLEANUP_POINT_EXPR:
3007 return build1 (CLEANUP_POINT_EXPR, type,
3008 invert_truthvalue (TREE_OPERAND (arg, 0)));
3010 default:
3011 break;
3013 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3014 return build1 (TRUTH_NOT_EXPR, type, arg);
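/* Illustration -- a stand-alone sketch, not part of fold-const.c.
   For integral truth values, the rewrites above are De Morgan's laws
   plus arm-wise inversion of COND_EXPR (the floating-point comparison
   cases instead punt to TRUTH_NOT_EXPR, as noted above).  */

#include <assert.h>

int
main (void)
{
  int a, b, c;

  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      for (c = 0; c <= 1; c++)
        {
          assert (!(a && b) == (!a || !b));       /* TRUTH_ANDIF -> TRUTH_ORIF */
          assert (!(a || b) == (!a && !b));       /* TRUTH_ORIF -> TRUTH_ANDIF */
          assert (!(a ? b : c) == (a ? !b : !c)); /* invert COND_EXPR arms     */
        }
  return 0;
}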
3017 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3018 operands are another bit-wise operation with a common input. If so,
3019 distribute the bit operations to save an operation and possibly two if
3020 constants are involved. For example, convert
3021 (A | B) & (A | C) into A | (B & C)
3022 Further simplification will occur if B and C are constants.
3024 If this optimization cannot be done, 0 will be returned. */
3026 static tree
3027 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3029 tree common;
3030 tree left, right;
3032 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3033 || TREE_CODE (arg0) == code
3034 || (TREE_CODE (arg0) != BIT_AND_EXPR
3035 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3036 return 0;
3038 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3040 common = TREE_OPERAND (arg0, 0);
3041 left = TREE_OPERAND (arg0, 1);
3042 right = TREE_OPERAND (arg1, 1);
3044 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3046 common = TREE_OPERAND (arg0, 0);
3047 left = TREE_OPERAND (arg0, 1);
3048 right = TREE_OPERAND (arg1, 0);
3050 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3052 common = TREE_OPERAND (arg0, 1);
3053 left = TREE_OPERAND (arg0, 0);
3054 right = TREE_OPERAND (arg1, 1);
3056 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3058 common = TREE_OPERAND (arg0, 1);
3059 left = TREE_OPERAND (arg0, 0);
3060 right = TREE_OPERAND (arg1, 0);
3062 else
3063 return 0;
3065 return fold (build2 (TREE_CODE (arg0), type, common,
3066 fold (build2 (code, type, left, right))));
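/* Illustration -- a stand-alone sketch, not part of fold-const.c.
   An exhaustive 8-bit check of the identity distribute_bit_expr
   relies on, (A | B) & (A | C) == A | (B & C), together with its
   dual with & and | exchanged.  */

#include <assert.h>

int
main (void)
{
  unsigned a, b, c;

  for (a = 0; a < 256; a++)
    for (b = 0; b < 256; b++)
      for (c = 0; c < 256; c++)
        {
          assert (((a | b) & (a | c)) == (a | (b & c)));
          assert (((a & b) | (a & c)) == (a & (b | c)));
        }
  return 0;
}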
3069 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3070 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3072 static tree
3073 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3074 int unsignedp)
3076 tree result = build3 (BIT_FIELD_REF, type, inner,
3077 size_int (bitsize), bitsize_int (bitpos));
3079 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3081 return result;
3084 /* Optimize a bit-field compare.
3086 There are two cases: First is a compare against a constant and the
3087 second is a comparison of two items where the fields are at the same
3088 bit position relative to the start of a chunk (byte, halfword, word)
3089 large enough to contain it. In these cases we can avoid the shift
3090 implicit in bitfield extractions.
3092 For constants, we emit a compare of the shifted constant with the
3093 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3094 compared. For two fields at the same position, we do the ANDs with the
3095 similar mask and compare the result of the ANDs.
3097 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3098 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3099 are the left and right operands of the comparison, respectively.
3101 If the optimization described above can be done, we return the resulting
3102 tree. Otherwise we return zero. */
3104 static tree
3105 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3106 tree lhs, tree rhs)
3108 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3109 tree type = TREE_TYPE (lhs);
3110 tree signed_type, unsigned_type;
3111 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3112 enum machine_mode lmode, rmode, nmode;
3113 int lunsignedp, runsignedp;
3114 int lvolatilep = 0, rvolatilep = 0;
3115 tree linner, rinner = NULL_TREE;
3116 tree mask;
3117 tree offset;
3119 /* Get all the information about the extractions being done. If the bit size
3120 is the same as the size of the underlying object, we aren't doing an
3121 extraction at all and so can do nothing. We also don't want to
3122 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3123 then will no longer be able to replace it. */
3124 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3125 &lunsignedp, &lvolatilep, false);
3126 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3127 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3128 return 0;
3130 if (!const_p)
3132 /* If this is not a constant, we can only do something if bit positions,
3133 sizes, and signedness are the same. */
3134 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3135 &runsignedp, &rvolatilep, false);
3137 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3138 || lunsignedp != runsignedp || offset != 0
3139 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3140 return 0;
3143 /* See if we can find a mode to refer to this field. We should be able to,
3144 but fail if we can't. */
3145 nmode = get_best_mode (lbitsize, lbitpos,
3146 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3147 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3148 TYPE_ALIGN (TREE_TYPE (rinner))),
3149 word_mode, lvolatilep || rvolatilep);
3150 if (nmode == VOIDmode)
3151 return 0;
3153 /* Set signed and unsigned types of the precision of this mode for the
3154 shifts below. */
3155 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3156 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3158 /* Compute the bit position and size for the new reference and our offset
3159 within it. If the new reference is the same size as the original, we
3160 won't optimize anything, so return zero. */
3161 nbitsize = GET_MODE_BITSIZE (nmode);
3162 nbitpos = lbitpos & ~ (nbitsize - 1);
3163 lbitpos -= nbitpos;
3164 if (nbitsize == lbitsize)
3165 return 0;
3167 if (BYTES_BIG_ENDIAN)
3168 lbitpos = nbitsize - lbitsize - lbitpos;
3170 /* Make the mask to be used against the extracted field. */
3171 mask = build_int_cst (unsigned_type, -1);
3172 mask = force_fit_type (mask, 0, false, false);
3173 mask = fold_convert (unsigned_type, mask);
3174 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3175 mask = const_binop (RSHIFT_EXPR, mask,
3176 size_int (nbitsize - lbitsize - lbitpos), 0);
3178 if (! const_p)
3179 /* If not comparing with constant, just rework the comparison
3180 and return. */
3181 return build2 (code, compare_type,
3182 build2 (BIT_AND_EXPR, unsigned_type,
3183 make_bit_field_ref (linner, unsigned_type,
3184 nbitsize, nbitpos, 1),
3185 mask),
3186 build2 (BIT_AND_EXPR, unsigned_type,
3187 make_bit_field_ref (rinner, unsigned_type,
3188 nbitsize, nbitpos, 1),
3189 mask));
3191 /* Otherwise, we are handling the constant case. See if the constant is too
3192 big for the field. Warn and return a tree for 0 (false) if so. We do
3193 this not only for its own sake, but to avoid having to test for this
3194 error case below. If we didn't, we might generate wrong code.
3196 For unsigned fields, the constant shifted right by the field length should
3197 be all zero. For signed fields, the high-order bits should agree with
3198 the sign bit. */
3200 if (lunsignedp)
3202 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3203 fold_convert (unsigned_type, rhs),
3204 size_int (lbitsize), 0)))
3206 warning ("comparison is always %d due to width of bit-field",
3207 code == NE_EXPR);
3208 return constant_boolean_node (code == NE_EXPR, compare_type);
3211 else
3213 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3214 size_int (lbitsize - 1), 0);
3215 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3217 warning ("comparison is always %d due to width of bit-field",
3218 code == NE_EXPR);
3219 return constant_boolean_node (code == NE_EXPR, compare_type);
3223 /* Single-bit compares should always be against zero. */
3224 if (lbitsize == 1 && ! integer_zerop (rhs))
3226 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3227 rhs = fold_convert (type, integer_zero_node);
3230 /* Make a new bitfield reference, shift the constant over the
3231 appropriate number of bits and mask it with the computed mask
3232 (in case this was a signed field). If we changed it, make a new one. */
3233 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3234 if (lvolatilep)
3236 TREE_SIDE_EFFECTS (lhs) = 1;
3237 TREE_THIS_VOLATILE (lhs) = 1;
3240 rhs = fold (const_binop (BIT_AND_EXPR,
3241 const_binop (LSHIFT_EXPR,
3242 fold_convert (unsigned_type, rhs),
3243 size_int (lbitpos), 0),
3244 mask, 0));
3246 return build2 (code, compare_type,
3247 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3248 rhs);
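/* Illustration -- a stand-alone sketch, not part of fold-const.c;
   the field position and width are invented for the example, with
   little-endian bit numbering.  The constant case above amounts to:
   instead of extracting the field (load, AND, shift) and comparing,
   load the containing word, AND with the mask, and compare against
   the constant shifted into place -- the shift moves to the constant
   side and folds away.  */

#include <assert.h>
#include <stdint.h>

#define BITPOS  3                      /* bit offset of the field */
#define BITSIZE 4                      /* width of the field      */

int
main (void)
{
  uint32_t mask = ((UINT32_C (1) << BITSIZE) - 1) << BITPOS;
  uint32_t word = 0xa5a5a5a5;          /* arbitrary surrounding bits */
  unsigned field, cst = 5;

  for (field = 0; field < (1u << BITSIZE); field++)
    {
      word = (word & ~mask) | (field << BITPOS);

      int extract_then_compare = ((word & mask) >> BITPOS) == cst;
      int mask_then_compare = (word & mask) == (cst << BITPOS);

      assert (extract_then_compare == mask_then_compare);
    }
  return 0;
}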
3251 /* Subroutine for fold_truthop: decode a field reference.
3253 If EXP is a comparison reference, we return the innermost reference.
3255 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3256 set to the starting bit number.
3258 If the innermost field can be completely contained in a mode-sized
3259 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3261 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3262 otherwise it is not changed.
3264 *PUNSIGNEDP is set to the signedness of the field.
3266 *PMASK is set to the mask used. This is either contained in a
3267 BIT_AND_EXPR or derived from the width of the field.
3269 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3271 Return 0 if this is not a component reference or is one that we can't
3272 do anything with. */
3274 static tree
3275 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3276 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3277 int *punsignedp, int *pvolatilep,
3278 tree *pmask, tree *pand_mask)
3280 tree outer_type = 0;
3281 tree and_mask = 0;
3282 tree mask, inner, offset;
3283 tree unsigned_type;
3284 unsigned int precision;
3286 /* All the optimizations using this function assume integer fields.
3287 There are problems with FP fields since the type_for_size call
3288 below can fail for, e.g., XFmode. */
3289 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3290 return 0;
3292 /* We are interested in the bare arrangement of bits, so strip everything
3293 that doesn't affect the machine mode. However, record the type of the
3294 outermost expression if it may matter below. */
3295 if (TREE_CODE (exp) == NOP_EXPR
3296 || TREE_CODE (exp) == CONVERT_EXPR
3297 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3298 outer_type = TREE_TYPE (exp);
3299 STRIP_NOPS (exp);
3301 if (TREE_CODE (exp) == BIT_AND_EXPR)
3303 and_mask = TREE_OPERAND (exp, 1);
3304 exp = TREE_OPERAND (exp, 0);
3305 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3306 if (TREE_CODE (and_mask) != INTEGER_CST)
3307 return 0;
3310 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3311 punsignedp, pvolatilep, false);
3312 if ((inner == exp && and_mask == 0)
3313 || *pbitsize < 0 || offset != 0
3314 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3315 return 0;
3317 /* If the number of bits in the reference is the same as the bitsize of
3318 the outer type, then the outer type gives the signedness. Otherwise
3319 (in case of a small bitfield) the signedness is unchanged. */
3320 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3321 *punsignedp = TYPE_UNSIGNED (outer_type);
3323 /* Compute the mask to access the bitfield. */
3324 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3325 precision = TYPE_PRECISION (unsigned_type);
3327 mask = build_int_cst (unsigned_type, -1);
3328 mask = force_fit_type (mask, 0, false, false);
3330 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3331 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3333 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3334 if (and_mask != 0)
3335 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3336 fold_convert (unsigned_type, and_mask), mask));
3338 *pmask = mask;
3339 *pand_mask = and_mask;
3340 return inner;
3343 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3344 bit positions. */
3346 static int
3347 all_ones_mask_p (tree mask, int size)
3349 tree type = TREE_TYPE (mask);
3350 unsigned int precision = TYPE_PRECISION (type);
3351 tree tmask;
3353 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3354 tmask = force_fit_type (tmask, 0, false, false);
3356 return
3357 tree_int_cst_equal (mask,
3358 const_binop (RSHIFT_EXPR,
3359 const_binop (LSHIFT_EXPR, tmask,
3360 size_int (precision - size),
3361 0),
3362 size_int (precision - size), 0));
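/* Illustration -- a stand-alone sketch, not part of fold-const.c.
   The LSHIFT/RSHIFT pair above is the standard way to build a mask
   of SIZE low-order ones at an arbitrary precision without ever
   shifting by the full width (which would be undefined).  */

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  const int precision = 32;
  int size;

  for (size = 1; size <= precision; size++)
    {
      uint32_t tmask = ~UINT32_C (0);
      uint32_t mask = (tmask << (precision - size)) >> (precision - size);
      uint32_t expect = size == precision
                        ? ~UINT32_C (0) : (UINT32_C (1) << size) - 1;

      assert (mask == expect);
    }
  return 0;
}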
3365 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3366 represents the sign bit of EXP's type. If EXP represents a sign
3367 or zero extension, also test VAL against the unextended type.
3368 The return value is the (sub)expression whose sign bit is VAL,
3369 or NULL_TREE otherwise. */
3371 static tree
3372 sign_bit_p (tree exp, tree val)
3374 unsigned HOST_WIDE_INT mask_lo, lo;
3375 HOST_WIDE_INT mask_hi, hi;
3376 int width;
3377 tree t;
3379 /* Tree EXP must have an integral type. */
3380 t = TREE_TYPE (exp);
3381 if (! INTEGRAL_TYPE_P (t))
3382 return NULL_TREE;
3384 /* Tree VAL must be an integer constant. */
3385 if (TREE_CODE (val) != INTEGER_CST
3386 || TREE_CONSTANT_OVERFLOW (val))
3387 return NULL_TREE;
3389 width = TYPE_PRECISION (t);
3390 if (width > HOST_BITS_PER_WIDE_INT)
3392 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3393 lo = 0;
3395 mask_hi = ((unsigned HOST_WIDE_INT) -1
3396 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3397 mask_lo = -1;
3399 else
3401 hi = 0;
3402 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3404 mask_hi = 0;
3405 mask_lo = ((unsigned HOST_WIDE_INT) -1
3406 >> (HOST_BITS_PER_WIDE_INT - width));
3409 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3410 treat VAL as if it were unsigned. */
3411 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3412 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3413 return exp;
3415 /* Handle extension from a narrower type. */
3416 if (TREE_CODE (exp) == NOP_EXPR
3417 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3418 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3420 return NULL_TREE;
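/* Illustration -- a stand-alone sketch, not part of fold-const.c;
   the helper name is invented for the example.  For a type of width
   W the sign bit is the value 1 << (W - 1); the mask_hi/mask_lo pair
   above simply restricts the test to the low W bits when W is
   narrower than the host representation of the constant.  */

#include <assert.h>
#include <stdint.h>

static int
sign_bit_value_p (uint64_t val, int width)
{
  uint64_t mask = width == 64 ? ~UINT64_C (0)
                              : (UINT64_C (1) << width) - 1;

  return (val & mask) == (UINT64_C (1) << (width - 1));
}

int
main (void)
{
  assert (sign_bit_value_p (0x80, 8));
  assert (sign_bit_value_p (0x8000, 16));
  assert (!sign_bit_value_p (0x40, 8));
  /* Bits above the precision are masked off, as with mask_hi/lo.  */
  assert (sign_bit_value_p (0xffffff80, 8));
  return 0;
}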
3423 /* Subroutine for fold_truthop: determine if an operand is simple enough
3424 to be evaluated unconditionally. */
3426 static int
3427 simple_operand_p (tree exp)
3429 /* Strip any conversions that don't change the machine mode. */
3430 STRIP_NOPS (exp);
3432 return (CONSTANT_CLASS_P (exp)
3433 || TREE_CODE (exp) == SSA_NAME
3434 || (DECL_P (exp)
3435 && ! TREE_ADDRESSABLE (exp)
3436 && ! TREE_THIS_VOLATILE (exp)
3437 && ! DECL_NONLOCAL (exp)
3438 /* Don't regard global variables as simple. They may be
3439 allocated in ways unknown to the compiler (shared memory,
3440 #pragma weak, etc). */
3441 && ! TREE_PUBLIC (exp)
3442 && ! DECL_EXTERNAL (exp)
3443 /* Loading a static variable is unduly expensive, but global
3444 registers aren't expensive. */
3445 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3448 /* The following functions are subroutines to fold_range_test and allow it to
3449 try to change a logical combination of comparisons into a range test.
3451 For example, both
3452 X == 2 || X == 3 || X == 4 || X == 5
3453 and
3454 X >= 2 && X <= 5
3455 are converted to
3456 (unsigned) (X - 2) <= 3
3458 We describe each set of comparisons as being either inside or outside
3459 a range, using a variable named like IN_P, and then describe the
3460 range with a lower and upper bound. If one of the bounds is omitted,
3461 it represents either the highest or lowest value of the type.
3463 In the comments below, we represent a range by two numbers in brackets
3464 preceded by a "+" to designate being inside that range, or a "-" to
3465 designate being outside that range, so the condition can be inverted by
3466 flipping the prefix. An omitted bound is represented by a "-". For
3467 example, "- [-, 10]" means being outside the range starting at the lowest
3468 possible value and ending at 10, in other words, being greater than 10.
3469 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3470 always false.
3472 We set up things so that the missing bounds are handled in a consistent
3473 manner so neither a missing bound nor "true" and "false" need to be
3474 handled using a special case. */
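/* Illustration -- a stand-alone sketch, not part of fold-const.c.
   The whole transformation in miniature: testing membership of
   [2, 5] reduces to one unsigned compare after biasing by the low
   bound, because the unsigned subtraction wraps every value below 2
   past the upper bound.  Checked exhaustively for 8-bit X.  */

#include <assert.h>

int
main (void)
{
  int x;

  for (x = -128; x <= 127; x++)
    {
      int chain = x == 2 || x == 3 || x == 4 || x == 5;
      int range = x >= 2 && x <= 5;
      int test = (unsigned) (x - 2) <= 3;   /* + [2, 5] as one compare */

      assert (chain == range && range == test);
    }
  return 0;
}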
3476 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3477 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3478 and UPPER1_P are nonzero if the respective argument is an upper bound
3479 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3480 must be specified for a comparison. ARG1 will be converted to ARG0's
3481 type if both are specified. */
3483 static tree
3484 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3485 tree arg1, int upper1_p)
3487 tree tem;
3488 int result;
3489 int sgn0, sgn1;
3491 /* If neither arg represents infinity, do the normal operation.
3492 Else, if not a comparison, return infinity. Else handle the special
3493 comparison rules. Note that most of the cases below won't occur, but
3494 are handled for consistency. */
3496 if (arg0 != 0 && arg1 != 0)
3498 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3499 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3500 STRIP_NOPS (tem);
3501 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3504 if (TREE_CODE_CLASS (code) != tcc_comparison)
3505 return 0;
3507 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3508 for neither. In real maths, we cannot assume open-ended ranges are
3509 the same. But, this is computer arithmetic, where numbers are finite.
3510 We can therefore make the transformation of any unbounded range with
3511 the value Z, Z being greater than any representable number. This permits
3512 us to treat unbounded ranges as equal. */
3513 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3514 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3515 switch (code)
3517 case EQ_EXPR:
3518 result = sgn0 == sgn1;
3519 break;
3520 case NE_EXPR:
3521 result = sgn0 != sgn1;
3522 break;
3523 case LT_EXPR:
3524 result = sgn0 < sgn1;
3525 break;
3526 case LE_EXPR:
3527 result = sgn0 <= sgn1;
3528 break;
3529 case GT_EXPR:
3530 result = sgn0 > sgn1;
3531 break;
3532 case GE_EXPR:
3533 result = sgn0 >= sgn1;
3534 break;
3535 default:
3536 gcc_unreachable ();
3539 return constant_boolean_node (result, type);
3542 /* Given EXP, a logical expression, set the range it is testing into
3543 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3544 actually being tested. *PLOW and *PHIGH will be made of the same type
3545 as the returned expression. If EXP is not a comparison, we will most
3546 likely not be returning a useful value and range. */
3548 static tree
3549 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3551 enum tree_code code;
3552 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3553 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3554 int in_p, n_in_p;
3555 tree low, high, n_low, n_high;
3557 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3558 and see if we can refine the range. Some of the cases below may not
3559 happen, but it doesn't seem worth worrying about this. We "continue"
3560 the outer loop when we've changed something; otherwise we "break"
3561 the switch, which will "break" the while. */
3563 in_p = 0;
3564 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3566 while (1)
3568 code = TREE_CODE (exp);
3569 exp_type = TREE_TYPE (exp);
3571 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3573 if (TREE_CODE_LENGTH (code) > 0)
3574 arg0 = TREE_OPERAND (exp, 0);
3575 if (TREE_CODE_CLASS (code) == tcc_comparison
3576 || TREE_CODE_CLASS (code) == tcc_unary
3577 || TREE_CODE_CLASS (code) == tcc_binary)
3578 arg0_type = TREE_TYPE (arg0);
3579 if (TREE_CODE_CLASS (code) == tcc_binary
3580 || TREE_CODE_CLASS (code) == tcc_comparison
3581 || (TREE_CODE_CLASS (code) == tcc_expression
3582 && TREE_CODE_LENGTH (code) > 1))
3583 arg1 = TREE_OPERAND (exp, 1);
3586 switch (code)
3588 case TRUTH_NOT_EXPR:
3589 in_p = ! in_p, exp = arg0;
3590 continue;
3592 case EQ_EXPR: case NE_EXPR:
3593 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3594 /* We can only do something if the range is testing for zero
3595 and if the second operand is an integer constant. Note that
3596 saying something is "in" the range we make is done by
3597 complementing IN_P since it will set in the initial case of
3598 being not equal to zero; "out" is leaving it alone. */
3599 if (low == 0 || high == 0
3600 || ! integer_zerop (low) || ! integer_zerop (high)
3601 || TREE_CODE (arg1) != INTEGER_CST)
3602 break;
3604 switch (code)
3606 case NE_EXPR: /* - [c, c] */
3607 low = high = arg1;
3608 break;
3609 case EQ_EXPR: /* + [c, c] */
3610 in_p = ! in_p, low = high = arg1;
3611 break;
3612 case GT_EXPR: /* - [-, c] */
3613 low = 0, high = arg1;
3614 break;
3615 case GE_EXPR: /* + [c, -] */
3616 in_p = ! in_p, low = arg1, high = 0;
3617 break;
3618 case LT_EXPR: /* - [c, -] */
3619 low = arg1, high = 0;
3620 break;
3621 case LE_EXPR: /* + [-, c] */
3622 in_p = ! in_p, low = 0, high = arg1;
3623 break;
3624 default:
3625 gcc_unreachable ();
3628 /* If this is an unsigned comparison, we also know that EXP is
3629 greater than or equal to zero. We base the range tests we make
3630 on that fact, so we record it here so we can parse existing
3631 range tests. We test arg0_type since often the return type
3632 of, e.g. EQ_EXPR, is boolean. */
3633 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3635 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3636 in_p, low, high, 1,
3637 fold_convert (arg0_type, integer_zero_node),
3638 NULL_TREE))
3639 break;
3641 in_p = n_in_p, low = n_low, high = n_high;
3643 /* If the high bound is missing, but we have a nonzero low
3644 bound, reverse the range so it goes from zero to the low bound
3645 minus 1. */
3646 if (high == 0 && low && ! integer_zerop (low))
3648 in_p = ! in_p;
3649 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3650 integer_one_node, 0);
3651 low = fold_convert (arg0_type, integer_zero_node);
3655 exp = arg0;
3656 continue;
3658 case NEGATE_EXPR:
3659 /* (-x) IN [a,b] -> x in [-b, -a] */
3660 n_low = range_binop (MINUS_EXPR, exp_type,
3661 fold_convert (exp_type, integer_zero_node),
3662 0, high, 1);
3663 n_high = range_binop (MINUS_EXPR, exp_type,
3664 fold_convert (exp_type, integer_zero_node),
3665 0, low, 0);
3666 low = n_low, high = n_high;
3667 exp = arg0;
3668 continue;
3670 case BIT_NOT_EXPR:
3671 /* ~ X -> -X - 1 */
3672 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3673 fold_convert (exp_type, integer_one_node));
3674 continue;
3676 case PLUS_EXPR: case MINUS_EXPR:
3677 if (TREE_CODE (arg1) != INTEGER_CST)
3678 break;
3680 /* If EXP is signed, any overflow in the computation is undefined,
3681 so we don't worry about it so long as our computations on
3682 the bounds don't overflow. For unsigned, overflow is defined
3683 and this is exactly the right thing. */
3684 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3685 arg0_type, low, 0, arg1, 0);
3686 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3687 arg0_type, high, 1, arg1, 0);
3688 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3689 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3690 break;
3692 /* Check for an unsigned range which has wrapped around the maximum
3693 value thus making n_high < n_low, and normalize it. */
3694 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3696 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3697 integer_one_node, 0);
3698 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3699 integer_one_node, 0);
3701 /* If the range is of the form +/- [ x+1, x ], we won't
3702 be able to normalize it. But then, it represents the
3703 whole range or the empty set, so make it
3704 +/- [ -, - ]. */
3705 if (tree_int_cst_equal (n_low, low)
3706 && tree_int_cst_equal (n_high, high))
3707 low = high = 0;
3708 else
3709 in_p = ! in_p;
3711 else
3712 low = n_low, high = n_high;
3714 exp = arg0;
3715 continue;
3717 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3718 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3719 break;
3721 if (! INTEGRAL_TYPE_P (arg0_type)
3722 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3723 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3724 break;
3726 n_low = low, n_high = high;
3728 if (n_low != 0)
3729 n_low = fold_convert (arg0_type, n_low);
3731 if (n_high != 0)
3732 n_high = fold_convert (arg0_type, n_high);
3735 /* If we're converting arg0 from an unsigned type to exp's
3736 signed type, we will be doing the comparison as unsigned.
3737 The tests above have already verified that LOW and HIGH
3738 are both positive.
3740 So we have to ensure that we will handle large unsigned
3741 values the same way that the current signed bounds treat
3742 negative values. */
3744 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3746 tree high_positive;
3747 tree equiv_type = lang_hooks.types.type_for_mode
3748 (TYPE_MODE (arg0_type), 1);
3750 /* A range without an upper bound is, naturally, unbounded.
3751 Since convert would have cropped a very large value, use
3752 the max value for the destination type. */
3753 high_positive
3754 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3755 : TYPE_MAX_VALUE (arg0_type);
3757 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3758 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3759 fold_convert (arg0_type,
3760 high_positive),
3761 fold_convert (arg0_type,
3762 integer_one_node)));
3764 /* If the low bound is specified, "and" the range with the
3765 range for which the original unsigned value will be
3766 positive. */
3767 if (low != 0)
3769 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3770 1, n_low, n_high, 1,
3771 fold_convert (arg0_type,
3772 integer_zero_node),
3773 high_positive))
3774 break;
3776 in_p = (n_in_p == in_p);
3778 else
3780 /* Otherwise, "or" the range with the range of the input
3781 that will be interpreted as negative. */
3782 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3783 0, n_low, n_high, 1,
3784 fold_convert (arg0_type,
3785 integer_zero_node),
3786 high_positive))
3787 break;
3789 in_p = (in_p != n_in_p);
3793 exp = arg0;
3794 low = n_low, high = n_high;
3795 continue;
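/* Example: for unsigned char U, the bounded test
   0 <= (signed char) U <= 5 only has to accept U in [0, 5], whereas
   the unbounded test (signed char) U <= 5 must also accept
   U in [128, 255], whose signed interpretations are negative; the
   merged result is then U outside [6, 127].  */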
3797 default:
3798 break;
3801 break;
3804 /* If EXP is a constant, we can evaluate whether this is true or false. */
3805 if (TREE_CODE (exp) == INTEGER_CST)
3807 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3808 exp, 0, low, 0))
3809 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3810 exp, 1, high, 1)));
3811 low = high = 0;
3812 exp = 0;
3815 *pin_p = in_p, *plow = low, *phigh = high;
3816 return exp;
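/* For example, given EXP = (unsigned) X - 1 <= 3, make_range returns
   X and sets *PIN_P = 1, *PLOW = 1, *PHIGH = 4: the comparison first
   yields the range [0, 3] for X - 1, and the MINUS_EXPR case above
   then shifts both bounds up by one.  */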
3819 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3820 type, TYPE, return an expression to test if EXP is in (or out of, depending
3821 on IN_P) the range. Return 0 if the test couldn't be created. */
3823 static tree
3824 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3826 tree etype = TREE_TYPE (exp);
3827 tree value;
3829 if (! in_p)
3831 value = build_range_check (type, exp, 1, low, high);
3832 if (value != 0)
3833 return invert_truthvalue (value);
3835 return 0;
3838 if (low == 0 && high == 0)
3839 return fold_convert (type, integer_one_node);
3841 if (low == 0)
3842 return fold (build2 (LE_EXPR, type, exp, high));
3844 if (high == 0)
3845 return fold (build2 (GE_EXPR, type, exp, low));
3847 if (operand_equal_p (low, high, 0))
3848 return fold (build2 (EQ_EXPR, type, exp, low));
3850 if (integer_zerop (low))
3852 if (! TYPE_UNSIGNED (etype))
3854 etype = lang_hooks.types.unsigned_type (etype);
3855 high = fold_convert (etype, high);
3856 exp = fold_convert (etype, exp);
3858 return build_range_check (type, exp, 1, 0, high);
3861 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3862 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3864 unsigned HOST_WIDE_INT lo;
3865 HOST_WIDE_INT hi;
3866 int prec;
3868 prec = TYPE_PRECISION (etype);
3869 if (prec <= HOST_BITS_PER_WIDE_INT)
3871 hi = 0;
3872 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3874 else
3876 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3877 lo = (unsigned HOST_WIDE_INT) -1;
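/* HI and LO now hold the two halves of 2**(prec-1) - 1, the largest
   positive value of the signed variant of ETYPE (e.g. 127 for an
   8-bit type), which is what the comparison below tests HIGH
   against.  */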
3880 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3882 if (TYPE_UNSIGNED (etype))
3884 etype = lang_hooks.types.signed_type (etype);
3885 exp = fold_convert (etype, exp);
3887 return fold (build2 (GT_EXPR, type, exp,
3888 fold_convert (etype, integer_zero_node)));
3892 value = const_binop (MINUS_EXPR, high, low, 0);
3893 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3895 tree utype, minv, maxv;
3897 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3898 for the type in question, as we rely on this here. */
3899 switch (TREE_CODE (etype))
3901 case INTEGER_TYPE:
3902 case ENUMERAL_TYPE:
3903 case CHAR_TYPE:
3904 utype = lang_hooks.types.unsigned_type (etype);
3905 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3906 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3907 integer_one_node, 1);
3908 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3909 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3910 minv, 1, maxv, 1)))
3912 etype = utype;
3913 high = fold_convert (etype, high);
3914 low = fold_convert (etype, low);
3915 exp = fold_convert (etype, exp);
3916 value = const_binop (MINUS_EXPR, high, low, 0);
3918 break;
3919 default:
3920 break;
3924 if (value != 0 && ! TREE_OVERFLOW (value))
3925 return build_range_check (type,
3926 fold (build2 (MINUS_EXPR, etype, exp, low)),
3927 1, fold_convert (etype, integer_zero_node),
3928 value);
3930 return 0;
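/* A typical result of the MINUS_EXPR rewrite above: the range test
   '0' <= ch && ch <= '9' becomes essentially
   (unsigned char) (ch - '0') <= 9, a single unsigned comparison.  */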
3933 /* Given two ranges, see if we can merge them into one. Return 1 if we
3934 can, 0 if we can't. Set the output range into the specified parameters. */
3936 static int
3937 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3938 tree high0, int in1_p, tree low1, tree high1)
3940 int no_overlap;
3941 int subset;
3942 int temp;
3943 tree tem;
3944 int in_p;
3945 tree low, high;
3946 int lowequal = ((low0 == 0 && low1 == 0)
3947 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3948 low0, 0, low1, 0)));
3949 int highequal = ((high0 == 0 && high1 == 0)
3950 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3951 high0, 1, high1, 1)));
3953 /* Make range 0 be the range that starts first, or ends last if they
3954 start at the same value. Swap them if that is not already the case. */
3955 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3956 low0, 0, low1, 0))
3957 || (lowequal
3958 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3959 high1, 1, high0, 1))))
3961 temp = in0_p, in0_p = in1_p, in1_p = temp;
3962 tem = low0, low0 = low1, low1 = tem;
3963 tem = high0, high0 = high1, high1 = tem;
3966 /* Now flag two cases, whether the ranges are disjoint or whether the
3967 second range is totally subsumed in the first. Note that the tests
3968 below are simplified by the ones above. */
3969 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3970 high0, 1, low1, 0));
3971 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3972 high1, 1, high0, 1));
3974 /* We now have four cases, depending on whether we are including or
3975 excluding the two ranges. */
3976 if (in0_p && in1_p)
3978 /* If they don't overlap, the result is false. If the second range
3979 is a subset it is the result. Otherwise, the range is from the start
3980 of the second to the end of the first. */
3981 if (no_overlap)
3982 in_p = 0, low = high = 0;
3983 else if (subset)
3984 in_p = 1, low = low1, high = high1;
3985 else
3986 in_p = 1, low = low1, high = high0;
3989 else if (in0_p && ! in1_p)
3991 /* If they don't overlap, the result is the first range. If they are
3992 equal, the result is false. If the second range is a subset of the
3993 first, and the ranges begin at the same place, we go from just after
3994 the end of the second range to the end of the first. If the second
3995 range is not a subset of the first, or if it is a subset and both
3996 ranges end at the same place, the range starts at the start of the
3997 first range and ends just before the second range.
3998 Otherwise, we can't describe this as a single range. */
3999 if (no_overlap)
4000 in_p = 1, low = low0, high = high0;
4001 else if (lowequal && highequal)
4002 in_p = 0, low = high = 0;
4003 else if (subset && lowequal)
4005 in_p = 1, high = high0;
4006 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4007 integer_one_node, 0);
4009 else if (! subset || highequal)
4011 in_p = 1, low = low0;
4012 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4013 integer_one_node, 0);
4015 else
4016 return 0;
4019 else if (! in0_p && in1_p)
4021 /* If they don't overlap, the result is the second range. If the second
4022 is a subset of the first, the result is false. Otherwise,
4023 the range starts just after the first range and ends at the
4024 end of the second. */
4025 if (no_overlap)
4026 in_p = 1, low = low1, high = high1;
4027 else if (subset || highequal)
4028 in_p = 0, low = high = 0;
4029 else
4031 in_p = 1, high = high1;
4032 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4033 integer_one_node, 0);
4037 else
4039 /* The case where we are excluding both ranges. Here the complex case
4040 is if they don't overlap. In that case, the only time we have a
4041 range is if they are adjacent. If the second is a subset of the
4042 first, the result is the first. Otherwise, the range to exclude
4043 starts at the beginning of the first range and ends at the end of the
4044 second. */
4045 if (no_overlap)
4047 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4048 range_binop (PLUS_EXPR, NULL_TREE,
4049 high0, 1,
4050 integer_one_node, 1),
4051 1, low1, 0)))
4052 in_p = 0, low = low0, high = high1;
4053 else
4055 /* Canonicalize - [min, x] into - [-, x]. */
4056 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4057 switch (TREE_CODE (TREE_TYPE (low0)))
4059 case ENUMERAL_TYPE:
4060 if (TYPE_PRECISION (TREE_TYPE (low0))
4061 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4062 break;
4063 /* FALLTHROUGH */
4064 case INTEGER_TYPE:
4065 case CHAR_TYPE:
4066 if (tree_int_cst_equal (low0,
4067 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4068 low0 = 0;
4069 break;
4070 case POINTER_TYPE:
4071 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4072 && integer_zerop (low0))
4073 low0 = 0;
4074 break;
4075 default:
4076 break;
4079 /* Canonicalize - [x, max] into - [x, -]. */
4080 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4081 switch (TREE_CODE (TREE_TYPE (high1)))
4083 case ENUMERAL_TYPE:
4084 if (TYPE_PRECISION (TREE_TYPE (high1))
4085 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4086 break;
4087 /* FALLTHROUGH */
4088 case INTEGER_TYPE:
4089 case CHAR_TYPE:
4090 if (tree_int_cst_equal (high1,
4091 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4092 high1 = 0;
4093 break;
4094 case POINTER_TYPE:
4095 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4096 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4097 high1, 1,
4098 integer_one_node, 1)))
4099 high1 = 0;
4100 break;
4101 default:
4102 break;
4105 /* The ranges might also be adjacent between the maximum and
4106 minimum values of the given type. For
4107 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4108 return + [x + 1, y - 1]. */
4109 if (low0 == 0 && high1 == 0)
4111 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4112 integer_one_node, 1);
4113 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4114 integer_one_node, 0);
4115 if (low == 0 || high == 0)
4116 return 0;
4118 in_p = 1;
4120 else
4121 return 0;
4124 else if (subset)
4125 in_p = 0, low = low0, high = high0;
4126 else
4127 in_p = 0, low = low0, high = high1;
4130 *pin_p = in_p, *plow = low, *phigh = high;
4131 return 1;
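/* Examples: merging +[0, 9] with +[5, 20] yields +[5, 9], the
   intersection of the two ranges, while merging -[0, 9] with
   -[5, 20] yields -[0, 20], excluding their union, since the two
   ranges overlap.  */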
4135 /* Subroutine of fold, looking inside expressions of the form
4136 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4137 of the COND_EXPR. This function is also used to optimize
4138 A op B ? C : A by reversing the comparison first.
4140 Return a folded expression whose code is not a COND_EXPR
4141 anymore, or NULL_TREE if no folding opportunity is found. */
4143 static tree
4144 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4146 enum tree_code comp_code = TREE_CODE (arg0);
4147 tree arg00 = TREE_OPERAND (arg0, 0);
4148 tree arg01 = TREE_OPERAND (arg0, 1);
4149 tree arg1_type = TREE_TYPE (arg1);
4150 tree tem;
4152 STRIP_NOPS (arg1);
4153 STRIP_NOPS (arg2);
4155 /* If we have A op 0 ? A : -A, consider applying the following
4156 transformations:
4158 A == 0? A : -A same as -A
4159 A != 0? A : -A same as A
4160 A >= 0? A : -A same as abs (A)
4161 A > 0? A : -A same as abs (A)
4162 A <= 0? A : -A same as -abs (A)
4163 A < 0? A : -A same as -abs (A)
4165 None of these transformations work for modes with signed
4166 zeros. If A is +/-0, the first two transformations will
4167 change the sign of the result (from +0 to -0, or vice
4168 versa). The last four will fix the sign of the result,
4169 even though the original expressions could be positive or
4170 negative, depending on the sign of A.
4172 Note that all these transformations are correct if A is
4173 NaN, since the two alternatives (A and -A) are also NaNs. */
4174 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4175 ? real_zerop (arg01)
4176 : integer_zerop (arg01))
4177 && TREE_CODE (arg2) == NEGATE_EXPR
4178 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4179 switch (comp_code)
4181 case EQ_EXPR:
4182 case UNEQ_EXPR:
4183 tem = fold_convert (arg1_type, arg1);
4184 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4185 case NE_EXPR:
4186 case LTGT_EXPR:
4187 return pedantic_non_lvalue (fold_convert (type, arg1));
4188 case UNGE_EXPR:
4189 case UNGT_EXPR:
4190 if (flag_trapping_math)
4191 break;
4192 /* Fall through. */
4193 case GE_EXPR:
4194 case GT_EXPR:
4195 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4196 arg1 = fold_convert (lang_hooks.types.signed_type
4197 (TREE_TYPE (arg1)), arg1);
4198 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4199 return pedantic_non_lvalue (fold_convert (type, tem));
4200 case UNLE_EXPR:
4201 case UNLT_EXPR:
4202 if (flag_trapping_math)
4203 break;
/* Fall through. */
4204 case LE_EXPR:
4205 case LT_EXPR:
4206 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4207 arg1 = fold_convert (lang_hooks.types.signed_type
4208 (TREE_TYPE (arg1)), arg1);
4209 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4210 return negate_expr (fold_convert (type, tem));
4211 default:
4212 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4213 break;
4216 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4217 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4218 both transformations are correct when A is NaN: A != 0
4219 is then true, and A == 0 is false. */
4221 if (integer_zerop (arg01) && integer_zerop (arg2))
4223 if (comp_code == NE_EXPR)
4224 return pedantic_non_lvalue (fold_convert (type, arg1));
4225 else if (comp_code == EQ_EXPR)
4226 return fold_convert (type, integer_zero_node);
4229 /* Try some transformations of A op B ? A : B.
4231 A == B? A : B same as B
4232 A != B? A : B same as A
4233 A >= B? A : B same as max (A, B)
4234 A > B? A : B same as max (B, A)
4235 A <= B? A : B same as min (A, B)
4236 A < B? A : B same as min (B, A)
4238 As above, these transformations don't work in the presence
4239 of signed zeros. For example, if A and B are zeros of
4240 opposite sign, the first two transformations will change
4241 the sign of the result. In the last four, the original
4242 expressions give different results for (A=+0, B=-0) and
4243 (A=-0, B=+0), but the transformed expressions do not.
4245 The first two transformations are correct if either A or B
4246 is a NaN. In the first transformation, the condition will
4247 be false, and B will indeed be chosen. In the case of the
4248 second transformation, the condition A != B will be true,
4249 and A will be chosen.
4251 The conversions to max() and min() are not correct if B is
4252 a number and A is not. The conditions in the original
4253 expressions will be false, so all four give B. The min()
4254 and max() versions would give a NaN instead. */
4255 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4257 tree comp_op0 = arg00;
4258 tree comp_op1 = arg01;
4259 tree comp_type = TREE_TYPE (comp_op0);
4261 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4262 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4264 comp_type = type;
4265 comp_op0 = arg1;
4266 comp_op1 = arg2;
4269 switch (comp_code)
4271 case EQ_EXPR:
4272 return pedantic_non_lvalue (fold_convert (type, arg2));
4273 case NE_EXPR:
4274 return pedantic_non_lvalue (fold_convert (type, arg1));
4275 case LE_EXPR:
4276 case LT_EXPR:
4277 case UNLE_EXPR:
4278 case UNLT_EXPR:
4279 /* In C++ a ?: expression can be an lvalue, so put the
4280 operand which will be used if they are equal first
4281 so that we can convert this back to the
4282 corresponding COND_EXPR. */
4283 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4285 comp_op0 = fold_convert (comp_type, comp_op0);
4286 comp_op1 = fold_convert (comp_type, comp_op1);
4287 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4288 ? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
4289 : fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
4290 return pedantic_non_lvalue (fold_convert (type, tem));
4292 break;
4293 case GE_EXPR:
4294 case GT_EXPR:
4295 case UNGE_EXPR:
4296 case UNGT_EXPR:
4297 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4299 comp_op0 = fold_convert (comp_type, comp_op0);
4300 comp_op1 = fold_convert (comp_type, comp_op1);
4301 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4302 ? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
4303 : fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
4304 return pedantic_non_lvalue (fold_convert (type, tem));
4306 break;
4307 case UNEQ_EXPR:
4308 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4309 return pedantic_non_lvalue (fold_convert (type, arg2));
4310 break;
4311 case LTGT_EXPR:
4312 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4313 return pedantic_non_lvalue (fold_convert (type, arg1));
4314 break;
4315 default:
4316 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4317 break;
4321 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4322 we might still be able to simplify this. For example,
4323 if C1 is one less or one more than C2, this might have started
4324 out as a MIN or MAX and been transformed by this function.
4325 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4327 if (INTEGRAL_TYPE_P (type)
4328 && TREE_CODE (arg01) == INTEGER_CST
4329 && TREE_CODE (arg2) == INTEGER_CST)
4330 switch (comp_code)
4332 case EQ_EXPR:
4333 /* We can replace A with C1 in this case. */
4334 arg1 = fold_convert (type, arg01);
4335 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4337 case LT_EXPR:
4338 /* If C1 is C2 + 1, this is min(A, C2). */
4339 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4340 OEP_ONLY_CONST)
4341 && operand_equal_p (arg01,
4342 const_binop (PLUS_EXPR, arg2,
4343 integer_one_node, 0),
4344 OEP_ONLY_CONST))
4345 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4346 type, arg1, arg2)));
4347 break;
4349 case LE_EXPR:
4350 /* If C1 is C2 - 1, this is min(A, C2). */
4351 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4352 OEP_ONLY_CONST)
4353 && operand_equal_p (arg01,
4354 const_binop (MINUS_EXPR, arg2,
4355 integer_one_node, 0),
4356 OEP_ONLY_CONST))
4357 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4358 type, arg1, arg2)));
4359 break;
4361 case GT_EXPR:
4362 /* If C1 is C2 - 1, this is max(A, C2). */
4363 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4364 OEP_ONLY_CONST)
4365 && operand_equal_p (arg01,
4366 const_binop (MINUS_EXPR, arg2,
4367 integer_one_node, 0),
4368 OEP_ONLY_CONST))
4369 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4370 type, arg1, arg2)));
4371 break;
4373 case GE_EXPR:
4374 /* If C1 is C2 + 1, this is max(A, C2). */
4375 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4376 OEP_ONLY_CONST)
4377 && operand_equal_p (arg01,
4378 const_binop (PLUS_EXPR, arg2,
4379 integer_one_node, 0),
4380 OEP_ONLY_CONST))
4381 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4382 type, arg1, arg2)));
4383 break;
4384 case NE_EXPR:
4385 break;
4386 default:
4387 gcc_unreachable ();
4390 return NULL_TREE;
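/* Example of the constant recovery above: x < 5 ? x : 4 looks like a
   MIN_EXPR that this function previously rewrote; since C1 (5) is
   C2 (4) + 1, the LT_EXPR case folds it back into MIN (x, 4).  */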
4395 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4396 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4397 #endif
4399 /* EXP is some logical combination of boolean tests. See if we can
4400 merge it into some range test. Return the new tree if so. */
4402 static tree
4403 fold_range_test (tree exp)
4405 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4406 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4407 int in0_p, in1_p, in_p;
4408 tree low0, low1, low, high0, high1, high;
4409 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4410 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4411 tree tem;
4413 /* If this is an OR operation, invert both sides; we will invert
4414 again at the end. */
4415 if (or_op)
4416 in0_p = ! in0_p, in1_p = ! in1_p;
4418 /* If both expressions are the same, if we can merge the ranges, and we
4419 can build the range test, return it or it inverted. If one of the
4420 ranges is always true or always false, consider it to be the same
4421 expression as the other. */
4422 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4423 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4424 in1_p, low1, high1)
4425 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4426 lhs != 0 ? lhs
4427 : rhs != 0 ? rhs : integer_zero_node,
4428 in_p, low, high))))
4429 return or_op ? invert_truthvalue (tem) : tem;
4431 /* On machines where branches are expensive, if this is a
4432 short-circuited branch and the underlying object on both sides
4433 is the same, make a non-short-circuit operation. */
4434 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4435 && lhs != 0 && rhs != 0
4436 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4437 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4438 && operand_equal_p (lhs, rhs, 0))
4440 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4441 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4442 which case we can't do this. */
4443 if (simple_operand_p (lhs))
4444 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4445 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4446 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4447 TREE_OPERAND (exp, 1));
4449 else if (lang_hooks.decls.global_bindings_p () == 0
4450 && ! CONTAINS_PLACEHOLDER_P (lhs))
4452 tree common = save_expr (lhs);
4454 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4455 or_op ? ! in0_p : in0_p,
4456 low0, high0))
4457 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4458 or_op ? ! in1_p : in1_p,
4459 low1, high1))))
4460 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4461 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4462 TREE_TYPE (exp), lhs, rhs);
4466 return 0;
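/* For instance, c >= '0' && c <= '9' has both operands turned into
   ranges on C, which merge_ranges combines into the single range
   ['0', '9'].  When the ranges cannot be merged, as in
   x == 2 || x == 5, but the tested object is the same, the
   short-circuit operator may still be replaced by a non-short-circuit
   TRUTH_OR_EXPR of the two range checks where
   LOGICAL_OP_NON_SHORT_CIRCUIT holds.  */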
4469 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P-bit
4470 value. Arrange things so the extra bits will be set to zero if and
4471 only if C is sign-extended to its full width. If MASK is nonzero,
4472 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4474 static tree
4475 unextend (tree c, int p, int unsignedp, tree mask)
4477 tree type = TREE_TYPE (c);
4478 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4479 tree temp;
4481 if (p == modesize || unsignedp)
4482 return c;
4484 /* We work by getting just the sign bit into the low-order bit, then
4485 into the high-order bit, then sign-extend. We then XOR that value
4486 with C. */
4487 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4488 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4490 /* We must use a signed type in order to get an arithmetic right shift.
4491 However, we must also avoid introducing accidental overflows, so that
4492 a subsequent call to integer_zerop will work. Hence we must
4493 do the type conversion here. At this point, the constant is either
4494 zero or one, and the conversion to a signed type can never overflow.
4495 We could get an overflow if this conversion is done anywhere else. */
4496 if (TYPE_UNSIGNED (type))
4497 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4499 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4500 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4501 if (mask != 0)
4502 temp = const_binop (BIT_AND_EXPR, temp,
4503 fold_convert (TREE_TYPE (c), mask), 0);
4504 /* If necessary, convert the type back to match the type of C. */
4505 if (TYPE_UNSIGNED (type))
4506 temp = fold_convert (type, temp);
4508 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
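/* Worked example: with P = 8 and MODESIZE = 32, the two shifts
   produce TEMP = 0xffffff00 whenever bit 7 of C is set.  The already
   sign-extended C = 0xffffffff then gives C ^ TEMP = 0x000000ff
   (extra bits clear), while the zero-extended C = 0x000000ff gives
   0xffffffff (extra bits set), as described above.  */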
4511 /* Find ways of folding logical expressions of LHS and RHS:
4512 Try to merge two comparisons to the same innermost item.
4513 Look for range tests like "ch >= '0' && ch <= '9'".
4514 Look for combinations of simple terms on machines with expensive branches
4515 and evaluate the RHS unconditionally.
4517 For example, if we have p->a == 2 && p->b == 4 and we can make an
4518 object large enough to span both A and B, we can do this with a comparison
4519 against the object ANDed with a mask.
4521 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4522 operations to do this with one comparison.
4524 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4525 function and the one above.
4527 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4528 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4530 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4531 two operands.
4533 We return the simplified tree or 0 if no optimization is possible. */
4535 static tree
4536 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4538 /* If this is the "or" of two comparisons, we can do something if
4539 the comparisons are NE_EXPR. If this is the "and", we can do something
4540 if the comparisons are EQ_EXPR. I.e.,
4541 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4543 WANTED_CODE is this operation code. For single bit fields, we can
4544 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4545 comparison for one-bit fields. */
4547 enum tree_code wanted_code;
4548 enum tree_code lcode, rcode;
4549 tree ll_arg, lr_arg, rl_arg, rr_arg;
4550 tree ll_inner, lr_inner, rl_inner, rr_inner;
4551 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4552 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4553 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4554 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4555 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4556 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4557 enum machine_mode lnmode, rnmode;
4558 tree ll_mask, lr_mask, rl_mask, rr_mask;
4559 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4560 tree l_const, r_const;
4561 tree lntype, rntype, result;
4562 int first_bit, end_bit;
4563 int volatilep;
4565 /* Start by getting the comparison codes. Fail if anything is volatile.
4566 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4567 it were surrounded with a NE_EXPR. */
4569 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4570 return 0;
4572 lcode = TREE_CODE (lhs);
4573 rcode = TREE_CODE (rhs);
4575 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4577 lhs = build2 (NE_EXPR, truth_type, lhs,
4578 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4579 lcode = NE_EXPR;
4582 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4584 rhs = build2 (NE_EXPR, truth_type, rhs,
4585 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4586 rcode = NE_EXPR;
4589 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4590 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4591 return 0;
4593 ll_arg = TREE_OPERAND (lhs, 0);
4594 lr_arg = TREE_OPERAND (lhs, 1);
4595 rl_arg = TREE_OPERAND (rhs, 0);
4596 rr_arg = TREE_OPERAND (rhs, 1);
4598 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4599 if (simple_operand_p (ll_arg)
4600 && simple_operand_p (lr_arg))
4602 tree result;
4603 if (operand_equal_p (ll_arg, rl_arg, 0)
4604 && operand_equal_p (lr_arg, rr_arg, 0))
4606 result = combine_comparisons (code, lcode, rcode,
4607 truth_type, ll_arg, lr_arg);
4608 if (result)
4609 return result;
4611 else if (operand_equal_p (ll_arg, rr_arg, 0)
4612 && operand_equal_p (lr_arg, rl_arg, 0))
4614 result = combine_comparisons (code, lcode,
4615 swap_tree_comparison (rcode),
4616 truth_type, ll_arg, lr_arg);
4617 if (result)
4618 return result;
4622 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4623 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4625 /* If the RHS can be evaluated unconditionally and its operands are
4626 simple, it wins to evaluate the RHS unconditionally on machines
4627 with expensive branches. In this case, this isn't a comparison
4628 that can be merged. Avoid doing this if the RHS is a floating-point
4629 comparison since those can trap. */
4631 if (BRANCH_COST >= 2
4632 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4633 && simple_operand_p (rl_arg)
4634 && simple_operand_p (rr_arg))
4636 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4637 if (code == TRUTH_OR_EXPR
4638 && lcode == NE_EXPR && integer_zerop (lr_arg)
4639 && rcode == NE_EXPR && integer_zerop (rr_arg)
4640 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4641 return build2 (NE_EXPR, truth_type,
4642 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4643 ll_arg, rl_arg),
4644 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4646 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4647 if (code == TRUTH_AND_EXPR
4648 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4649 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4650 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4651 return build2 (EQ_EXPR, truth_type,
4652 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4653 ll_arg, rl_arg),
4654 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4656 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4657 return build2 (code, truth_type, lhs, rhs);
4660 /* See if the comparisons can be merged. Then get all the parameters for
4661 each side. */
4663 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4664 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4665 return 0;
4667 volatilep = 0;
4668 ll_inner = decode_field_reference (ll_arg,
4669 &ll_bitsize, &ll_bitpos, &ll_mode,
4670 &ll_unsignedp, &volatilep, &ll_mask,
4671 &ll_and_mask);
4672 lr_inner = decode_field_reference (lr_arg,
4673 &lr_bitsize, &lr_bitpos, &lr_mode,
4674 &lr_unsignedp, &volatilep, &lr_mask,
4675 &lr_and_mask);
4676 rl_inner = decode_field_reference (rl_arg,
4677 &rl_bitsize, &rl_bitpos, &rl_mode,
4678 &rl_unsignedp, &volatilep, &rl_mask,
4679 &rl_and_mask);
4680 rr_inner = decode_field_reference (rr_arg,
4681 &rr_bitsize, &rr_bitpos, &rr_mode,
4682 &rr_unsignedp, &volatilep, &rr_mask,
4683 &rr_and_mask);
4685 /* The inner operation on the lhs of each comparison must be the
4686 same if we are to be able to do anything.
4687 Then see if we have constants. If not, the same must be true for
4688 the rhs's. */
4689 if (volatilep || ll_inner == 0 || rl_inner == 0
4690 || ! operand_equal_p (ll_inner, rl_inner, 0))
4691 return 0;
4693 if (TREE_CODE (lr_arg) == INTEGER_CST
4694 && TREE_CODE (rr_arg) == INTEGER_CST)
4695 l_const = lr_arg, r_const = rr_arg;
4696 else if (lr_inner == 0 || rr_inner == 0
4697 || ! operand_equal_p (lr_inner, rr_inner, 0))
4698 return 0;
4699 else
4700 l_const = r_const = 0;
4702 /* If either comparison code is not correct for our logical operation,
4703 fail. However, we can convert a one-bit comparison against zero into
4704 the opposite comparison against that bit being set in the field. */
4706 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4707 if (lcode != wanted_code)
4709 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4711 /* Make the left operand unsigned, since we are only interested
4712 in the value of one bit. Otherwise we are doing the wrong
4713 thing below. */
4714 ll_unsignedp = 1;
4715 l_const = ll_mask;
4717 else
4718 return 0;
4721 /* This is analogous to the code for l_const above. */
4722 if (rcode != wanted_code)
4724 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4726 rl_unsignedp = 1;
4727 r_const = rl_mask;
4729 else
4730 return 0;
4733 /* After this point all optimizations will generate bit-field
4734 references, which we might not want. */
4735 if (! lang_hooks.can_use_bit_fields_p ())
4736 return 0;
4738 /* See if we can find a mode that contains both fields being compared on
4739 the left. If we can't, fail. Otherwise, update all constants and masks
4740 to be relative to a field of that size. */
4741 first_bit = MIN (ll_bitpos, rl_bitpos);
4742 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4743 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4744 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4745 volatilep);
4746 if (lnmode == VOIDmode)
4747 return 0;
4749 lnbitsize = GET_MODE_BITSIZE (lnmode);
4750 lnbitpos = first_bit & ~ (lnbitsize - 1);
4751 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4752 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4754 if (BYTES_BIG_ENDIAN)
4756 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4757 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4760 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4761 size_int (xll_bitpos), 0);
4762 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4763 size_int (xrl_bitpos), 0);
4765 if (l_const)
4767 l_const = fold_convert (lntype, l_const);
4768 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4769 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4770 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4771 fold (build1 (BIT_NOT_EXPR,
4772 lntype, ll_mask)),
4773 0)))
4775 warning ("comparison is always %d", wanted_code == NE_EXPR);
4777 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4780 if (r_const)
4782 r_const = fold_convert (lntype, r_const);
4783 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4784 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4785 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4786 fold (build1 (BIT_NOT_EXPR,
4787 lntype, rl_mask)),
4788 0)))
4790 warning ("comparison is always %d", wanted_code == NE_EXPR);
4792 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4796 /* If the right sides are not constant, do the same for it. Also,
4797 disallow this optimization if a size or signedness mismatch occurs
4798 between the left and right sides. */
4799 if (l_const == 0)
4801 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4802 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4803 /* Make sure the two fields on the right
4804 correspond to the left without being swapped. */
4805 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4806 return 0;
4808 first_bit = MIN (lr_bitpos, rr_bitpos);
4809 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4810 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4811 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4812 volatilep);
4813 if (rnmode == VOIDmode)
4814 return 0;
4816 rnbitsize = GET_MODE_BITSIZE (rnmode);
4817 rnbitpos = first_bit & ~ (rnbitsize - 1);
4818 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4819 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4821 if (BYTES_BIG_ENDIAN)
4823 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4824 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4827 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4828 size_int (xlr_bitpos), 0);
4829 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4830 size_int (xrr_bitpos), 0);
4832 /* Make a mask that corresponds to both fields being compared.
4833 Do this for both items being compared. If the operands are the
4834 same size and the bits being compared are in the same position
4835 then we can do this by masking both and comparing the masked
4836 results. */
4837 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4838 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4839 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4841 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4842 ll_unsignedp || rl_unsignedp);
4843 if (! all_ones_mask_p (ll_mask, lnbitsize))
4844 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4846 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4847 lr_unsignedp || rr_unsignedp);
4848 if (! all_ones_mask_p (lr_mask, rnbitsize))
4849 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4851 return build2 (wanted_code, truth_type, lhs, rhs);
4854 /* There is still another way we can do something: If both pairs of
4855 fields being compared are adjacent, we may be able to make a wider
4856 field containing them both.
4858 Note that we still must mask the lhs/rhs expressions. Furthermore,
4859 the mask must be shifted to account for the shift done by
4860 make_bit_field_ref. */
4861 if ((ll_bitsize + ll_bitpos == rl_bitpos
4862 && lr_bitsize + lr_bitpos == rr_bitpos)
4863 || (ll_bitpos == rl_bitpos + rl_bitsize
4864 && lr_bitpos == rr_bitpos + rr_bitsize))
4866 tree type;
4868 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4869 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4870 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4871 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4873 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4874 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4875 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4876 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4878 /* Convert to the smaller type before masking out unwanted bits. */
4879 type = lntype;
4880 if (lntype != rntype)
4882 if (lnbitsize > rnbitsize)
4884 lhs = fold_convert (rntype, lhs);
4885 ll_mask = fold_convert (rntype, ll_mask);
4886 type = rntype;
4888 else if (lnbitsize < rnbitsize)
4890 rhs = fold_convert (lntype, rhs);
4891 lr_mask = fold_convert (lntype, lr_mask);
4892 type = lntype;
4896 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4897 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4899 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4900 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4902 return build2 (wanted_code, truth_type, lhs, rhs);
4905 return 0;
4908 /* Handle the case of comparisons with constants. If there is something in
4909 common between the masks, those bits of the constants must be the same.
4910 If not, the condition is always false. Test for this to avoid generating
4911 incorrect code below. */
4912 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4913 if (! integer_zerop (result)
4914 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4915 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4917 if (wanted_code == NE_EXPR)
4919 warning ("%<or%> of unmatched not-equal tests is always 1");
4920 return constant_boolean_node (true, truth_type);
4922 else
4924 warning ("%<and%> of mutually exclusive equal-tests is always 0");
4925 return constant_boolean_node (false, truth_type);
4929 /* Construct the expression we will return. First get the component
4930 reference we will make. Unless the mask is all ones for the width of
4931 that field, perform the mask operation. Then compare with the
4932 merged constant. */
4933 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4934 ll_unsignedp || rl_unsignedp);
4936 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4937 if (! all_ones_mask_p (ll_mask, lnbitsize))
4938 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4940 return build2 (wanted_code, truth_type, result,
4941 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
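/* For example, with adjacent bit-fields a and b in one word,
   p->a == 2 && p->b == 4 can become a single load of the word
   containing both fields, one mask selecting them, and one comparison
   against the merged constant.  */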
4944 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4945 constant. */
4947 static tree
4948 optimize_minmax_comparison (tree t)
4950 tree type = TREE_TYPE (t);
4951 tree arg0 = TREE_OPERAND (t, 0);
4952 enum tree_code op_code;
4953 tree comp_const = TREE_OPERAND (t, 1);
4954 tree minmax_const;
4955 int consts_equal, consts_lt;
4956 tree inner;
4958 STRIP_SIGN_NOPS (arg0);
4960 op_code = TREE_CODE (arg0);
4961 minmax_const = TREE_OPERAND (arg0, 1);
4962 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4963 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4964 inner = TREE_OPERAND (arg0, 0);
4966 /* If something does not permit us to optimize, return the original tree. */
4967 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4968 || TREE_CODE (comp_const) != INTEGER_CST
4969 || TREE_CONSTANT_OVERFLOW (comp_const)
4970 || TREE_CODE (minmax_const) != INTEGER_CST
4971 || TREE_CONSTANT_OVERFLOW (minmax_const))
4972 return t;
4974 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4975 and GT_EXPR, doing the rest with recursive calls using logical
4976 simplifications. */
4977 switch (TREE_CODE (t))
4979 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4980 return
4981 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4983 case GE_EXPR:
4984 return
4985 fold (build2 (TRUTH_ORIF_EXPR, type,
4986 optimize_minmax_comparison
4987 (build2 (EQ_EXPR, type, arg0, comp_const)),
4988 optimize_minmax_comparison
4989 (build2 (GT_EXPR, type, arg0, comp_const))));
4991 case EQ_EXPR:
4992 if (op_code == MAX_EXPR && consts_equal)
4993 /* MAX (X, 0) == 0 -> X <= 0 */
4994 return fold (build2 (LE_EXPR, type, inner, comp_const));
4996 else if (op_code == MAX_EXPR && consts_lt)
4997 /* MAX (X, 0) == 5 -> X == 5 */
4998 return fold (build2 (EQ_EXPR, type, inner, comp_const));
5000 else if (op_code == MAX_EXPR)
5001 /* MAX (X, 0) == -1 -> false */
5002 return omit_one_operand (type, integer_zero_node, inner);
5004 else if (consts_equal)
5005 /* MIN (X, 0) == 0 -> X >= 0 */
5006 return fold (build2 (GE_EXPR, type, inner, comp_const));
5008 else if (consts_lt)
5009 /* MIN (X, 0) == 5 -> false */
5010 return omit_one_operand (type, integer_zero_node, inner);
5012 else
5013 /* MIN (X, 0) == -1 -> X == -1 */
5014 return fold (build2 (EQ_EXPR, type, inner, comp_const));
5016 case GT_EXPR:
5017 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5018 /* MAX (X, 0) > 0 -> X > 0
5019 MAX (X, 0) > 5 -> X > 5 */
5020 return fold (build2 (GT_EXPR, type, inner, comp_const));
5022 else if (op_code == MAX_EXPR)
5023 /* MAX (X, 0) > -1 -> true */
5024 return omit_one_operand (type, integer_one_node, inner);
5026 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5027 /* MIN (X, 0) > 0 -> false
5028 MIN (X, 0) > 5 -> false */
5029 return omit_one_operand (type, integer_zero_node, inner);
5031 else
5032 /* MIN (X, 0) > -1 -> X > -1 */
5033 return fold (build2 (GT_EXPR, type, inner, comp_const));
5035 default:
5036 return t;
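/* For example, MIN (X, 10) >= 5 goes through the GE_EXPR case as
   MIN (X, 10) == 5 || MIN (X, 10) > 5, and the recursive calls reduce
   this to X == 5 || X > 5, i.e. X >= 5.  */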
5040 /* T is an integer expression that is being multiplied, divided, or reduced
5041 modulo a constant C (CODE says which operation and what kind of divide
5042 or modulus). See if we can eliminate that operation by folding it with
5043 other operations already in T. WIDE_TYPE, if non-null, is a type that
5044 should be used for the computation if wider than our type.
5046 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5047 (X * 2) + (Y * 4). We must, however, be assured that either the original
5048 expression would not overflow or that overflow is undefined for the type
5049 in the language in question.
5051 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5052 the machine has a multiply-accumulate insn or that this is part of an
5053 addressing calculation.
5055 If we return a non-null expression, it is an equivalent form of the
5056 original computation, but need not be in the original type. */
5058 static tree
5059 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5061 /* To avoid exponential search depth, refuse to allow recursion past
5062 three levels. Beyond that (1) it's highly unlikely that we'll find
5063 something interesting and (2) we've probably processed it before
5064 when we built the inner expression. */
5066 static int depth;
5067 tree ret;
5069 if (depth > 3)
5070 return NULL;
5072 depth++;
5073 ret = extract_muldiv_1 (t, c, code, wide_type);
5074 depth--;
5076 return ret;
5079 static tree
5080 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5082 tree type = TREE_TYPE (t);
5083 enum tree_code tcode = TREE_CODE (t);
5084 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5085 > GET_MODE_SIZE (TYPE_MODE (type)))
5086 ? wide_type : type);
5087 tree t1, t2;
5088 int same_p = tcode == code;
5089 tree op0 = NULL_TREE, op1 = NULL_TREE;
5091 /* Don't deal with constants of zero here; they confuse the code below. */
5092 if (integer_zerop (c))
5093 return NULL_TREE;
5095 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5096 op0 = TREE_OPERAND (t, 0);
5098 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5099 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5101 /* Note that we need not handle conditional operations here since fold
5102 already handles those cases. So just do arithmetic here. */
5103 switch (tcode)
5105 case INTEGER_CST:
5106 /* For a constant, we can always simplify if we are a multiply
5107 or (for divide and modulus) if it is a multiple of our constant. */
5108 if (code == MULT_EXPR
5109 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5110 return const_binop (code, fold_convert (ctype, t),
5111 fold_convert (ctype, c), 0);
5112 break;
5114 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5115 /* If op0 is an expression ... */
5116 if ((COMPARISON_CLASS_P (op0)
5117 || UNARY_CLASS_P (op0)
5118 || BINARY_CLASS_P (op0)
5119 || EXPRESSION_CLASS_P (op0))
5120 /* ... and is unsigned, and its type is smaller than ctype,
5121 then we cannot pass through as widening. */
5122 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5123 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5124 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5125 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5126 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5127 /* ... or this is a truncation (t is narrower than op0),
5128 then we cannot pass through this narrowing. */
5129 || (GET_MODE_SIZE (TYPE_MODE (type))
5130 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5131 /* ... or signedness changes for division or modulus,
5132 then we cannot pass through this conversion. */
5133 || (code != MULT_EXPR
5134 && (TYPE_UNSIGNED (ctype)
5135 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5136 break;
5138 /* Pass the constant down and see if we can make a simplification. If
5139 we can, replace this expression with the inner simplification for
5140 possible later conversion to our or some other type. */
5141 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5142 && TREE_CODE (t2) == INTEGER_CST
5143 && ! TREE_CONSTANT_OVERFLOW (t2)
5144 && (0 != (t1 = extract_muldiv (op0, t2, code,
5145 code == MULT_EXPR
5146 ? ctype : NULL_TREE))))
5147 return t1;
5148 break;
5150 case ABS_EXPR:
5151 /* If widening the type changes it from signed to unsigned, then we
5152 must avoid building ABS_EXPR itself as unsigned. */
5153 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5155 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5156 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5158 t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
5159 return fold_convert (ctype, t1);
5161 break;
5163 /* FALLTHROUGH */
5164 case NEGATE_EXPR:
5165 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5166 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5167 break;
5169 case MIN_EXPR: case MAX_EXPR:
5170 /* If widening the type changes the signedness, then we can't perform
5171 this optimization as that changes the result. */
5172 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5173 break;
5175 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5176 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5177 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5179 if (tree_int_cst_sgn (c) < 0)
5180 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5182 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5183 fold_convert (ctype, t2)));
5185 break;
5187 case LSHIFT_EXPR: case RSHIFT_EXPR:
5188 /* If the second operand is constant, this is a multiplication
5189 or floor division, by a power of two, so we can treat it that
5190 way unless the multiplier or divisor overflows. Signed
5191 left-shift overflow is implementation-defined rather than
5192 undefined in C90, so do not convert signed left shift into
5193 multiplication. */
5194 if (TREE_CODE (op1) == INTEGER_CST
5195 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5196 /* const_binop may not detect overflow correctly,
5197 so check for it explicitly here. */
5198 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5199 && TREE_INT_CST_HIGH (op1) == 0
5200 && 0 != (t1 = fold_convert (ctype,
5201 const_binop (LSHIFT_EXPR,
5202 size_one_node,
5203 op1, 0)))
5204 && ! TREE_OVERFLOW (t1))
5205 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5206 ? MULT_EXPR : FLOOR_DIV_EXPR,
5207 ctype, fold_convert (ctype, op0), t1),
5208 c, code, wide_type);
5209 break;
5211 case PLUS_EXPR: case MINUS_EXPR:
5212 /* See if we can eliminate the operation on both sides. If we can, we
5213 can return a new PLUS or MINUS. If we can't, the only remaining
5214 cases where we can do anything are if the second operand is a
5215 constant. */
5216 t1 = extract_muldiv (op0, c, code, wide_type);
5217 t2 = extract_muldiv (op1, c, code, wide_type);
5218 if (t1 != 0 && t2 != 0
5219 && (code == MULT_EXPR
5220 /* If not multiplication, we can only do this if both operands
5221 are divisible by c. */
5222 || (multiple_of_p (ctype, op0, c)
5223 && multiple_of_p (ctype, op1, c))))
5224 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5225 fold_convert (ctype, t2)));
5227 /* If this was a subtraction, negate OP1 and set it to be an addition.
5228 This simplifies the logic below. */
5229 if (tcode == MINUS_EXPR)
5230 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5232 if (TREE_CODE (op1) != INTEGER_CST)
5233 break;
5235 /* If either OP1 or C are negative, this optimization is not safe for
5236 some of the division and remainder types while for others we need
5237 to change the code. */
5238 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5240 if (code == CEIL_DIV_EXPR)
5241 code = FLOOR_DIV_EXPR;
5242 else if (code == FLOOR_DIV_EXPR)
5243 code = CEIL_DIV_EXPR;
5244 else if (code != MULT_EXPR
5245 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5246 break;
5249 /* If it's a multiply or a division/modulus operation of a multiple
5250 of our constant, do the operation and verify it doesn't overflow. */
5251 if (code == MULT_EXPR
5252 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5254 op1 = const_binop (code, fold_convert (ctype, op1),
5255 fold_convert (ctype, c), 0);
5256 /* We allow the constant to overflow with wrapping semantics. */
5257 if (op1 == 0
5258 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5259 break;
5261 else
5262 break;
5264 /* If we have an unsigned type that is not a sizetype, we cannot widen
5265 the operation since it will change the result if the original
5266 computation overflowed. */
5267 if (TYPE_UNSIGNED (ctype)
5268 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5269 && ctype != type)
5270 break;
5272 /* If we were able to eliminate our operation from the first side,
5273 apply our operation to the second side and reform the PLUS. */
5274 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5275 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5277 /* The last case is if we are a multiply. In that case, we can
5278 apply the distributive law to commute the multiply and addition
5279 if the multiplication of the constants doesn't overflow. */
5280 if (code == MULT_EXPR)
5281 return fold (build2 (tcode, ctype,
5282 fold (build2 (code, ctype,
5283 fold_convert (ctype, op0),
5284 fold_convert (ctype, c))),
5285 op1));
5287 break;
5289 case MULT_EXPR:
5290 /* We have a special case here if we are doing something like
5291 (C * 8) % 4 since we know that's zero. */
5292 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5293 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5294 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5295 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5296 return omit_one_operand (type, integer_zero_node, op0);
5298 /* ... fall through ... */
5300 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5301 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5302 /* If we can extract our operation from the LHS, do so and return a
5303 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5304 do something only if the second operand is a constant. */
5305 if (same_p
5306 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5307 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5308 fold_convert (ctype, op1)));
5309 else if (tcode == MULT_EXPR && code == MULT_EXPR
5310 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5311 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5312 fold_convert (ctype, t1)));
5313 else if (TREE_CODE (op1) != INTEGER_CST)
5314 return 0;
5316 /* If these are the same operation types, we can associate them
5317 assuming no overflow. */
5318 if (tcode == code
5319 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5320 fold_convert (ctype, c), 0))
5321 && ! TREE_OVERFLOW (t1))
5322 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5324 /* If these operations "cancel" each other, we have the main
5325 optimizations of this pass, which occur when either constant is a
5326 multiple of the other, in which case we replace this with an
5327 operation of either CODE or TCODE.
5329 If we have an unsigned type that is not a sizetype, we cannot do
5330 this since it will change the result if the original computation
5331 overflowed. */
5332 if ((! TYPE_UNSIGNED (ctype)
5333 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5334 && ! flag_wrapv
5335 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5336 || (tcode == MULT_EXPR
5337 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5338 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5340 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5341 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5342 fold_convert (ctype,
5343 const_binop (TRUNC_DIV_EXPR,
5344 op1, c, 0))));
5345 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5346 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5347 fold_convert (ctype,
5348 const_binop (TRUNC_DIV_EXPR,
5349 c, op1, 0))));
5351 break;
5353 default:
5354 break;
5357 return 0;
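/* Example of the cancellation above: for signed X, (X * 8) / 4 folds
   to X * 2; this relies on signed overflow being undefined, hence the
   checks on flag_wrapv and unsignedness.  */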
5360 /* Return a node which has the indicated constant VALUE (either 0 or
5361 1), and is of the indicated TYPE. */
5363 tree
5364 constant_boolean_node (int value, tree type)
5366 if (type == integer_type_node)
5367 return value ? integer_one_node : integer_zero_node;
5368 else if (type == boolean_type_node)
5369 return value ? boolean_true_node : boolean_false_node;
5370 else if (TREE_CODE (type) == BOOLEAN_TYPE)
5371 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5372 : integer_zero_node);
5373 else
5374 return build_int_cst (type, value);
5378 /* Return true if expr looks like an ARRAY_REF and set base and
5379 offset to the appropriate trees. If there is no offset,
5380 offset is set to NULL_TREE. */
5382 static bool
5383 extract_array_ref (tree expr, tree *base, tree *offset)
5385 /* We have to be careful when stripping nops, as changing the
5386 base type can change the meaning of the offset. */
5387 tree inner_expr = expr;
5388 STRIP_NOPS (inner_expr);
5389 /* One canonical form is a PLUS_EXPR with the first
5390 argument being an ADDR_EXPR with a possible NOP_EXPR
5391 attached. */
5392 if (TREE_CODE (expr) == PLUS_EXPR)
5394 tree op0 = TREE_OPERAND (expr, 0);
5395 STRIP_NOPS (op0);
5396 if (TREE_CODE (op0) == ADDR_EXPR)
5398 *base = TREE_OPERAND (expr, 0);
5399 *offset = TREE_OPERAND (expr, 1);
5400 return true;
5403 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5404 which we transform into an ADDR_EXPR with appropriate
5405 offset. For other arguments to the ADDR_EXPR we assume
5406 zero offset and as such do not care about the ADDR_EXPR
5407 type and strip possible nops from it. */
5408 else if (TREE_CODE (inner_expr) == ADDR_EXPR)
5410 tree op0 = TREE_OPERAND (inner_expr, 0);
5411 if (TREE_CODE (op0) == ARRAY_REF)
5413 *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
5414 *offset = TREE_OPERAND (op0, 1);
5416 else
5418 *base = inner_expr;
5419 *offset = NULL_TREE;
5421 return true;
5424 return false;
5428 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5429 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5430 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5431 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5432 COND is the first argument to CODE; otherwise (as in the example
5433 given here), it is the second argument. TYPE is the type of the
5434 original expression. Return NULL_TREE if no simplification is
5435 possible. */
5437 static tree
5438 fold_binary_op_with_conditional_arg (tree t, enum tree_code code, tree cond,
5439 tree arg, int cond_first_p)
5441 const tree type = TREE_TYPE (t);
5442 tree cond_type = cond_first_p ? TREE_TYPE (TREE_OPERAND (t, 0))
5443 : TREE_TYPE (TREE_OPERAND (t, 1));
5444 tree arg_type = cond_first_p ? TREE_TYPE (TREE_OPERAND (t, 1))
5445 : TREE_TYPE (TREE_OPERAND (t, 0));
5446 tree test, true_value, false_value;
5447 tree lhs = NULL_TREE;
5448 tree rhs = NULL_TREE;
5450 /* This transformation is only worthwhile if we don't have to wrap
5451 arg in a SAVE_EXPR, and the operation can be simplified on at least
5452 one of the branches once it's pushed inside the COND_EXPR. */
5453 if (!TREE_CONSTANT (arg))
5454 return NULL_TREE;
5456 if (TREE_CODE (cond) == COND_EXPR)
5458 test = TREE_OPERAND (cond, 0);
5459 true_value = TREE_OPERAND (cond, 1);
5460 false_value = TREE_OPERAND (cond, 2);
5461 /* If this operand is a void-typed expression (such as a throw),
5462 it does not make sense to try to perform a logical or arithmetic
5463 operation involving it.  */
5464 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5465 lhs = true_value;
5466 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5467 rhs = false_value;
5469 else
5471 tree testtype = TREE_TYPE (cond);
5472 test = cond;
5473 true_value = constant_boolean_node (true, testtype);
5474 false_value = constant_boolean_node (false, testtype);
5477 arg = fold_convert (arg_type, arg);
5478 if (lhs == 0)
5480 true_value = fold_convert (cond_type, true_value);
5481 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5482 : build2 (code, type, arg, true_value));
5484 if (rhs == 0)
5486 false_value = fold_convert (cond_type, false_value);
5487 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5488 : build2 (code, type, arg, false_value));
5491 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5492 return fold_convert (type, test);
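/* Illustrative sketch, not part of fold-const.c: the rewrite above
   distributes a binary operation over a COND_EXPR.  The function name
   below is hypothetical; unsigned arithmetic avoids overflow concerns
   in the check.  */
#include <assert.h>
static void
example_cond_arg (int b, unsigned x, unsigned y)
{
  const unsigned a = 7;  /* TREE_CONSTANT, as the fold requires.  */
  assert (a + (b ? x : y) == (b ? (a + x) : (a + y)));
}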
5496 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5498 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5499 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5500 ADDEND is the same as X.
5502 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5503 and finite. The problematic cases are when X is zero, and its mode
5504 has signed zeros. In the case of rounding towards -infinity,
5505 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5506 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5508 static bool
5509 fold_real_zero_addition_p (tree type, tree addend, int negate)
5511 if (!real_zerop (addend))
5512 return false;
5514 /* Don't allow the fold with -fsignaling-nans. */
5515 if (HONOR_SNANS (TYPE_MODE (type)))
5516 return false;
5518 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5519 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5520 return true;
5522 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5523 if (TREE_CODE (addend) == REAL_CST
5524 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5525 negate = !negate;
5527 /* The mode has signed zeros, and we have to honor their sign.
5528 In this situation, there is only one case we can return true for.
5529 X - 0 is the same as X unless rounding towards -infinity is
5530 supported. */
5531 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
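/* Illustrative sketch, not part of fold-const.c: the signed-zero
   corner case described above can be observed directly.  Assumes a
   C99 fenv/libm and a target that honors fesetround (strictly,
   FENV_ACCESS should be enabled); the function name is hypothetical.  */
#include <assert.h>
#include <fenv.h>
#include <math.h>
static void
example_signed_zero (void)
{
  volatile double zero = 0.0;
  assert (!signbit (zero - zero));   /* To-nearest: +0 - +0 is +0.  */
  fesetround (FE_DOWNWARD);
  assert (signbit (zero - zero));    /* Toward -Inf: +0 - +0 is -0.  */
  fesetround (FE_TONEAREST);
}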
5534 /* Subroutine of fold() that checks comparisons of built-in math
5535 functions against real constants.
5537 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5538 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5539 is the type of the result and ARG0 and ARG1 are the operands of the
5540 comparison. ARG1 must be a TREE_REAL_CST.
5542 The function returns the constant folded tree if a simplification
5543 can be made, and NULL_TREE otherwise. */
5545 static tree
5546 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5547 tree type, tree arg0, tree arg1)
5549 REAL_VALUE_TYPE c;
5551 if (BUILTIN_SQRT_P (fcode))
5553 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5554 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5556 c = TREE_REAL_CST (arg1);
5557 if (REAL_VALUE_NEGATIVE (c))
5559 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are all false if y is negative.  */
5560 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5561 return omit_one_operand (type, integer_zero_node, arg);
5563 /* sqrt(x) > y is always true, if y is negative and we
5564 don't care about NaNs, i.e. negative values of x. */
5565 if (code == NE_EXPR || !HONOR_NANS (mode))
5566 return omit_one_operand (type, integer_one_node, arg);
5568 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5569 return fold (build2 (GE_EXPR, type, arg,
5570 build_real (TREE_TYPE (arg), dconst0)));
5572 else if (code == GT_EXPR || code == GE_EXPR)
5574 REAL_VALUE_TYPE c2;
5576 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5577 real_convert (&c2, mode, &c2);
5579 if (REAL_VALUE_ISINF (c2))
5581 /* sqrt(x) > y is x == +Inf, when y is very large. */
5582 if (HONOR_INFINITIES (mode))
5583 return fold (build2 (EQ_EXPR, type, arg,
5584 build_real (TREE_TYPE (arg), c2)));
5586 /* sqrt(x) > y is always false, when y is very large
5587 and we don't care about infinities. */
5588 return omit_one_operand (type, integer_zero_node, arg);
5591 /* sqrt(x) > c is the same as x > c*c. */
5592 return fold (build2 (code, type, arg,
5593 build_real (TREE_TYPE (arg), c2)));
5595 else if (code == LT_EXPR || code == LE_EXPR)
5597 REAL_VALUE_TYPE c2;
5599 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5600 real_convert (&c2, mode, &c2);
5602 if (REAL_VALUE_ISINF (c2))
5604 /* sqrt(x) < y is always true, when y is a very large
5605 value and we don't care about NaNs or Infinities. */
5606 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5607 return omit_one_operand (type, integer_one_node, arg);
5609 /* sqrt(x) < y is x != +Inf when y is very large and we
5610 don't care about NaNs. */
5611 if (! HONOR_NANS (mode))
5612 return fold (build2 (NE_EXPR, type, arg,
5613 build_real (TREE_TYPE (arg), c2)));
5615 /* sqrt(x) < y is x >= 0 when y is very large and we
5616 don't care about Infinities. */
5617 if (! HONOR_INFINITIES (mode))
5618 return fold (build2 (GE_EXPR, type, arg,
5619 build_real (TREE_TYPE (arg), dconst0)));
5621 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5622 if (lang_hooks.decls.global_bindings_p () != 0
5623 || CONTAINS_PLACEHOLDER_P (arg))
5624 return NULL_TREE;
5626 arg = save_expr (arg);
5627 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5628 fold (build2 (GE_EXPR, type, arg,
5629 build_real (TREE_TYPE (arg),
5630 dconst0))),
5631 fold (build2 (NE_EXPR, type, arg,
5632 build_real (TREE_TYPE (arg),
5633 c2)))));
5636 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5637 if (! HONOR_NANS (mode))
5638 return fold (build2 (code, type, arg,
5639 build_real (TREE_TYPE (arg), c2)));
5641 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5642 if (lang_hooks.decls.global_bindings_p () == 0
5643 && ! CONTAINS_PLACEHOLDER_P (arg))
5645 arg = save_expr (arg);
5646 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5647 fold (build2 (GE_EXPR, type, arg,
5648 build_real (TREE_TYPE (arg),
5649 dconst0))),
5650 fold (build2 (code, type, arg,
5651 build_real (TREE_TYPE (arg),
5652 c2)))));
5657 return NULL_TREE;
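/* Illustrative sketch, not part of fold-const.c: the sqrt folds above
   rely on identities that are exact in real arithmetic, e.g.
   sqrt(x) > c iff x > c*c for c >= 0; GCC applies them only when
   unsafe math optimizations are enabled, since c*c is rounded.  A
   hypothetical standalone check using exactly representable values:  */
#include <assert.h>
#include <math.h>
static void
example_sqrt_compare (void)
{
  assert ((sqrt (10.0) > 3.0) == (10.0 > 3.0 * 3.0));  /* both true  */
  assert ((sqrt (9.0) > 3.0) == (9.0 > 3.0 * 3.0));    /* both false */
}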
5660 /* Subroutine of fold() that optimizes comparisons against Infinities,
5661 either +Inf or -Inf.
5663 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5664 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5665 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5667 The function returns the constant folded tree if a simplification
5668 can be made, and NULL_TREE otherwise. */
5670 static tree
5671 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5673 enum machine_mode mode;
5674 REAL_VALUE_TYPE max;
5675 tree temp;
5676 bool neg;
5678 mode = TYPE_MODE (TREE_TYPE (arg0));
5680 /* For negative infinity swap the sense of the comparison. */
5681 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5682 if (neg)
5683 code = swap_tree_comparison (code);
5685 switch (code)
5687 case GT_EXPR:
5688 /* x > +Inf is always false, if we ignore sNaNs.  */
5689 if (HONOR_SNANS (mode))
5690 return NULL_TREE;
5691 return omit_one_operand (type, integer_zero_node, arg0);
5693 case LE_EXPR:
5694 /* x <= +Inf is always true, if we don't care about NaNs.  */
5695 if (! HONOR_NANS (mode))
5696 return omit_one_operand (type, integer_one_node, arg0);
5698 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5699 if (lang_hooks.decls.global_bindings_p () == 0
5700 && ! CONTAINS_PLACEHOLDER_P (arg0))
5702 arg0 = save_expr (arg0);
5703 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5705 break;
5707 case EQ_EXPR:
5708 case GE_EXPR:
5709 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5710 real_maxval (&max, neg, mode);
5711 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5712 arg0, build_real (TREE_TYPE (arg0), max)));
5714 case LT_EXPR:
5715 /* x < +Inf is always equal to x <= DBL_MAX. */
5716 real_maxval (&max, neg, mode);
5717 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5718 arg0, build_real (TREE_TYPE (arg0), max)));
5720 case NE_EXPR:
5721 /* x != +Inf is always equal to !(x > DBL_MAX). */
5722 real_maxval (&max, neg, mode);
5723 if (! HONOR_NANS (mode))
5724 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5725 arg0, build_real (TREE_TYPE (arg0), max)));
5727 /* The transformation below creates non-gimple code and thus is
5728 not appropriate if we are in gimple form. */
5729 if (in_gimple_form)
5730 return NULL_TREE;
5732 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5733 arg0, build_real (TREE_TYPE (arg0), max)));
5734 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5736 default:
5737 break;
5740 return NULL_TREE;
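/* Illustrative sketch, not part of fold-const.c: the Inf folds above
   rest on DBL_MAX being the largest finite value, so x < +Inf holds
   exactly when x <= DBL_MAX once NaNs are excluded.  Function name
   hypothetical.  */
#include <assert.h>
#include <float.h>
#include <math.h>
static void
example_inf_compare (double x)
{
  if (!isnan (x))
    assert ((x < INFINITY) == (x <= DBL_MAX));
}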
5743 /* Subroutine of fold() that optimizes comparisons of a division by
5744 a nonzero integer constant against an integer constant, i.e.
5745 X/C1 op C2.
5747 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5748 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5749 are the operands of the comparison.  ARG1 must be an INTEGER_CST.
5751 The function returns the constant folded tree if a simplification
5752 can be made, and NULL_TREE otherwise. */
5754 static tree
5755 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5757 tree prod, tmp, hi, lo;
5758 tree arg00 = TREE_OPERAND (arg0, 0);
5759 tree arg01 = TREE_OPERAND (arg0, 1);
5760 unsigned HOST_WIDE_INT lpart;
5761 HOST_WIDE_INT hpart;
5762 int overflow;
5764 /* We have to do this the hard way to detect unsigned overflow.
5765 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5766 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5767 TREE_INT_CST_HIGH (arg01),
5768 TREE_INT_CST_LOW (arg1),
5769 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5770 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5771 prod = force_fit_type (prod, -1, overflow, false);
5773 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5775 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5776 lo = prod;
5778 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5779 overflow = add_double (TREE_INT_CST_LOW (prod),
5780 TREE_INT_CST_HIGH (prod),
5781 TREE_INT_CST_LOW (tmp),
5782 TREE_INT_CST_HIGH (tmp),
5783 &lpart, &hpart);
5784 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5785 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5786 TREE_CONSTANT_OVERFLOW (prod));
5788 else if (tree_int_cst_sgn (arg01) >= 0)
5790 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5791 switch (tree_int_cst_sgn (arg1))
5793 case -1:
5794 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5795 hi = prod;
5796 break;
5798 case 0:
5799 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5800 hi = tmp;
5801 break;
5803 case 1:
5804 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5805 lo = prod;
5806 break;
5808 default:
5809 gcc_unreachable ();
5812 else
5814 /* A negative divisor reverses the relational operators. */
5815 code = swap_tree_comparison (code);
5817 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5818 switch (tree_int_cst_sgn (arg1))
5820 case -1:
5821 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5822 lo = prod;
5823 break;
5825 case 0:
5826 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5827 lo = tmp;
5828 break;
5830 case 1:
5831 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5832 hi = prod;
5833 break;
5835 default:
5836 gcc_unreachable ();
5840 switch (code)
5842 case EQ_EXPR:
5843 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5844 return omit_one_operand (type, integer_zero_node, arg00);
5845 if (TREE_OVERFLOW (hi))
5846 return fold (build2 (GE_EXPR, type, arg00, lo));
5847 if (TREE_OVERFLOW (lo))
5848 return fold (build2 (LE_EXPR, type, arg00, hi));
5849 return build_range_check (type, arg00, 1, lo, hi);
5851 case NE_EXPR:
5852 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5853 return omit_one_operand (type, integer_one_node, arg00);
5854 if (TREE_OVERFLOW (hi))
5855 return fold (build2 (LT_EXPR, type, arg00, lo));
5856 if (TREE_OVERFLOW (lo))
5857 return fold (build2 (GT_EXPR, type, arg00, hi));
5858 return build_range_check (type, arg00, 0, lo, hi);
5860 case LT_EXPR:
5861 if (TREE_OVERFLOW (lo))
5862 return omit_one_operand (type, integer_zero_node, arg00);
5863 return fold (build2 (LT_EXPR, type, arg00, lo));
5865 case LE_EXPR:
5866 if (TREE_OVERFLOW (hi))
5867 return omit_one_operand (type, integer_one_node, arg00);
5868 return fold (build2 (LE_EXPR, type, arg00, hi));
5870 case GT_EXPR:
5871 if (TREE_OVERFLOW (hi))
5872 return omit_one_operand (type, integer_zero_node, arg00);
5873 return fold (build2 (GT_EXPR, type, arg00, hi));
5875 case GE_EXPR:
5876 if (TREE_OVERFLOW (lo))
5877 return omit_one_operand (type, integer_one_node, arg00);
5878 return fold (build2 (GE_EXPR, type, arg00, lo));
5880 default:
5881 break;
5884 return NULL_TREE;
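/* Illustrative sketch, not part of fold-const.c: for unsigned X the
   range computed above for X/C1 == C2 is [C1*C2, C1*C2 + C1 - 1], so
   x/3 == 2 is exactly the range check 6 <= x && x <= 8.  The function
   below is a hypothetical standalone check.  */
#include <assert.h>
static void
example_div_compare (void)
{
  unsigned x;
  for (x = 0; x < 100; x++)
    assert ((x / 3 == 2) == (x >= 6 && x <= 8));
}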
5888 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5889 equality/inequality test, then return a simplified form of
5890 the test using shifts and logical operations. Otherwise return
5891 NULL. TYPE is the desired result type. */
5893 tree
5894 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5895 tree result_type)
5897 /* If this is testing a single bit, we can optimize the test. */
5898 if ((code == NE_EXPR || code == EQ_EXPR)
5899 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5900 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5902 tree inner = TREE_OPERAND (arg0, 0);
5903 tree type = TREE_TYPE (arg0);
5904 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5905 enum machine_mode operand_mode = TYPE_MODE (type);
5906 int ops_unsigned;
5907 tree signed_type, unsigned_type, intermediate_type;
5908 tree arg00;
5910 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5911 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5912 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5913 if (arg00 != NULL_TREE
5914 /* This is only a win if casting to a signed type is cheap,
5915 i.e. when arg00's type is not a partial mode. */
5916 && TYPE_PRECISION (TREE_TYPE (arg00))
5917 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5919 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5920 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5921 result_type, fold_convert (stype, arg00),
5922 fold_convert (stype, integer_zero_node)));
5925 /* Otherwise we have (A & C) != 0 where C is a single bit,
5926 convert that into ((A >> C2) & 1), where C2 = log2(C).
5927 Similarly for (A & C) == 0. */
5929 /* If INNER is a right shift by a constant and the shift count
5930 plus BITNUM does not overflow, adjust BITNUM and INNER.  */
5931 if (TREE_CODE (inner) == RSHIFT_EXPR
5932 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5933 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5934 && bitnum < TYPE_PRECISION (type)
5935 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5936 bitnum - TYPE_PRECISION (type)))
5938 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5939 inner = TREE_OPERAND (inner, 0);
5942 /* If we are going to be able to omit the AND below, we must do our
5943 operations as unsigned. If we must use the AND, we have a choice.
5944 Normally unsigned is faster, but for some machines signed is. */
5945 #ifdef LOAD_EXTEND_OP
5946 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
5947 && !flag_syntax_only) ? 0 : 1;
5948 #else
5949 ops_unsigned = 1;
5950 #endif
5952 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5953 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5954 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5955 inner = fold_convert (intermediate_type, inner);
5957 if (bitnum != 0)
5958 inner = build2 (RSHIFT_EXPR, intermediate_type,
5959 inner, size_int (bitnum));
5961 if (code == EQ_EXPR)
5962 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5963 inner, integer_one_node));
5965 /* Put the AND last so it can combine with more things. */
5966 inner = build2 (BIT_AND_EXPR, intermediate_type,
5967 inner, integer_one_node);
5969 /* Make sure to return the proper type. */
5970 inner = fold_convert (result_type, inner);
5972 return inner;
5974 return NULL_TREE;
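/* Illustrative sketch, not part of fold-const.c: the single-bit
   rewrite above, (A & C) != 0 with C == 1 << C2 becoming
   (A >> C2) & 1, checked for one bit position (hypothetical
   function name).  */
#include <assert.h>
static void
example_single_bit (unsigned a)
{
  assert (((a & 8) != 0) == ((a >> 3) & 1));
}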
5977 /* Check whether we are allowed to reorder operands arg0 and arg1,
5978 such that the evaluation of arg1 occurs before arg0. */
5980 static bool
5981 reorder_operands_p (tree arg0, tree arg1)
5983 if (! flag_evaluation_order)
5984 return true;
5985 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5986 return true;
5987 return ! TREE_SIDE_EFFECTS (arg0)
5988 && ! TREE_SIDE_EFFECTS (arg1);
5991 /* Test whether it is preferable to swap two operands, ARG0 and
5992 ARG1, for example because ARG0 is an integer constant and ARG1
5993 isn't. If REORDER is true, only recommend swapping if we can
5994 evaluate the operands in reverse order. */
5996 bool
5997 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5999 STRIP_SIGN_NOPS (arg0);
6000 STRIP_SIGN_NOPS (arg1);
6002 if (TREE_CODE (arg1) == INTEGER_CST)
6003 return 0;
6004 if (TREE_CODE (arg0) == INTEGER_CST)
6005 return 1;
6007 if (TREE_CODE (arg1) == REAL_CST)
6008 return 0;
6009 if (TREE_CODE (arg0) == REAL_CST)
6010 return 1;
6012 if (TREE_CODE (arg1) == COMPLEX_CST)
6013 return 0;
6014 if (TREE_CODE (arg0) == COMPLEX_CST)
6015 return 1;
6017 if (TREE_CONSTANT (arg1))
6018 return 0;
6019 if (TREE_CONSTANT (arg0))
6020 return 1;
6022 if (optimize_size)
6023 return 0;
6025 if (reorder && flag_evaluation_order
6026 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6027 return 0;
6029 if (DECL_P (arg1))
6030 return 0;
6031 if (DECL_P (arg0))
6032 return 1;
6034 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6035 for commutative and comparison operators. Ensuring a canonical
6036 form allows the optimizers to find additional redundancies without
6037 having to explicitly check for both orderings. */
6038 if (TREE_CODE (arg0) == SSA_NAME
6039 && TREE_CODE (arg1) == SSA_NAME
6040 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6041 return 1;
6043 return 0;
6046 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6047 ARG0 is extended to a wider type. */
6049 static tree
6050 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6052 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6053 tree arg1_unw;
6054 tree shorter_type, outer_type;
6055 tree min, max;
6056 bool above, below;
6058 if (arg0_unw == arg0)
6059 return NULL_TREE;
6060 shorter_type = TREE_TYPE (arg0_unw);
6062 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6063 return NULL_TREE;
6065 arg1_unw = get_unwidened (arg1, shorter_type);
6066 if (!arg1_unw)
6067 return NULL_TREE;
6069 /* If possible, express the comparison in the shorter mode. */
6070 if ((code == EQ_EXPR || code == NE_EXPR
6071 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6072 && (TREE_TYPE (arg1_unw) == shorter_type
6073 || (TREE_CODE (arg1_unw) == INTEGER_CST
6074 && TREE_CODE (shorter_type) == INTEGER_TYPE
6075 && int_fits_type_p (arg1_unw, shorter_type))))
6076 return fold (build (code, type, arg0_unw,
6077 fold_convert (shorter_type, arg1_unw)));
6079 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6080 return NULL_TREE;
6082 /* If we are comparing with an integer that does not fit into the range
6083 of the shorter type, the result is known. */
6084 outer_type = TREE_TYPE (arg1_unw);
6085 min = lower_bound_in_type (outer_type, shorter_type);
6086 max = upper_bound_in_type (outer_type, shorter_type);
6088 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6089 max, arg1_unw));
6090 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6091 arg1_unw, min));
6093 switch (code)
6095 case EQ_EXPR:
6096 if (above || below)
6097 return omit_one_operand (type, integer_zero_node, arg0);
6098 break;
6100 case NE_EXPR:
6101 if (above || below)
6102 return omit_one_operand (type, integer_one_node, arg0);
6103 break;
6105 case LT_EXPR:
6106 case LE_EXPR:
6107 if (above)
6108 return omit_one_operand (type, integer_one_node, arg0);
6109 else if (below)
6110 return omit_one_operand (type, integer_zero_node, arg0);
6112 case GT_EXPR:
6113 case GE_EXPR:
6114 if (above)
6115 return omit_one_operand (type, integer_zero_node, arg0);
6116 else if (below)
6117 return omit_one_operand (type, integer_one_node, arg0);
6119 default:
6120 break;
6123 return NULL_TREE;
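/* Illustrative sketch, not part of fold-const.c: once the constant is
   known to lie outside the narrower type's range, the comparison is a
   constant.  Assumes the usual 8-bit signed char (range -128..127);
   function name hypothetical.  */
#include <assert.h>
static void
example_widened_compare (signed char c)
{
  assert (((int) c == 300) == 0);   /* never equal   */
  assert (((int) c < 300) == 1);    /* always below  */
}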
6126 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6127 ARG0 just the signedness is changed. */
6129 static tree
6130 fold_sign_changed_comparison (enum tree_code code, tree type,
6131 tree arg0, tree arg1)
6133 tree arg0_inner, tmp;
6134 tree inner_type, outer_type;
6136 if (TREE_CODE (arg0) != NOP_EXPR)
6137 return NULL_TREE;
6139 outer_type = TREE_TYPE (arg0);
6140 arg0_inner = TREE_OPERAND (arg0, 0);
6141 inner_type = TREE_TYPE (arg0_inner);
6143 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6144 return NULL_TREE;
6146 if (TREE_CODE (arg1) != INTEGER_CST
6147 && !(TREE_CODE (arg1) == NOP_EXPR
6148 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6149 return NULL_TREE;
6151 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6152 && code != NE_EXPR
6153 && code != EQ_EXPR)
6154 return NULL_TREE;
6156 if (TREE_CODE (arg1) == INTEGER_CST)
6158 tmp = build_int_cst_wide (inner_type,
6159 TREE_INT_CST_LOW (arg1),
6160 TREE_INT_CST_HIGH (arg1));
6161 arg1 = force_fit_type (tmp, 0,
6162 TREE_OVERFLOW (arg1),
6163 TREE_CONSTANT_OVERFLOW (arg1));
6165 else
6166 arg1 = fold_convert (inner_type, arg1);
6168 return fold (build (code, type, arg0_inner, arg1));
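/* Illustrative sketch, not part of fold-const.c: for equality the
   signedness of the operand type is irrelevant, so (unsigned) i == 5u
   behaves exactly like i == 5 (function name hypothetical).  */
#include <assert.h>
static void
example_sign_changed (int i)
{
  assert (((unsigned) i == 5u) == (i == 5));
}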
6171 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6172 the step of the array.  TYPE is the type of the expression.  ADDR is the address.
6173 MULT is the multiplicative expression. If the function succeeds, the new
6174 address expression is returned. Otherwise NULL_TREE is returned. */
6176 static tree
6177 try_move_mult_to_index (tree type, enum tree_code code, tree addr, tree mult)
6179 tree s, delta, step;
6180 tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
6181 tree ref = TREE_OPERAND (addr, 0), pref;
6182 tree ret, pos;
6183 tree itype;
6185 STRIP_NOPS (arg0);
6186 STRIP_NOPS (arg1);
6188 if (TREE_CODE (arg0) == INTEGER_CST)
6190 s = arg0;
6191 delta = arg1;
6193 else if (TREE_CODE (arg1) == INTEGER_CST)
6195 s = arg1;
6196 delta = arg0;
6198 else
6199 return NULL_TREE;
6201 for (;; ref = TREE_OPERAND (ref, 0))
6203 if (TREE_CODE (ref) == ARRAY_REF)
6205 step = array_ref_element_size (ref);
6207 if (TREE_CODE (step) != INTEGER_CST)
6208 continue;
6210 itype = TREE_TYPE (step);
6212 /* If the type sizes do not match, we might run into problems
6213 when one of them would overflow. */
6214 if (TYPE_PRECISION (itype) != TYPE_PRECISION (type))
6215 continue;
6217 if (!operand_equal_p (step, fold_convert (itype, s), 0))
6218 continue;
6220 delta = fold_convert (itype, delta);
6221 break;
6224 if (!handled_component_p (ref))
6225 return NULL_TREE;
6228 /* We found a suitable array reference.  So copy everything up to it,
6229 and replace the index. */
6231 pref = TREE_OPERAND (addr, 0);
6232 ret = copy_node (pref);
6233 pos = ret;
6235 while (pref != ref)
6237 pref = TREE_OPERAND (pref, 0);
6238 TREE_OPERAND (pos, 0) = copy_node (pref);
6239 pos = TREE_OPERAND (pos, 0);
6242 TREE_OPERAND (pos, 1) = fold (build2 (code, itype,
6243 TREE_OPERAND (pos, 1),
6244 delta));
6246 return build1 (ADDR_EXPR, type, ret);
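/* Illustrative sketch, not part of fold-const.c: when S equals the
   element size, &a[i] plus S*D bytes names the element &a[i + d],
   which is the identity the rewrite above exploits.  Function name
   hypothetical.  */
#include <assert.h>
static void
example_move_mult (void)
{
  int a[16];
  int i = 3, d = 2;
  assert ((char *) &a[i] + sizeof (int) * d == (char *) &a[i + d]);
}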
6250 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6251 means A >= Y && A != MAX, but in this case we know that
6252 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6254 static tree
6255 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6257 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6259 if (TREE_CODE (bound) == LT_EXPR)
6260 a = TREE_OPERAND (bound, 0);
6261 else if (TREE_CODE (bound) == GT_EXPR)
6262 a = TREE_OPERAND (bound, 1);
6263 else
6264 return NULL_TREE;
6266 typea = TREE_TYPE (a);
6267 if (!INTEGRAL_TYPE_P (typea)
6268 && !POINTER_TYPE_P (typea))
6269 return NULL_TREE;
6271 if (TREE_CODE (ineq) == LT_EXPR)
6273 a1 = TREE_OPERAND (ineq, 1);
6274 y = TREE_OPERAND (ineq, 0);
6276 else if (TREE_CODE (ineq) == GT_EXPR)
6278 a1 = TREE_OPERAND (ineq, 0);
6279 y = TREE_OPERAND (ineq, 1);
6281 else
6282 return NULL_TREE;
6284 if (TREE_TYPE (a1) != typea)
6285 return NULL_TREE;
6287 diff = fold (build2 (MINUS_EXPR, typea, a1, a));
6288 if (!integer_onep (diff))
6289 return NULL_TREE;
6291 return fold (build2 (GE_EXPR, type, a, y));
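/* Illustrative sketch, not part of fold-const.c: given A < X, A + 1
   cannot wrap (A is at most X - 1 <= INT_MAX - 1), so A + 1 > Y is
   the same test as A >= Y.  Function name hypothetical.  */
#include <assert.h>
static void
example_nonsharp_ineq (int a, int x, int y)
{
  if (a < x)   /* the BOUND; rules out a == INT_MAX */
    assert ((a < x && a + 1 > y) == (a < x && a >= y));
}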
6294 /* Perform constant folding and related simplification of EXPR.
6295 The related simplifications include x*1 => x, x*0 => 0, etc.,
6296 and application of the associative law.
6297 NOP_EXPR conversions may be removed freely (as long as we
6298 are careful not to change the type of the overall expression).
6299 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
6300 but we can constant-fold them if they have constant operands. */
6302 #ifdef ENABLE_FOLD_CHECKING
6303 # define fold(x) fold_1 (x)
6304 static tree fold_1 (tree);
6305 static
6306 #endif
6307 tree
6308 fold (tree expr)
6310 const tree t = expr;
6311 const tree type = TREE_TYPE (expr);
6312 tree t1 = NULL_TREE;
6313 tree tem;
6314 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
6315 enum tree_code code = TREE_CODE (t);
6316 enum tree_code_class kind = TREE_CODE_CLASS (code);
6318 /* WINS will be nonzero when the switch is done
6319 if all operands are constant. */
6320 int wins = 1;
6322 /* Return right away if a constant. */
6323 if (kind == tcc_constant)
6324 return t;
6326 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6328 tree subop;
6330 /* Special case for conversion ops that can have fixed point args. */
6331 arg0 = TREE_OPERAND (t, 0);
6333 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6334 if (arg0 != 0)
6335 STRIP_SIGN_NOPS (arg0);
6337 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
6338 subop = TREE_REALPART (arg0);
6339 else
6340 subop = arg0;
6342 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
6343 && TREE_CODE (subop) != REAL_CST)
6344 /* Note that TREE_CONSTANT isn't enough:
6345 static var addresses are constant but we can't
6346 do arithmetic on them. */
6347 wins = 0;
6349 else if (IS_EXPR_CODE_CLASS (kind))
6351 int len = TREE_CODE_LENGTH (code);
6352 int i;
6353 for (i = 0; i < len; i++)
6355 tree op = TREE_OPERAND (t, i);
6356 tree subop;
6358 if (op == 0)
6359 continue; /* Valid for CALL_EXPR, at least. */
6361 /* Strip any conversions that don't change the mode. This is
6362 safe for every expression, except for a comparison expression
6363 because its signedness is derived from its operands. So, in
6364 the latter case, only strip conversions that don't change the
6365 signedness.
6367 Note that this is done as an internal manipulation within the
6368 constant folder, in order to find the simplest representation
6369 of the arguments so that their form can be studied.  In any
6370 case, the appropriate type conversions should be put back in
6371 the tree that will get out of the constant folder. */
6372 if (kind == tcc_comparison)
6373 STRIP_SIGN_NOPS (op);
6374 else
6375 STRIP_NOPS (op);
6377 if (TREE_CODE (op) == COMPLEX_CST)
6378 subop = TREE_REALPART (op);
6379 else
6380 subop = op;
6382 if (TREE_CODE (subop) != INTEGER_CST
6383 && TREE_CODE (subop) != REAL_CST)
6384 /* Note that TREE_CONSTANT isn't enough:
6385 static var addresses are constant but we can't
6386 do arithmetic on them. */
6387 wins = 0;
6389 if (i == 0)
6390 arg0 = op;
6391 else if (i == 1)
6392 arg1 = op;
6396 /* If this is a commutative operation, and ARG0 is a constant, move it
6397 to ARG1 to reduce the number of tests below. */
6398 if (commutative_tree_code (code)
6399 && tree_swap_operands_p (arg0, arg1, true))
6400 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6401 TREE_OPERAND (t, 0)));
6403 /* Now WINS is set as described above,
6404 ARG0 is the first operand of EXPR,
6405 and ARG1 is the second operand (if it has more than one operand).
6407 First check for cases where an arithmetic operation is applied to a
6408 compound, conditional, or comparison operation. Push the arithmetic
6409 operation inside the compound or conditional to see if any folding
6410 can then be done. Convert comparison to conditional for this purpose.
6411 This also optimizes non-constant cases that used to be done in
6412 expand_expr.
6414 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
6415 where one of the operands is a truth value and the other is a truth
6416 value or a BIT_AND_EXPR with the constant 1.  In that case, the
6417 code below would make the expression more complex. Change it to a
6418 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6419 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6421 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6422 || code == EQ_EXPR || code == NE_EXPR)
6423 && ((truth_value_p (TREE_CODE (arg0))
6424 && (truth_value_p (TREE_CODE (arg1))
6425 || (TREE_CODE (arg1) == BIT_AND_EXPR
6426 && integer_onep (TREE_OPERAND (arg1, 1)))))
6427 || (truth_value_p (TREE_CODE (arg1))
6428 && (truth_value_p (TREE_CODE (arg0))
6429 || (TREE_CODE (arg0) == BIT_AND_EXPR
6430 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6432 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6433 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6434 : TRUTH_XOR_EXPR,
6435 type, fold_convert (boolean_type_node, arg0),
6436 fold_convert (boolean_type_node, arg1)));
6438 if (code == EQ_EXPR)
6439 tem = invert_truthvalue (tem);
6441 return tem;
6444 if (TREE_CODE_CLASS (code) == tcc_unary)
6446 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6447 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6448 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6449 else if (TREE_CODE (arg0) == COND_EXPR)
6451 tree arg01 = TREE_OPERAND (arg0, 1);
6452 tree arg02 = TREE_OPERAND (arg0, 2);
6453 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6454 arg01 = fold (build1 (code, type, arg01));
6455 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6456 arg02 = fold (build1 (code, type, arg02));
6457 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6458 arg01, arg02));
6460 /* If this was a conversion, and all we did was to move into
6461 inside the COND_EXPR, bring it back out. But leave it if
6462 it is a conversion from integer to integer and the
6463 result precision is no wider than a word since such a
6464 conversion is cheap and may be optimized away by combine,
6465 while it couldn't if it were outside the COND_EXPR. Then return
6466 so we don't get into an infinite recursion loop taking the
6467 conversion out and then back in. */
6469 if ((code == NOP_EXPR || code == CONVERT_EXPR
6470 || code == NON_LVALUE_EXPR)
6471 && TREE_CODE (tem) == COND_EXPR
6472 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6473 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6474 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
6475 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
6476 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6477 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6478 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6479 && (INTEGRAL_TYPE_P
6480 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6481 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6482 || flag_syntax_only))
6483 tem = build1 (code, type,
6484 build3 (COND_EXPR,
6485 TREE_TYPE (TREE_OPERAND
6486 (TREE_OPERAND (tem, 1), 0)),
6487 TREE_OPERAND (tem, 0),
6488 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6489 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6490 return tem;
6492 else if (COMPARISON_CLASS_P (arg0))
6494 if (TREE_CODE (type) == BOOLEAN_TYPE)
6496 arg0 = copy_node (arg0);
6497 TREE_TYPE (arg0) = type;
6498 return arg0;
6500 else if (TREE_CODE (type) != INTEGER_TYPE)
6501 return fold (build3 (COND_EXPR, type, arg0,
6502 fold (build1 (code, type,
6503 integer_one_node)),
6504 fold (build1 (code, type,
6505 integer_zero_node))));
6508 else if (TREE_CODE_CLASS (code) == tcc_comparison
6509 && TREE_CODE (arg0) == COMPOUND_EXPR)
6510 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6511 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6512 else if (TREE_CODE_CLASS (code) == tcc_comparison
6513 && TREE_CODE (arg1) == COMPOUND_EXPR)
6514 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6515 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6516 else if (TREE_CODE_CLASS (code) == tcc_binary
6517 || TREE_CODE_CLASS (code) == tcc_comparison)
6519 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6520 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6521 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6522 arg1)));
6523 if (TREE_CODE (arg1) == COMPOUND_EXPR
6524 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6525 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6526 fold (build2 (code, type,
6527 arg0, TREE_OPERAND (arg1, 1))));
6529 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
6531 tem = fold_binary_op_with_conditional_arg (t, code, arg0, arg1,
6532 /*cond_first_p=*/1);
6533 if (tem != NULL_TREE)
6534 return tem;
6537 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
6539 tem = fold_binary_op_with_conditional_arg (t, code, arg1, arg0,
6540 /*cond_first_p=*/0);
6541 if (tem != NULL_TREE)
6542 return tem;
6546 switch (code)
6548 case CONST_DECL:
6549 return fold (DECL_INITIAL (t));
6551 case NOP_EXPR:
6552 case FLOAT_EXPR:
6553 case CONVERT_EXPR:
6554 case FIX_TRUNC_EXPR:
6555 case FIX_CEIL_EXPR:
6556 case FIX_FLOOR_EXPR:
6557 case FIX_ROUND_EXPR:
6558 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6559 return TREE_OPERAND (t, 0);
6561 /* Handle cases of two conversions in a row. */
6562 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6563 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6565 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6566 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6567 int inside_int = INTEGRAL_TYPE_P (inside_type);
6568 int inside_ptr = POINTER_TYPE_P (inside_type);
6569 int inside_float = FLOAT_TYPE_P (inside_type);
6570 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6571 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6572 int inter_int = INTEGRAL_TYPE_P (inter_type);
6573 int inter_ptr = POINTER_TYPE_P (inter_type);
6574 int inter_float = FLOAT_TYPE_P (inter_type);
6575 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6576 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6577 int final_int = INTEGRAL_TYPE_P (type);
6578 int final_ptr = POINTER_TYPE_P (type);
6579 int final_float = FLOAT_TYPE_P (type);
6580 unsigned int final_prec = TYPE_PRECISION (type);
6581 int final_unsignedp = TYPE_UNSIGNED (type);
6583 /* In addition to the cases of two conversions in a row
6584 handled below, if we are converting something to its own
6585 type via an object of identical or wider precision, neither
6586 conversion is needed. */
6587 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6588 && ((inter_int && final_int) || (inter_float && final_float))
6589 && inter_prec >= final_prec)
6590 return fold (build1 (code, type,
6591 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6593 /* Likewise, if the intermediate and final types are either both
6594 float or both integer, we don't need the middle conversion if
6595 it is wider than the final type and doesn't change the signedness
6596 (for integers). Avoid this if the final type is a pointer
6597 since then we sometimes need the inner conversion. Likewise if
6598 the outer has a precision not equal to the size of its mode. */
6599 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6600 || (inter_float && inside_float))
6601 && inter_prec >= inside_prec
6602 && (inter_float || inter_unsignedp == inside_unsignedp)
6603 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6604 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6605 && ! final_ptr)
6606 return fold (build1 (code, type,
6607 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6609 /* If we have a sign-extension of a zero-extended value, we can
6610 replace that by a single zero-extension. */
6611 if (inside_int && inter_int && final_int
6612 && inside_prec < inter_prec && inter_prec < final_prec
6613 && inside_unsignedp && !inter_unsignedp)
6614 return fold (build1 (code, type,
6615 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6617 /* Two conversions in a row are not needed unless:
6618 - some conversion is floating-point (overstrict for now), or
6619 - the intermediate type is narrower than both initial and
6620 final, or
6621 - the intermediate type and innermost type differ in signedness,
6622 and the outermost type is wider than the intermediate, or
6623 - the initial type is a pointer type and the precisions of the
6624 intermediate and final types differ, or
6625 - the final type is a pointer type and the precisions of the
6626 initial and intermediate types differ. */
6627 if (! inside_float && ! inter_float && ! final_float
6628 && (inter_prec > inside_prec || inter_prec > final_prec)
6629 && ! (inside_int && inter_int
6630 && inter_unsignedp != inside_unsignedp
6631 && inter_prec < final_prec)
6632 && ((inter_unsignedp && inter_prec > inside_prec)
6633 == (final_unsignedp && final_prec > inter_prec))
6634 && ! (inside_ptr && inter_prec != final_prec)
6635 && ! (final_ptr && inside_prec != inter_prec)
6636 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6637 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6638 && ! final_ptr)
6639 return fold (build1 (code, type,
6640 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6643 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6644 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6645 /* Detect assigning a bitfield. */
6646 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6647 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6649 /* Don't leave an assignment inside a conversion
6650 unless assigning a bitfield. */
6651 tree prev = TREE_OPERAND (t, 0);
6652 tem = copy_node (t);
6653 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6654 /* First do the assignment, then return converted constant. */
6655 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6656 TREE_NO_WARNING (tem) = 1;
6657 TREE_USED (tem) = 1;
6658 return tem;
6661 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6662 constant (if x has signed type, the sign bit cannot be set
6663 in c). This folds extension into the BIT_AND_EXPR. */
6664 if (INTEGRAL_TYPE_P (type)
6665 && TREE_CODE (type) != BOOLEAN_TYPE
6666 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6667 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6669 tree and = TREE_OPERAND (t, 0);
6670 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6671 int change = 0;
6673 if (TYPE_UNSIGNED (TREE_TYPE (and))
6674 || (TYPE_PRECISION (type)
6675 <= TYPE_PRECISION (TREE_TYPE (and))))
6676 change = 1;
6677 else if (TYPE_PRECISION (TREE_TYPE (and1))
6678 <= HOST_BITS_PER_WIDE_INT
6679 && host_integerp (and1, 1))
6681 unsigned HOST_WIDE_INT cst;
6683 cst = tree_low_cst (and1, 1);
6684 cst &= (HOST_WIDE_INT) -1
6685 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6686 change = (cst == 0);
6687 #ifdef LOAD_EXTEND_OP
6688 if (change
6689 && !flag_syntax_only
6690 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6691 == ZERO_EXTEND))
6693 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6694 and0 = fold_convert (uns, and0);
6695 and1 = fold_convert (uns, and1);
6697 #endif
6699 if (change)
6700 return fold (build2 (BIT_AND_EXPR, type,
6701 fold_convert (type, and0),
6702 fold_convert (type, and1)));
6705 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6706 T2 being pointers to types of the same size. */
6707 if (POINTER_TYPE_P (TREE_TYPE (t))
6708 && BINARY_CLASS_P (arg0)
6709 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6710 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6712 tree arg00 = TREE_OPERAND (arg0, 0);
6713 tree t0 = TREE_TYPE (t);
6714 tree t1 = TREE_TYPE (arg00);
6715 tree tt0 = TREE_TYPE (t0);
6716 tree tt1 = TREE_TYPE (t1);
6717 tree s0 = TYPE_SIZE (tt0);
6718 tree s1 = TYPE_SIZE (tt1);
6720 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6721 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6722 TREE_OPERAND (arg0, 1));
6725 tem = fold_convert_const (code, type, arg0);
6726 return tem ? tem : t;
6728 case VIEW_CONVERT_EXPR:
6729 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6730 return build1 (VIEW_CONVERT_EXPR, type,
6731 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6732 return t;
6734 case COMPONENT_REF:
6735 if (TREE_CODE (arg0) == CONSTRUCTOR
6736 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6738 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6739 if (m)
6740 return TREE_VALUE (m);
6742 return t;
6744 case RANGE_EXPR:
6745 if (TREE_CONSTANT (t) != wins)
6747 tem = copy_node (t);
6748 TREE_CONSTANT (tem) = wins;
6749 TREE_INVARIANT (tem) = wins;
6750 return tem;
6752 return t;
6754 case NEGATE_EXPR:
6755 if (negate_expr_p (arg0))
6756 return fold_convert (type, negate_expr (arg0));
6757 return t;
6759 case ABS_EXPR:
6760 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6761 return fold_abs_const (arg0, type);
6762 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6763 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6764 /* Convert fabs((double)float) into (double)fabsf(float). */
6765 else if (TREE_CODE (arg0) == NOP_EXPR
6766 && TREE_CODE (type) == REAL_TYPE)
6768 tree targ0 = strip_float_extensions (arg0);
6769 if (targ0 != arg0)
6770 return fold_convert (type, fold (build1 (ABS_EXPR,
6771 TREE_TYPE (targ0),
6772 targ0)));
6774 else if (tree_expr_nonnegative_p (arg0))
6775 return arg0;
6776 return t;
6778 case CONJ_EXPR:
6779 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6780 return fold_convert (type, arg0);
6781 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6782 return build2 (COMPLEX_EXPR, type,
6783 TREE_OPERAND (arg0, 0),
6784 negate_expr (TREE_OPERAND (arg0, 1)));
6785 else if (TREE_CODE (arg0) == COMPLEX_CST)
6786 return build_complex (type, TREE_REALPART (arg0),
6787 negate_expr (TREE_IMAGPART (arg0)));
6788 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6789 return fold (build2 (TREE_CODE (arg0), type,
6790 fold (build1 (CONJ_EXPR, type,
6791 TREE_OPERAND (arg0, 0))),
6792 fold (build1 (CONJ_EXPR, type,
6793 TREE_OPERAND (arg0, 1)))));
6794 else if (TREE_CODE (arg0) == CONJ_EXPR)
6795 return TREE_OPERAND (arg0, 0);
6796 return t;
6798 case BIT_NOT_EXPR:
6799 if (TREE_CODE (arg0) == INTEGER_CST)
6800 return fold_not_const (arg0, type);
6801 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6802 return TREE_OPERAND (arg0, 0);
6803 return t;
6805 case PLUS_EXPR:
6806 /* A + (-B) -> A - B */
6807 if (TREE_CODE (arg1) == NEGATE_EXPR)
6808 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6809 /* (-A) + B -> B - A */
6810 if (TREE_CODE (arg0) == NEGATE_EXPR
6811 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6812 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6813 if (! FLOAT_TYPE_P (type))
6815 if (integer_zerop (arg1))
6816 return non_lvalue (fold_convert (type, arg0));
6818 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6819 with a constant, and the two constants have no bits in common,
6820 we should treat this as a BIT_IOR_EXPR since this may produce more
6821 simplifications. */
6822 if (TREE_CODE (arg0) == BIT_AND_EXPR
6823 && TREE_CODE (arg1) == BIT_AND_EXPR
6824 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6825 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6826 && integer_zerop (const_binop (BIT_AND_EXPR,
6827 TREE_OPERAND (arg0, 1),
6828 TREE_OPERAND (arg1, 1), 0)))
6830 code = BIT_IOR_EXPR;
6831 goto bit_ior;
6834 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6835 (plus (plus (mult) (mult)) (foo)) so that we can
6836 take advantage of the factoring cases below. */
6837 if (((TREE_CODE (arg0) == PLUS_EXPR
6838 || TREE_CODE (arg0) == MINUS_EXPR)
6839 && TREE_CODE (arg1) == MULT_EXPR)
6840 || ((TREE_CODE (arg1) == PLUS_EXPR
6841 || TREE_CODE (arg1) == MINUS_EXPR)
6842 && TREE_CODE (arg0) == MULT_EXPR))
6844 tree parg0, parg1, parg, marg;
6845 enum tree_code pcode;
6847 if (TREE_CODE (arg1) == MULT_EXPR)
6848 parg = arg0, marg = arg1;
6849 else
6850 parg = arg1, marg = arg0;
6851 pcode = TREE_CODE (parg);
6852 parg0 = TREE_OPERAND (parg, 0);
6853 parg1 = TREE_OPERAND (parg, 1);
6854 STRIP_NOPS (parg0);
6855 STRIP_NOPS (parg1);
6857 if (TREE_CODE (parg0) == MULT_EXPR
6858 && TREE_CODE (parg1) != MULT_EXPR)
6859 return fold (build2 (pcode, type,
6860 fold (build2 (PLUS_EXPR, type,
6861 fold_convert (type, parg0),
6862 fold_convert (type, marg))),
6863 fold_convert (type, parg1)));
6864 if (TREE_CODE (parg0) != MULT_EXPR
6865 && TREE_CODE (parg1) == MULT_EXPR)
6866 return fold (build2 (PLUS_EXPR, type,
6867 fold_convert (type, parg0),
6868 fold (build2 (pcode, type,
6869 fold_convert (type, marg),
6870 fold_convert (type,
6871 parg1)))));
6874 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6876 tree arg00, arg01, arg10, arg11;
6877 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6879 /* (A * C) + (B * C) -> (A+B) * C.
6880 We are most concerned about the case where C is a constant,
6881 but other combinations show up during loop reduction. Since
6882 it is not difficult, try all four possibilities. */
6884 arg00 = TREE_OPERAND (arg0, 0);
6885 arg01 = TREE_OPERAND (arg0, 1);
6886 arg10 = TREE_OPERAND (arg1, 0);
6887 arg11 = TREE_OPERAND (arg1, 1);
6888 same = NULL_TREE;
6890 if (operand_equal_p (arg01, arg11, 0))
6891 same = arg01, alt0 = arg00, alt1 = arg10;
6892 else if (operand_equal_p (arg00, arg10, 0))
6893 same = arg00, alt0 = arg01, alt1 = arg11;
6894 else if (operand_equal_p (arg00, arg11, 0))
6895 same = arg00, alt0 = arg01, alt1 = arg10;
6896 else if (operand_equal_p (arg01, arg10, 0))
6897 same = arg01, alt0 = arg00, alt1 = arg11;
6899 /* No identical multiplicands; see if we can find a common
6900 power-of-two factor in non-power-of-two multiplies. This
6901 can help in multi-dimensional array access. */
6902 else if (TREE_CODE (arg01) == INTEGER_CST
6903 && TREE_CODE (arg11) == INTEGER_CST
6904 && TREE_INT_CST_HIGH (arg01) == 0
6905 && TREE_INT_CST_HIGH (arg11) == 0)
6907 HOST_WIDE_INT int01, int11, tmp;
6908 int01 = TREE_INT_CST_LOW (arg01);
6909 int11 = TREE_INT_CST_LOW (arg11);
6911 /* Move min of absolute values to int11. */
6912 if ((int01 >= 0 ? int01 : -int01)
6913 < (int11 >= 0 ? int11 : -int11))
6915 tmp = int01, int01 = int11, int11 = tmp;
6916 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6917 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6920 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6922 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6923 build_int_cst (NULL_TREE,
6924 int01 / int11)));
6925 alt1 = arg10;
6926 same = arg11;
6930 if (same)
6931 return fold (build2 (MULT_EXPR, type,
6932 fold (build2 (PLUS_EXPR, type,
6933 fold_convert (type, alt0),
6934 fold_convert (type, alt1))),
6935 same));
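/* Illustrative sketch, not part of fold-const.c (shown out of line
   rather than inside fold): the factoring above,
   (A*C) + (B*C) -> (A+B)*C, holds unconditionally in the wrapping
   arithmetic of unsigned types.  Function name hypothetical.  */
#include <assert.h>
static void
example_factor (unsigned a, unsigned b, unsigned c)
{
  assert (a * c + b * c == (a + b) * c);
}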
6938 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
6939 of the array.  The loop optimizer sometimes produces this type of
6940 expression.  */
6941 if (TREE_CODE (arg0) == ADDR_EXPR
6942 && TREE_CODE (arg1) == MULT_EXPR)
6944 tem = try_move_mult_to_index (type, PLUS_EXPR, arg0, arg1);
6945 if (tem)
6946 return fold (tem);
6948 else if (TREE_CODE (arg1) == ADDR_EXPR
6949 && TREE_CODE (arg0) == MULT_EXPR)
6951 tem = try_move_mult_to_index (type, PLUS_EXPR, arg1, arg0);
6952 if (tem)
6953 return fold (tem);
6956 else
6958 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6959 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6960 return non_lvalue (fold_convert (type, arg0));
6962 /* Likewise if the operands are reversed. */
6963 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6964 return non_lvalue (fold_convert (type, arg1));
6966 /* Convert X + -C into X - C. */
6967 if (TREE_CODE (arg1) == REAL_CST
6968 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
6970 tem = fold_negate_const (arg1, type);
6971 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
6972 return fold (build2 (MINUS_EXPR, type,
6973 fold_convert (type, arg0),
6974 fold_convert (type, tem)));
6977 /* Convert x+x into x*2.0. */
6978 if (operand_equal_p (arg0, arg1, 0)
6979 && SCALAR_FLOAT_TYPE_P (type))
6980 return fold (build2 (MULT_EXPR, type, arg0,
6981 build_real (type, dconst2)));
6983 /* Convert x*c+x into x*(c+1). */
6984 if (flag_unsafe_math_optimizations
6985 && TREE_CODE (arg0) == MULT_EXPR
6986 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6987 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6988 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6990 REAL_VALUE_TYPE c;
6992 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6993 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6994 return fold (build2 (MULT_EXPR, type, arg1,
6995 build_real (type, c)));
6998 /* Convert x+x*c into x*(c+1). */
6999 if (flag_unsafe_math_optimizations
7000 && TREE_CODE (arg1) == MULT_EXPR
7001 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7002 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7003 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7005 REAL_VALUE_TYPE c;
7007 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7008 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7009 return fold (build2 (MULT_EXPR, type, arg0,
7010 build_real (type, c)));
7013 /* Convert x*c1+x*c2 into x*(c1+c2). */
7014 if (flag_unsafe_math_optimizations
7015 && TREE_CODE (arg0) == MULT_EXPR
7016 && TREE_CODE (arg1) == MULT_EXPR
7017 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7018 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7019 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7020 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7021 && operand_equal_p (TREE_OPERAND (arg0, 0),
7022 TREE_OPERAND (arg1, 0), 0))
7024 REAL_VALUE_TYPE c1, c2;
7026 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7027 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7028 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7029 return fold (build2 (MULT_EXPR, type,
7030 TREE_OPERAND (arg0, 0),
7031 build_real (type, c1)));
7033 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7034 if (flag_unsafe_math_optimizations
7035 && TREE_CODE (arg1) == PLUS_EXPR
7036 && TREE_CODE (arg0) != MULT_EXPR)
7038 tree tree10 = TREE_OPERAND (arg1, 0);
7039 tree tree11 = TREE_OPERAND (arg1, 1);
7040 if (TREE_CODE (tree11) == MULT_EXPR
7041 && TREE_CODE (tree10) == MULT_EXPR)
7043 tree tree0;
7044 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
7045 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
7048 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7049 if (flag_unsafe_math_optimizations
7050 && TREE_CODE (arg0) == PLUS_EXPR
7051 && TREE_CODE (arg1) != MULT_EXPR)
7053 tree tree00 = TREE_OPERAND (arg0, 0);
7054 tree tree01 = TREE_OPERAND (arg0, 1);
7055 if (TREE_CODE (tree01) == MULT_EXPR
7056 && TREE_CODE (tree00) == MULT_EXPR)
7058 tree tree0;
7059 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
7060 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
7065 bit_rotate:
7066 /* (A << C1) + (A >> C2) is a rotate of A by C1 bits if A is
7067 unsigned and C1+C2 is the size of A.  */
7068 /* (A << B) + (A >> (Z - B)) is a rotate of A by B bits if A is
7069 unsigned and Z is the size of A.  */
7071 enum tree_code code0, code1;
7072 code0 = TREE_CODE (arg0);
7073 code1 = TREE_CODE (arg1);
7074 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7075 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7076 && operand_equal_p (TREE_OPERAND (arg0, 0),
7077 TREE_OPERAND (arg1, 0), 0)
7078 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7080 tree tree01, tree11;
7081 enum tree_code code01, code11;
7083 tree01 = TREE_OPERAND (arg0, 1);
7084 tree11 = TREE_OPERAND (arg1, 1);
7085 STRIP_NOPS (tree01);
7086 STRIP_NOPS (tree11);
7087 code01 = TREE_CODE (tree01);
7088 code11 = TREE_CODE (tree11);
7089 if (code01 == INTEGER_CST
7090 && code11 == INTEGER_CST
7091 && TREE_INT_CST_HIGH (tree01) == 0
7092 && TREE_INT_CST_HIGH (tree11) == 0
7093 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7094 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7095 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7096 code0 == LSHIFT_EXPR ? tree01 : tree11);
7097 else if (code11 == MINUS_EXPR)
7099 tree tree110, tree111;
7100 tree110 = TREE_OPERAND (tree11, 0);
7101 tree111 = TREE_OPERAND (tree11, 1);
7102 STRIP_NOPS (tree110);
7103 STRIP_NOPS (tree111);
7104 if (TREE_CODE (tree110) == INTEGER_CST
7105 && 0 == compare_tree_int (tree110,
7106 TYPE_PRECISION
7107 (TREE_TYPE (TREE_OPERAND
7108 (arg0, 0))))
7109 && operand_equal_p (tree01, tree111, 0))
7110 return build2 ((code0 == LSHIFT_EXPR
7111 ? LROTATE_EXPR
7112 : RROTATE_EXPR),
7113 type, TREE_OPERAND (arg0, 0), tree01);
7115 else if (code01 == MINUS_EXPR)
7117 tree tree010, tree011;
7118 tree010 = TREE_OPERAND (tree01, 0);
7119 tree011 = TREE_OPERAND (tree01, 1);
7120 STRIP_NOPS (tree010);
7121 STRIP_NOPS (tree011);
7122 if (TREE_CODE (tree010) == INTEGER_CST
7123 && 0 == compare_tree_int (tree010,
7124 TYPE_PRECISION
7125 (TREE_TYPE (TREE_OPERAND
7126 (arg0, 0))))
7127 && operand_equal_p (tree11, tree011, 0))
7128 return build2 ((code0 != LSHIFT_EXPR
7129 ? LROTATE_EXPR
7130 : RROTATE_EXPR),
7131 type, TREE_OPERAND (arg0, 0), tree11);
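/* Illustrative sketch, not part of fold-const.c (shown out of line):
   the pattern matched above is the portable C rotate idiom.  For a
   32-bit type the two shifted halves share no bits, so + and | are
   interchangeable; n == 0 is excluded because a shift by 32 would be
   undefined.  Function name hypothetical.  */
#include <stdint.h>
static uint32_t
example_rotl32 (uint32_t a, unsigned n)   /* requires 0 < n < 32 */
{
  return (a << n) + (a >> (32 - n));
}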
7136 associate:
7137 /* In most languages, we can't associate operations on floats through
7138 parentheses. Rather than remember where the parentheses were, we
7139 don't associate floats at all, unless the user has specified
7140 -funsafe-math-optimizations. */
7142 if (! wins
7143 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7145 tree var0, con0, lit0, minus_lit0;
7146 tree var1, con1, lit1, minus_lit1;
7148 /* Split both trees into variables, constants, and literals. Then
7149 associate each group together, the constants with literals,
7150 then the result with variables. This increases the chances of
7151 literals being recombined later and of generating relocatable
7152 expressions for the sum of a constant and literal. */
7153 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7154 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7155 code == MINUS_EXPR);
7157 /* Only do something if we found more than two objects. Otherwise,
7158 nothing has changed and we risk infinite recursion. */
7159 if (2 < ((var0 != 0) + (var1 != 0)
7160 + (con0 != 0) + (con1 != 0)
7161 + (lit0 != 0) + (lit1 != 0)
7162 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7164 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7165 if (code == MINUS_EXPR)
7166 code = PLUS_EXPR;
7168 var0 = associate_trees (var0, var1, code, type);
7169 con0 = associate_trees (con0, con1, code, type);
7170 lit0 = associate_trees (lit0, lit1, code, type);
7171 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7173 /* Preserve the MINUS_EXPR if the negative part of the literal is
7174 greater than the positive part. Otherwise, the multiplicative
7175 folding code (i.e. extract_muldiv) may be fooled when
7176 unsigned constants are subtracted, as in the following
7177 example: ((X*2 + 4) - 8U)/2. */
7178 if (minus_lit0 && lit0)
7180 if (TREE_CODE (lit0) == INTEGER_CST
7181 && TREE_CODE (minus_lit0) == INTEGER_CST
7182 && tree_int_cst_lt (lit0, minus_lit0))
7184 minus_lit0 = associate_trees (minus_lit0, lit0,
7185 MINUS_EXPR, type);
7186 lit0 = 0;
7188 else
7190 lit0 = associate_trees (lit0, minus_lit0,
7191 MINUS_EXPR, type);
7192 minus_lit0 = 0;
7195 if (minus_lit0)
7197 if (con0 == 0)
7198 return fold_convert (type,
7199 associate_trees (var0, minus_lit0,
7200 MINUS_EXPR, type));
7201 else
7203 con0 = associate_trees (con0, minus_lit0,
7204 MINUS_EXPR, type);
7205 return fold_convert (type,
7206 associate_trees (var0, con0,
7207 PLUS_EXPR, type));
7211 con0 = associate_trees (con0, lit0, code, type);
7212 return fold_convert (type, associate_trees (var0, con0,
7213 code, type));
7217 binary:
7218 if (wins)
7219 t1 = const_binop (code, arg0, arg1, 0);
7220 if (t1 != NULL_TREE)
7222 /* The return value should always have
7223 the same type as the original expression. */
7224 if (TREE_TYPE (t1) != type)
7225 t1 = fold_convert (type, t1);
7227 return t1;
7229 return t;
7231 case MINUS_EXPR:
7232 /* A - (-B) -> A + B */
7233 if (TREE_CODE (arg1) == NEGATE_EXPR)
7234 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
7235 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7236 if (TREE_CODE (arg0) == NEGATE_EXPR
7237 && (FLOAT_TYPE_P (type)
7238 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7239 && negate_expr_p (arg1)
7240 && reorder_operands_p (arg0, arg1))
7241 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
7242 TREE_OPERAND (arg0, 0)));
7244 if (! FLOAT_TYPE_P (type))
7246 if (! wins && integer_zerop (arg0))
7247 return negate_expr (fold_convert (type, arg1));
7248 if (integer_zerop (arg1))
7249 return non_lvalue (fold_convert (type, arg0));
7251 /* Fold A - (A & B) into ~B & A. */
7252 if (!TREE_SIDE_EFFECTS (arg0)
7253 && TREE_CODE (arg1) == BIT_AND_EXPR)
7255 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7256 return fold (build2 (BIT_AND_EXPR, type,
7257 fold (build1 (BIT_NOT_EXPR, type,
7258 TREE_OPERAND (arg1, 0))),
7259 arg0));
7260 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7261 return fold (build2 (BIT_AND_EXPR, type,
7262 fold (build1 (BIT_NOT_EXPR, type,
7263 TREE_OPERAND (arg1, 1))),
7264 arg0));
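/* The identity holds because A & B can only contain bits that are also
   set in A, so the subtraction clears exactly those bits and never
   borrows; e.g. with A == 12 and B == 10, A - (A & B) == 12 - 8 == 4
   == ~B & A.  */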
7267 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7268 any power of 2 minus 1. */
7269 if (TREE_CODE (arg0) == BIT_AND_EXPR
7270 && TREE_CODE (arg1) == BIT_AND_EXPR
7271 && operand_equal_p (TREE_OPERAND (arg0, 0),
7272 TREE_OPERAND (arg1, 0), 0))
7274 tree mask0 = TREE_OPERAND (arg0, 1);
7275 tree mask1 = TREE_OPERAND (arg1, 1);
7276 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
7278 if (operand_equal_p (tem, mask1, 0))
7280 tem = fold (build2 (BIT_XOR_EXPR, type,
7281 TREE_OPERAND (arg0, 0), mask1));
7282 return fold (build2 (MINUS_EXPR, type, tem, mask1));
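/* This works because B is a mask of low-order one bits, so
   A ^ B == (A & ~B) + (B - (A & B)); subtracting B from both sides
   leaves (A & ~B) - (A & B).  */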
7287 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7288 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7289 return non_lvalue (fold_convert (type, arg0));
7291 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7292 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7293 (-ARG1 + ARG0) reduces to -ARG1. */
7294 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7295 return negate_expr (fold_convert (type, arg1));
7297 /* Fold &x - &x. This can happen from &x.foo - &x.
7298 This is unsafe for certain floats even in non-IEEE formats.
7299 In IEEE, it is unsafe because it gives the wrong result for NaNs.
7300 Also note that operand_equal_p is always false if an operand
7301 is volatile. */
7303 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7304 && operand_equal_p (arg0, arg1, 0))
7305 return fold_convert (type, integer_zero_node);
7307 /* A - B -> A + (-B) if B is easily negatable. */
7308 if (!wins && negate_expr_p (arg1)
7309 && ((FLOAT_TYPE_P (type)
7310 /* Avoid this transformation if B is a positive REAL_CST. */
7311 && (TREE_CODE (arg1) != REAL_CST
7312 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7313 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7314 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
7316 /* Try folding difference of addresses. */
7318 HOST_WIDE_INT diff;
7320 if ((TREE_CODE (arg0) == ADDR_EXPR
7321 || TREE_CODE (arg1) == ADDR_EXPR)
7322 && ptr_difference_const (arg0, arg1, &diff))
7323 return build_int_cst_type (type, diff);
7326 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7327 of the array. The loop optimizer sometimes produces this type
7328 of expression. */
7329 if (TREE_CODE (arg0) == ADDR_EXPR
7330 && TREE_CODE (arg1) == MULT_EXPR)
7332 tem = try_move_mult_to_index (type, MINUS_EXPR, arg0, arg1);
7333 if (tem)
7334 return fold (tem);
7337 if (TREE_CODE (arg0) == MULT_EXPR
7338 && TREE_CODE (arg1) == MULT_EXPR
7339 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7341 /* (A * C) - (B * C) -> (A-B) * C. */
7342 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7343 TREE_OPERAND (arg1, 1), 0))
7344 return fold (build2 (MULT_EXPR, type,
7345 fold (build2 (MINUS_EXPR, type,
7346 TREE_OPERAND (arg0, 0),
7347 TREE_OPERAND (arg1, 0))),
7348 TREE_OPERAND (arg0, 1)));
7349 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7350 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7351 TREE_OPERAND (arg1, 0), 0))
7352 return fold (build2 (MULT_EXPR, type,
7353 TREE_OPERAND (arg0, 0),
7354 fold (build2 (MINUS_EXPR, type,
7355 TREE_OPERAND (arg0, 1),
7356 TREE_OPERAND (arg1, 1)))));
7359 goto associate;
7361 case MULT_EXPR:
7362 /* (-A) * (-B) -> A * B */
7363 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7364 return fold (build2 (MULT_EXPR, type,
7365 TREE_OPERAND (arg0, 0),
7366 negate_expr (arg1)));
7367 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7368 return fold (build2 (MULT_EXPR, type,
7369 negate_expr (arg0),
7370 TREE_OPERAND (arg1, 0)));
7372 if (! FLOAT_TYPE_P (type))
7374 if (integer_zerop (arg1))
7375 return omit_one_operand (type, arg1, arg0);
7376 if (integer_onep (arg1))
7377 return non_lvalue (fold_convert (type, arg0));
7379 /* (a * (1 << b)) is (a << b) */
7380 if (TREE_CODE (arg1) == LSHIFT_EXPR
7381 && integer_onep (TREE_OPERAND (arg1, 0)))
7382 return fold (build2 (LSHIFT_EXPR, type, arg0,
7383 TREE_OPERAND (arg1, 1)));
7384 if (TREE_CODE (arg0) == LSHIFT_EXPR
7385 && integer_onep (TREE_OPERAND (arg0, 0)))
7386 return fold (build2 (LSHIFT_EXPR, type, arg1,
7387 TREE_OPERAND (arg0, 1)));
7389 if (TREE_CODE (arg1) == INTEGER_CST
7390 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
7391 fold_convert (type, arg1),
7392 code, NULL_TREE)))
7393 return fold_convert (type, tem);
7396 else
7398 /* Maybe fold x * 0 to 0. The expressions aren't the same
7399 when x is NaN, since x * 0 is also NaN. Nor are they the
7400 same in modes with signed zeros, since multiplying a
7401 negative value by 0 gives -0, not +0. */
7402 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7403 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7404 && real_zerop (arg1))
7405 return omit_one_operand (type, arg1, arg0);
7406 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7407 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7408 && real_onep (arg1))
7409 return non_lvalue (fold_convert (type, arg0));
7411 /* Transform x * -1.0 into -x. */
7412 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7413 && real_minus_onep (arg1))
7414 return fold_convert (type, negate_expr (arg0));
7416 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7417 if (flag_unsafe_math_optimizations
7418 && TREE_CODE (arg0) == RDIV_EXPR
7419 && TREE_CODE (arg1) == REAL_CST
7420 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7422 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7423 arg1, 0);
7424 if (tem)
7425 return fold (build2 (RDIV_EXPR, type, tem,
7426 TREE_OPERAND (arg0, 1)));
7429 if (flag_unsafe_math_optimizations)
7431 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7432 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7434 /* Optimizations of root(...)*root(...). */
7435 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7437 tree rootfn, arg, arglist;
7438 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7439 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7441 /* Optimize sqrt(x)*sqrt(x) as x. */
7442 if (BUILTIN_SQRT_P (fcode0)
7443 && operand_equal_p (arg00, arg10, 0)
7444 && ! HONOR_SNANS (TYPE_MODE (type)))
7445 return arg00;
7447 /* Optimize root(x)*root(y) as root(x*y). */
7448 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7449 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7450 arglist = build_tree_list (NULL_TREE, arg);
7451 return build_function_call_expr (rootfn, arglist);
7454 /* Optimize expN(x)*expN(y) as expN(x+y). */
7455 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7457 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7458 tree arg = build2 (PLUS_EXPR, type,
7459 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7460 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7461 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7462 return build_function_call_expr (expfn, arglist);
7465 /* Optimizations of pow(...)*pow(...). */
7466 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7467 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7468 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7470 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7471 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7472 1)));
7473 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7474 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7475 1)));
7477 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7478 if (operand_equal_p (arg01, arg11, 0))
7480 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7481 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7482 tree arglist = tree_cons (NULL_TREE, fold (arg),
7483 build_tree_list (NULL_TREE,
7484 arg01));
7485 return build_function_call_expr (powfn, arglist);
7488 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7489 if (operand_equal_p (arg00, arg10, 0))
7491 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7492 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7493 tree arglist = tree_cons (NULL_TREE, arg00,
7494 build_tree_list (NULL_TREE,
7495 arg));
7496 return build_function_call_expr (powfn, arglist);
7500 /* Optimize tan(x)*cos(x) as sin(x). */
7501 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7502 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7503 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7504 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7505 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7506 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7507 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7508 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7510 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7512 if (sinfn != NULL_TREE)
7513 return build_function_call_expr (sinfn,
7514 TREE_OPERAND (arg0, 1));
7517 /* Optimize x*pow(x,c) as pow(x,c+1). */
7518 if (fcode1 == BUILT_IN_POW
7519 || fcode1 == BUILT_IN_POWF
7520 || fcode1 == BUILT_IN_POWL)
7522 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7523 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7524 1)));
7525 if (TREE_CODE (arg11) == REAL_CST
7526 && ! TREE_CONSTANT_OVERFLOW (arg11)
7527 && operand_equal_p (arg0, arg10, 0))
7529 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7530 REAL_VALUE_TYPE c;
7531 tree arg, arglist;
7533 c = TREE_REAL_CST (arg11);
7534 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7535 arg = build_real (type, c);
7536 arglist = build_tree_list (NULL_TREE, arg);
7537 arglist = tree_cons (NULL_TREE, arg0, arglist);
7538 return build_function_call_expr (powfn, arglist);
7542 /* Optimize pow(x,c)*x as pow(x,c+1). */
7543 if (fcode0 == BUILT_IN_POW
7544 || fcode0 == BUILT_IN_POWF
7545 || fcode0 == BUILT_IN_POWL)
7547 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7548 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7549 1)));
7550 if (TREE_CODE (arg01) == REAL_CST
7551 && ! TREE_CONSTANT_OVERFLOW (arg01)
7552 && operand_equal_p (arg1, arg00, 0))
7554 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7555 REAL_VALUE_TYPE c;
7556 tree arg, arglist;
7558 c = TREE_REAL_CST (arg01);
7559 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7560 arg = build_real (type, c);
7561 arglist = build_tree_list (NULL_TREE, arg);
7562 arglist = tree_cons (NULL_TREE, arg1, arglist);
7563 return build_function_call_expr (powfn, arglist);
7567 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7568 if (! optimize_size
7569 && operand_equal_p (arg0, arg1, 0))
7571 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7573 if (powfn)
7575 tree arg = build_real (type, dconst2);
7576 tree arglist = build_tree_list (NULL_TREE, arg);
7577 arglist = tree_cons (NULL_TREE, arg0, arglist);
7578 return build_function_call_expr (powfn, arglist);
7583 goto associate;
7585 case BIT_IOR_EXPR:
7586 bit_ior:
7587 if (integer_all_onesp (arg1))
7588 return omit_one_operand (type, arg1, arg0);
7589 if (integer_zerop (arg1))
7590 return non_lvalue (fold_convert (type, arg0));
7591 if (operand_equal_p (arg0, arg1, 0))
7592 return non_lvalue (fold_convert (type, arg0));
7594 /* ~X | X is -1. */
7595 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7596 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7598 t1 = build_int_cst (type, -1);
7599 t1 = force_fit_type (t1, 0, false, false);
7600 return omit_one_operand (type, t1, arg1);
7603 /* X | ~X is -1. */
7604 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7605 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7607 t1 = build_int_cst (type, -1);
7608 t1 = force_fit_type (t1, 0, false, false);
7609 return omit_one_operand (type, t1, arg0);
7612 t1 = distribute_bit_expr (code, type, arg0, arg1);
7613 if (t1 != NULL_TREE)
7614 return t1;
7616 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7618 This results in more efficient code for machines without a NAND
7619 instruction. Combine will canonicalize to the first form
7620 which will allow use of NAND instructions provided by the
7621 backend if they exist. */
7622 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7623 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7625 return fold (build1 (BIT_NOT_EXPR, type,
7626 build2 (BIT_AND_EXPR, type,
7627 TREE_OPERAND (arg0, 0),
7628 TREE_OPERAND (arg1, 0))));
7631 /* See if this can be simplified into a rotate first. If that
7632 is unsuccessful continue in the association code. */
7633 goto bit_rotate;
7635 case BIT_XOR_EXPR:
7636 if (integer_zerop (arg1))
7637 return non_lvalue (fold_convert (type, arg0));
7638 if (integer_all_onesp (arg1))
7639 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7640 if (operand_equal_p (arg0, arg1, 0))
7641 return omit_one_operand (type, integer_zero_node, arg0);
7643 /* ~X ^ X is -1. */
7644 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7645 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7647 t1 = build_int_cst (type, -1);
7648 t1 = force_fit_type (t1, 0, false, false);
7649 return omit_one_operand (type, t1, arg1);
7652 /* X ^ ~X is -1. */
7653 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7654 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7656 t1 = build_int_cst (type, -1);
7657 t1 = force_fit_type (t1, 0, false, false);
7658 return omit_one_operand (type, t1, arg0);
7661 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7662 with a constant, and the two constants have no bits in common,
7663 we should treat this as a BIT_IOR_EXPR since this may produce more
7664 simplifications. */
7665 if (TREE_CODE (arg0) == BIT_AND_EXPR
7666 && TREE_CODE (arg1) == BIT_AND_EXPR
7667 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7668 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7669 && integer_zerop (const_binop (BIT_AND_EXPR,
7670 TREE_OPERAND (arg0, 1),
7671 TREE_OPERAND (arg1, 1), 0)))
7673 code = BIT_IOR_EXPR;
7674 goto bit_ior;
7677 /* See if this can be simplified into a rotate first. If that
7678 is unsuccessful continue in the association code. */
7679 goto bit_rotate;
7681 case BIT_AND_EXPR:
7682 if (integer_all_onesp (arg1))
7683 return non_lvalue (fold_convert (type, arg0));
7684 if (integer_zerop (arg1))
7685 return omit_one_operand (type, arg1, arg0);
7686 if (operand_equal_p (arg0, arg1, 0))
7687 return non_lvalue (fold_convert (type, arg0));
7689 /* ~X & X is always zero. */
7690 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7691 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7692 return omit_one_operand (type, integer_zero_node, arg1);
7694 /* X & ~X is always zero. */
7695 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7696 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7697 return omit_one_operand (type, integer_zero_node, arg0);
7699 t1 = distribute_bit_expr (code, type, arg0, arg1);
7700 if (t1 != NULL_TREE)
7701 return t1;
7702 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7703 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7704 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7706 unsigned int prec
7707 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7709 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7710 && (~TREE_INT_CST_LOW (arg1)
7711 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7712 return fold_convert (type, TREE_OPERAND (arg0, 0));
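/* For instance, if c is an 8-bit unsigned char, the mask 0377 (i.e. 255)
   covers every bit c can contribute, so the BIT_AND_EXPR is a no-op and
   (int) c is returned directly.  */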
7715 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7717 This results in more efficient code for machines without a NOR
7718 instruction. Combine will canonicalize to the first form
7719 which will allow use of NOR instructions provided by the
7720 backend if they exist. */
7721 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7722 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7724 return fold (build1 (BIT_NOT_EXPR, type,
7725 build2 (BIT_IOR_EXPR, type,
7726 TREE_OPERAND (arg0, 0),
7727 TREE_OPERAND (arg1, 0))));
7730 goto associate;
7732 case RDIV_EXPR:
7733 /* Don't touch a floating-point divide by zero unless the mode
7734 of the constant can represent infinity. */
7735 if (TREE_CODE (arg1) == REAL_CST
7736 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7737 && real_zerop (arg1))
7738 return t;
7740 /* (-A) / (-B) -> A / B */
7741 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7742 return fold (build2 (RDIV_EXPR, type,
7743 TREE_OPERAND (arg0, 0),
7744 negate_expr (arg1)));
7745 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7746 return fold (build2 (RDIV_EXPR, type,
7747 negate_expr (arg0),
7748 TREE_OPERAND (arg1, 0)));
7750 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7751 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7752 && real_onep (arg1))
7753 return non_lvalue (fold_convert (type, arg0));
7755 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7756 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7757 && real_minus_onep (arg1))
7758 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7760 /* If ARG1 is a constant, we can convert this to a multiply by the
7761 reciprocal. This does not have the same rounding properties,
7762 so only do this if -funsafe-math-optimizations. We can actually
7763 always safely do it if ARG1 is a power of two, but it's hard to
7764 tell if it is or not in a portable manner. */
7765 if (TREE_CODE (arg1) == REAL_CST)
7767 if (flag_unsafe_math_optimizations
7768 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7769 arg1, 0)))
7770 return fold (build2 (MULT_EXPR, type, arg0, tem));
7771 /* Find the reciprocal if optimizing and the result is exact. */
7772 if (optimize)
7774 REAL_VALUE_TYPE r;
7775 r = TREE_REAL_CST (arg1);
7776 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
7778 tem = build_real (type, r);
7779 return fold (build2 (MULT_EXPR, type, arg0, tem));
7783 /* Convert A/B/C to A/(B*C). */
7784 if (flag_unsafe_math_optimizations
7785 && TREE_CODE (arg0) == RDIV_EXPR)
7786 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7787 fold (build2 (MULT_EXPR, type,
7788 TREE_OPERAND (arg0, 1), arg1))));
7790 /* Convert A/(B/C) to (A/B)*C. */
7791 if (flag_unsafe_math_optimizations
7792 && TREE_CODE (arg1) == RDIV_EXPR)
7793 return fold (build2 (MULT_EXPR, type,
7794 fold (build2 (RDIV_EXPR, type, arg0,
7795 TREE_OPERAND (arg1, 0))),
7796 TREE_OPERAND (arg1, 1)));
7798 /* Convert C1/(X*C2) into (C1/C2)/X. */
7799 if (flag_unsafe_math_optimizations
7800 && TREE_CODE (arg1) == MULT_EXPR
7801 && TREE_CODE (arg0) == REAL_CST
7802 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7804 tree tem = const_binop (RDIV_EXPR, arg0,
7805 TREE_OPERAND (arg1, 1), 0);
7806 if (tem)
7807 return fold (build2 (RDIV_EXPR, type, tem,
7808 TREE_OPERAND (arg1, 0)));
7811 if (flag_unsafe_math_optimizations)
7813 enum built_in_function fcode = builtin_mathfn_code (arg1);
7814 /* Optimize x/expN(y) into x*expN(-y). */
7815 if (BUILTIN_EXPONENT_P (fcode))
7817 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7818 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7819 tree arglist = build_tree_list (NULL_TREE,
7820 fold_convert (type, arg));
7821 arg1 = build_function_call_expr (expfn, arglist);
7822 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7825 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7826 if (fcode == BUILT_IN_POW
7827 || fcode == BUILT_IN_POWF
7828 || fcode == BUILT_IN_POWL)
7830 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7831 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7832 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7833 tree neg11 = fold_convert (type, negate_expr (arg11));
7834 tree arglist = tree_cons (NULL_TREE, arg10,
7835 build_tree_list (NULL_TREE, neg11));
7836 arg1 = build_function_call_expr (powfn, arglist);
7837 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7841 if (flag_unsafe_math_optimizations)
7843 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7844 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7846 /* Optimize sin(x)/cos(x) as tan(x). */
7847 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7848 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7849 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7850 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7851 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7853 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7855 if (tanfn != NULL_TREE)
7856 return build_function_call_expr (tanfn,
7857 TREE_OPERAND (arg0, 1));
7860 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7861 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7862 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7863 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7864 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7865 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7867 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7869 if (tanfn != NULL_TREE)
7871 tree tmp = TREE_OPERAND (arg0, 1);
7872 tmp = build_function_call_expr (tanfn, tmp);
7873 return fold (build2 (RDIV_EXPR, type,
7874 build_real (type, dconst1), tmp));
7878 /* Optimize pow(x,c)/x as pow(x,c-1). */
7879 if (fcode0 == BUILT_IN_POW
7880 || fcode0 == BUILT_IN_POWF
7881 || fcode0 == BUILT_IN_POWL)
7883 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7884 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7885 if (TREE_CODE (arg01) == REAL_CST
7886 && ! TREE_CONSTANT_OVERFLOW (arg01)
7887 && operand_equal_p (arg1, arg00, 0))
7889 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7890 REAL_VALUE_TYPE c;
7891 tree arg, arglist;
7893 c = TREE_REAL_CST (arg01);
7894 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7895 arg = build_real (type, c);
7896 arglist = build_tree_list (NULL_TREE, arg);
7897 arglist = tree_cons (NULL_TREE, arg1, arglist);
7898 return build_function_call_expr (powfn, arglist);
7902 goto binary;
7904 case TRUNC_DIV_EXPR:
7905 case ROUND_DIV_EXPR:
7906 case FLOOR_DIV_EXPR:
7907 case CEIL_DIV_EXPR:
7908 case EXACT_DIV_EXPR:
7909 if (integer_onep (arg1))
7910 return non_lvalue (fold_convert (type, arg0));
7911 if (integer_zerop (arg1))
7912 return t;
7913 /* X / -1 is -X. */
7914 if (!TYPE_UNSIGNED (type)
7915 && TREE_CODE (arg1) == INTEGER_CST
7916 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7917 && TREE_INT_CST_HIGH (arg1) == -1)
7918 return fold_convert (type, negate_expr (arg0));
7920 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7921 operation, EXACT_DIV_EXPR.
7923 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7924 At one time others generated faster code, but it's not clear whether
7925 they still do after the last round of changes to the DIV code in expmed.c. */
7926 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7927 && multiple_of_p (type, arg0, arg1))
7928 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7930 if (TREE_CODE (arg1) == INTEGER_CST
7931 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7932 code, NULL_TREE)))
7933 return fold_convert (type, tem);
7935 goto binary;
7937 case CEIL_MOD_EXPR:
7938 case FLOOR_MOD_EXPR:
7939 case ROUND_MOD_EXPR:
7940 case TRUNC_MOD_EXPR:
7941 /* X % 1 is always zero, but be sure to preserve any side
7942 effects in X. */
7943 if (integer_onep (arg1))
7944 return omit_one_operand (type, integer_zero_node, arg0);
7946 /* For X % 0, return X % 0 unchanged so that we get the
7947 proper warnings and errors. */
7948 if (integer_zerop (arg1))
7949 return t;
7951 /* 0 % X is always zero, but be sure to preserve any side
7952 effects in X. Place this after checking for X == 0. */
7953 if (integer_zerop (arg0))
7954 return omit_one_operand (type, integer_zero_node, arg1);
7956 /* X % -1 is zero. */
7957 if (!TYPE_UNSIGNED (type)
7958 && TREE_CODE (arg1) == INTEGER_CST
7959 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7960 && TREE_INT_CST_HIGH (arg1) == -1)
7961 return omit_one_operand (type, integer_zero_node, arg0);
7963 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
7964 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
7965 if (code == TRUNC_MOD_EXPR
7966 && TYPE_UNSIGNED (type)
7967 && integer_pow2p (arg1))
7969 unsigned HOST_WIDE_INT high, low;
7970 tree mask;
7971 int l;
7973 l = tree_log2 (arg1);
7974 if (l >= HOST_BITS_PER_WIDE_INT)
7976 high = ((unsigned HOST_WIDE_INT) 1
7977 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
7978 low = -1;
7980 else
7982 high = 0;
7983 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
7986 mask = build_int_cst_wide (type, low, high);
7987 return fold (build2 (BIT_AND_EXPR, type,
7988 fold_convert (type, arg0), mask));
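/* For example, X % 8U becomes X & 7U: l == 3 and the mask is
   ((unsigned HOST_WIDE_INT) 1 << 3) - 1 == 7.  */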
7991 /* X % -C is the same as X % C. */
7992 if (code == TRUNC_MOD_EXPR
7993 && !TYPE_UNSIGNED (type)
7994 && TREE_CODE (arg1) == INTEGER_CST
7995 && TREE_INT_CST_HIGH (arg1) < 0
7996 && !flag_trapv
7997 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
7998 && !sign_bit_p (arg1, arg1))
7999 return fold (build2 (code, type, fold_convert (type, arg0),
8000 fold_convert (type, negate_expr (arg1))));
8002 /* X % -Y is the same as X % Y. */
8003 if (code == TRUNC_MOD_EXPR
8004 && !TYPE_UNSIGNED (type)
8005 && TREE_CODE (arg1) == NEGATE_EXPR
8006 && !flag_trapv)
8007 return fold (build2 (code, type, fold_convert (type, arg0),
8008 fold_convert (type, TREE_OPERAND (arg1, 0))));
8010 if (TREE_CODE (arg1) == INTEGER_CST
8011 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
8012 code, NULL_TREE)))
8013 return fold_convert (type, tem);
8015 goto binary;
8017 case LROTATE_EXPR:
8018 case RROTATE_EXPR:
8019 if (integer_all_onesp (arg0))
8020 return omit_one_operand (type, arg0, arg1);
8021 goto shift;
8023 case RSHIFT_EXPR:
8024 /* Optimize -1 >> x for arithmetic right shifts. */
8025 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8026 return omit_one_operand (type, arg0, arg1);
8027 /* ... fall through ... */
8029 case LSHIFT_EXPR:
8030 shift:
8031 if (integer_zerop (arg1))
8032 return non_lvalue (fold_convert (type, arg0));
8033 if (integer_zerop (arg0))
8034 return omit_one_operand (type, arg0, arg1);
8036 /* Since a negative shift count is not well-defined,
8037 don't try to compute it in the compiler. */
8038 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8039 return t;
8040 /* Rewrite an LROTATE_EXPR by a constant into an
8041 RROTATE_EXPR by a new constant. */
8042 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8044 tree tem = build_int_cst (NULL_TREE,
8045 GET_MODE_BITSIZE (TYPE_MODE (type)));
8046 tem = fold_convert (TREE_TYPE (arg1), tem);
8047 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8048 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
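/* Rotating left by C is the same as rotating right by BITSIZE - C;
   e.g. in a 32-bit mode an LROTATE_EXPR by 8 becomes an
   RROTATE_EXPR by 24.  */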
8051 /* If we have a rotate of a bit operation with the rotate count and
8052 the second operand of the bit operation both constant,
8053 permute the two operations. */
8054 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8055 && (TREE_CODE (arg0) == BIT_AND_EXPR
8056 || TREE_CODE (arg0) == BIT_IOR_EXPR
8057 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8058 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8059 return fold (build2 (TREE_CODE (arg0), type,
8060 fold (build2 (code, type,
8061 TREE_OPERAND (arg0, 0), arg1)),
8062 fold (build2 (code, type,
8063 TREE_OPERAND (arg0, 1), arg1))));
8065 /* Two consecutive rotates adding up to the width of the mode can
8066 be ignored. */
8067 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8068 && TREE_CODE (arg0) == RROTATE_EXPR
8069 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8070 && TREE_INT_CST_HIGH (arg1) == 0
8071 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8072 && ((TREE_INT_CST_LOW (arg1)
8073 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8074 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8075 return TREE_OPERAND (arg0, 0);
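/* E.g. in a 32-bit mode, ((X rrotate 10) rrotate 22) is X itself,
   since 10 + 22 == 32.  */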
8077 goto binary;
8079 case MIN_EXPR:
8080 if (operand_equal_p (arg0, arg1, 0))
8081 return omit_one_operand (type, arg0, arg1);
8082 if (INTEGRAL_TYPE_P (type)
8083 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8084 return omit_one_operand (type, arg1, arg0);
8085 goto associate;
8087 case MAX_EXPR:
8088 if (operand_equal_p (arg0, arg1, 0))
8089 return omit_one_operand (type, arg0, arg1);
8090 if (INTEGRAL_TYPE_P (type)
8091 && TYPE_MAX_VALUE (type)
8092 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8093 return omit_one_operand (type, arg1, arg0);
8094 goto associate;
8096 case TRUTH_NOT_EXPR:
8097 /* The argument to invert_truthvalue must have Boolean type. */
8098 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8099 arg0 = fold_convert (boolean_type_node, arg0);
8101 /* Note that the operand of this must be an int
8102 and its values must be 0 or 1.
8103 ("true" is a fixed value perhaps depending on the language,
8104 but we don't handle values other than 1 correctly yet.) */
8105 tem = invert_truthvalue (arg0);
8106 /* Avoid infinite recursion. */
8107 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
8108 return t;
8109 return fold_convert (type, tem);
8111 case TRUTH_ANDIF_EXPR:
8112 /* Note that the operands of this must be ints
8113 and their values must be 0 or 1.
8114 ("true" is a fixed value perhaps depending on the language.) */
8115 /* If first arg is constant zero, return it. */
8116 if (integer_zerop (arg0))
8117 return fold_convert (type, arg0);
8118 case TRUTH_AND_EXPR:
8119 /* If either arg is constant true, drop it. */
8120 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8121 return non_lvalue (fold_convert (type, arg1));
8122 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8123 /* Preserve sequence points. */
8124 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8125 return non_lvalue (fold_convert (type, arg0));
8126 /* If second arg is constant zero, result is zero, but first arg
8127 must be evaluated. */
8128 if (integer_zerop (arg1))
8129 return omit_one_operand (type, arg1, arg0);
8130 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8131 case will be handled here. */
8132 if (integer_zerop (arg0))
8133 return omit_one_operand (type, arg0, arg1);
8135 /* !X && X is always false. */
8136 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8137 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8138 return omit_one_operand (type, integer_zero_node, arg1);
8139 /* X && !X is always false. */
8140 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8141 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8142 return omit_one_operand (type, integer_zero_node, arg0);
8144 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8145 means A >= Y && A != MAX, but in this case we know that
8146 A < X <= MAX. */
8148 if (!TREE_SIDE_EFFECTS (arg0)
8149 && !TREE_SIDE_EFFECTS (arg1))
8151 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8152 if (tem)
8153 return fold (build2 (code, type, tem, arg1));
8155 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8156 if (tem)
8157 return fold (build2 (code, type, arg0, tem));
8160 truth_andor:
8161 /* We only do these simplifications if we are optimizing. */
8162 if (!optimize)
8163 return t;
8165 /* Check for things like (A || B) && (A || C). We can convert this
8166 to A || (B && C). Note that either operator can be any of the four
8167 truth and/or operations and the transformation will still be
8168 valid. Also note that we only care about order for the
8169 ANDIF and ORIF operators. If B contains side effects, this
8170 might change the truth-value of A. */
8171 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8172 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8173 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8174 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8175 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8176 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8178 tree a00 = TREE_OPERAND (arg0, 0);
8179 tree a01 = TREE_OPERAND (arg0, 1);
8180 tree a10 = TREE_OPERAND (arg1, 0);
8181 tree a11 = TREE_OPERAND (arg1, 1);
8182 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8183 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8184 && (code == TRUTH_AND_EXPR
8185 || code == TRUTH_OR_EXPR));
8187 if (operand_equal_p (a00, a10, 0))
8188 return fold (build2 (TREE_CODE (arg0), type, a00,
8189 fold (build2 (code, type, a01, a11))));
8190 else if (commutative && operand_equal_p (a00, a11, 0))
8191 return fold (build2 (TREE_CODE (arg0), type, a00,
8192 fold (build2 (code, type, a01, a10))));
8193 else if (commutative && operand_equal_p (a01, a10, 0))
8194 return fold (build2 (TREE_CODE (arg0), type, a01,
8195 fold (build2 (code, type, a00, a11))));
8197 /* This case is tricky because we must either have commutative
8198 operators or else A10 must not have side-effects. */
8200 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8201 && operand_equal_p (a01, a11, 0))
8202 return fold (build2 (TREE_CODE (arg0), type,
8203 fold (build2 (code, type, a00, a10)),
8204 a01));
8207 /* See if we can build a range comparison. */
8208 if (0 != (tem = fold_range_test (t)))
8209 return tem;
8211 /* Check for the possibility of merging component references. If our
8212 lhs is another similar operation, try to merge its rhs with our
8213 rhs. Then try to merge our lhs and rhs. */
8214 if (TREE_CODE (arg0) == code
8215 && 0 != (tem = fold_truthop (code, type,
8216 TREE_OPERAND (arg0, 1), arg1)))
8217 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8219 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8220 return tem;
8222 return t;
8224 case TRUTH_ORIF_EXPR:
8225 /* Note that the operands of this must be ints
8226 and their values must be 0 or true.
8227 ("true" is a fixed value perhaps depending on the language.) */
8228 /* If first arg is constant true, return it. */
8229 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8230 return fold_convert (type, arg0);
8231 case TRUTH_OR_EXPR:
8232 /* If either arg is constant zero, drop it. */
8233 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8234 return non_lvalue (fold_convert (type, arg1));
8235 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8236 /* Preserve sequence points. */
8237 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8238 return non_lvalue (fold_convert (type, arg0));
8239 /* If second arg is constant true, result is true, but we must
8240 evaluate first arg. */
8241 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8242 return omit_one_operand (type, arg1, arg0);
8243 /* Likewise for first arg, but note this only occurs here for
8244 TRUTH_OR_EXPR. */
8245 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8246 return omit_one_operand (type, arg0, arg1);
8248 /* !X || X is always true. */
8249 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8250 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8251 return omit_one_operand (type, integer_one_node, arg1);
8252 /* X || !X is always true. */
8253 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8254 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8255 return omit_one_operand (type, integer_one_node, arg0);
8257 goto truth_andor;
8259 case TRUTH_XOR_EXPR:
8260 /* If the second arg is constant zero, drop it. */
8261 if (integer_zerop (arg1))
8262 return non_lvalue (fold_convert (type, arg0));
8263 /* If the second arg is constant true, this is a logical inversion. */
8264 if (integer_onep (arg1))
8265 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
8266 /* Identical arguments cancel to zero. */
8267 if (operand_equal_p (arg0, arg1, 0))
8268 return omit_one_operand (type, integer_zero_node, arg0);
8270 /* !X ^ X is always true. */
8271 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8272 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8273 return omit_one_operand (type, integer_one_node, arg1);
8275 /* X ^ !X is always true. */
8276 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8277 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8278 return omit_one_operand (type, integer_one_node, arg0);
8280 return t;
8282 case EQ_EXPR:
8283 case NE_EXPR:
8284 case LT_EXPR:
8285 case GT_EXPR:
8286 case LE_EXPR:
8287 case GE_EXPR:
8288 /* If one arg is a real or integer constant, put it last. */
8289 if (tree_swap_operands_p (arg0, arg1, true))
8290 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
8292 /* If this is an equality comparison of the address of a non-weak
8293 object against zero, then we know the result. */
8294 if ((code == EQ_EXPR || code == NE_EXPR)
8295 && TREE_CODE (arg0) == ADDR_EXPR
8296 && DECL_P (TREE_OPERAND (arg0, 0))
8297 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8298 && integer_zerop (arg1))
8299 return constant_boolean_node (code != EQ_EXPR, type);
8301 /* If this is an equality comparison of the address of two non-weak,
8302 unaliased symbols neither of which are extern (since we do not
8303 have access to attributes for externs), then we know the result. */
8304 if ((code == EQ_EXPR || code == NE_EXPR)
8305 && TREE_CODE (arg0) == ADDR_EXPR
8306 && DECL_P (TREE_OPERAND (arg0, 0))
8307 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8308 && ! lookup_attribute ("alias",
8309 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8310 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8311 && TREE_CODE (arg1) == ADDR_EXPR
8312 && DECL_P (TREE_OPERAND (arg1, 0))
8313 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8314 && ! lookup_attribute ("alias",
8315 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8316 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8317 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
8318 ? code == EQ_EXPR : code != EQ_EXPR,
8319 type);
8321 /* If this is a comparison of two exprs that look like an
8322 ARRAY_REF of the same object, then we can fold this to a
8323 comparison of the two offsets. */
8324 if (COMPARISON_CLASS_P (t))
8326 tree base0, offset0, base1, offset1;
8328 if (extract_array_ref (arg0, &base0, &offset0)
8329 && extract_array_ref (arg1, &base1, &offset1)
8330 && operand_equal_p (base0, base1, 0))
8332 if (offset0 == NULL_TREE
8333 && offset1 == NULL_TREE)
8335 offset0 = integer_zero_node;
8336 offset1 = integer_zero_node;
8338 else if (offset0 == NULL_TREE)
8339 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8340 else if (offset1 == NULL_TREE)
8341 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8343 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
8344 return fold (build2 (code, type, offset0, offset1));
8348 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8350 tree targ0 = strip_float_extensions (arg0);
8351 tree targ1 = strip_float_extensions (arg1);
8352 tree newtype = TREE_TYPE (targ0);
8354 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8355 newtype = TREE_TYPE (targ1);
8357 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8358 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8359 return fold (build2 (code, type, fold_convert (newtype, targ0),
8360 fold_convert (newtype, targ1)));
8362 /* (-a) CMP (-b) -> b CMP a */
8363 if (TREE_CODE (arg0) == NEGATE_EXPR
8364 && TREE_CODE (arg1) == NEGATE_EXPR)
8365 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
8366 TREE_OPERAND (arg0, 0)));
8368 if (TREE_CODE (arg1) == REAL_CST)
8370 REAL_VALUE_TYPE cst;
8371 cst = TREE_REAL_CST (arg1);
8373 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8374 if (TREE_CODE (arg0) == NEGATE_EXPR)
8375 return
8376 fold (build2 (swap_tree_comparison (code), type,
8377 TREE_OPERAND (arg0, 0),
8378 build_real (TREE_TYPE (arg1),
8379 REAL_VALUE_NEGATE (cst))));
8381 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8382 /* a CMP (-0) -> a CMP 0 */
8383 if (REAL_VALUE_MINUS_ZERO (cst))
8384 return fold (build2 (code, type, arg0,
8385 build_real (TREE_TYPE (arg1), dconst0)));
8387 /* x != NaN is always true, other ops are always false. */
8388 if (REAL_VALUE_ISNAN (cst)
8389 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8391 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8392 return omit_one_operand (type, tem, arg0);
8395 /* Fold comparisons against infinity. */
8396 if (REAL_VALUE_ISINF (cst))
8398 tem = fold_inf_compare (code, type, arg0, arg1);
8399 if (tem != NULL_TREE)
8400 return tem;
8404 /* If this is a comparison of a real constant with a PLUS_EXPR
8405 or a MINUS_EXPR of a real constant, we can convert it into a
8406 comparison with a revised real constant, provided that unsafe
8407 math optimizations are enabled and no overflow occurs. */
8408 if (flag_unsafe_math_optimizations
8409 && TREE_CODE (arg1) == REAL_CST
8410 && (TREE_CODE (arg0) == PLUS_EXPR
8411 || TREE_CODE (arg0) == MINUS_EXPR)
8412 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8413 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8414 ? MINUS_EXPR : PLUS_EXPR,
8415 arg1, TREE_OPERAND (arg0, 1), 0))
8416 && ! TREE_CONSTANT_OVERFLOW (tem))
8417 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8419 /* Likewise, we can simplify a comparison of a real constant with
8420 a MINUS_EXPR whose first operand is also a real constant, i.e.
8421 (c1 - x) < c2 becomes x > c1-c2. */
8422 if (flag_unsafe_math_optimizations
8423 && TREE_CODE (arg1) == REAL_CST
8424 && TREE_CODE (arg0) == MINUS_EXPR
8425 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8426 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8427 arg1, 0))
8428 && ! TREE_CONSTANT_OVERFLOW (tem))
8429 return fold (build2 (swap_tree_comparison (code), type,
8430 TREE_OPERAND (arg0, 1), tem));
8432 /* Fold comparisons against built-in math functions. */
8433 if (TREE_CODE (arg1) == REAL_CST
8434 && flag_unsafe_math_optimizations
8435 && ! flag_errno_math)
8437 enum built_in_function fcode = builtin_mathfn_code (arg0);
8439 if (fcode != END_BUILTINS)
8441 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8442 if (tem != NULL_TREE)
8443 return tem;
8448 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8449 if (TREE_CONSTANT (arg1)
8450 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8451 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8452 /* This optimization is invalid for ordered comparisons
8453 if CONST+INCR overflows or if foo+incr might overflow.
8454 This optimization is invalid for floating point due to rounding.
8455 For pointer types we assume overflow doesn't happen. */
8456 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8457 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8458 && (code == EQ_EXPR || code == NE_EXPR))))
8460 tree varop, newconst;
8462 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8464 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
8465 arg1, TREE_OPERAND (arg0, 1)));
8466 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8467 TREE_OPERAND (arg0, 0),
8468 TREE_OPERAND (arg0, 1));
8470 else
8472 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
8473 arg1, TREE_OPERAND (arg0, 1)));
8474 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8475 TREE_OPERAND (arg0, 0),
8476 TREE_OPERAND (arg0, 1));
8480 /* If VAROP is a reference to a bitfield, we must mask
8481 the constant by the width of the field. */
8482 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8483 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8484 && host_integerp (DECL_SIZE (TREE_OPERAND
8485 (TREE_OPERAND (varop, 0), 1)), 1))
8487 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8488 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8489 tree folded_compare, shift;
8491 /* First check whether the comparison would always come
8492 out the same. If we skipped this check, the masking
8493 could change the meaning. */
8494 folded_compare = fold (build2 (code, type,
8495 TREE_OPERAND (varop, 0), arg1));
8496 if (integer_zerop (folded_compare)
8497 || integer_onep (folded_compare))
8498 return omit_one_operand (type, folded_compare, varop);
8500 shift = build_int_cst (NULL_TREE,
8501 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8502 shift = fold_convert (TREE_TYPE (varop), shift);
8503 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8504 newconst, shift));
8505 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8506 newconst, shift));
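/* Shifting NEWCONST up to the top of the word and back down truncates
   it to SIZE bits, and for a signed field the arithmetic right shift
   also sign-extends it, mirroring the effect of storing the constant
   through the bitfield.  */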
8509 return fold (build2 (code, type, varop, newconst));
8512 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8513 This transformation affects the cases which are handled in later
8514 optimizations involving comparisons with non-negative constants. */
8515 if (TREE_CODE (arg1) == INTEGER_CST
8516 && TREE_CODE (arg0) != INTEGER_CST
8517 && tree_int_cst_sgn (arg1) > 0)
8519 switch (code)
8521 case GE_EXPR:
8522 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8523 return fold (build2 (GT_EXPR, type, arg0, arg1));
8525 case LT_EXPR:
8526 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8527 return fold (build2 (LE_EXPR, type, arg0, arg1));
8529 default:
8530 break;
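/* For example, X >= 5 becomes X > 4 and X < 5 becomes X <= 4, so the
   code further below only has to recognize the strict forms.  */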
8534 /* Comparisons with the highest or lowest possible integer of
8535 the specified size will have known values.
8537 This is quite similar to fold_relational_hi_lo; however,
8538 attempts to share the code have been nothing but trouble. */
8540 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8542 if (TREE_CODE (arg1) == INTEGER_CST
8543 && ! TREE_CONSTANT_OVERFLOW (arg1)
8544 && width <= 2 * HOST_BITS_PER_WIDE_INT
8545 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8546 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8548 HOST_WIDE_INT signed_max_hi;
8549 unsigned HOST_WIDE_INT signed_max_lo;
8550 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
8552 if (width <= HOST_BITS_PER_WIDE_INT)
8554 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
8555 - 1;
8556 signed_max_hi = 0;
8557 max_hi = 0;
8559 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8561 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8562 min_lo = 0;
8563 min_hi = 0;
8565 else
8567 max_lo = signed_max_lo;
8568 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8569 min_hi = -1;
8572 else
8574 width -= HOST_BITS_PER_WIDE_INT;
8575 signed_max_lo = -1;
8576 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
8577 - 1;
8578 max_lo = -1;
8579 min_lo = 0;
8581 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8583 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8584 min_hi = 0;
8586 else
8588 max_hi = signed_max_hi;
8589 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8593 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
8594 && TREE_INT_CST_LOW (arg1) == max_lo)
8595 switch (code)
8597 case GT_EXPR:
8598 return omit_one_operand (type, integer_zero_node, arg0);
8600 case GE_EXPR:
8601 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8603 case LE_EXPR:
8604 return omit_one_operand (type, integer_one_node, arg0);
8606 case LT_EXPR:
8607 return fold (build2 (NE_EXPR, type, arg0, arg1));
8609 /* The GE_EXPR and LT_EXPR cases above are not normally
8610 reached because of previous transformations. */
8612 default:
8613 break;
8615 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
8616 == max_hi
8617 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
8618 switch (code)
8620 case GT_EXPR:
8621 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8622 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8623 case LE_EXPR:
8624 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8625 return fold (build2 (NE_EXPR, type, arg0, arg1));
8626 default:
8627 break;
8629 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
8630 == min_hi
8631 && TREE_INT_CST_LOW (arg1) == min_lo)
8632 switch (code)
8634 case LT_EXPR:
8635 return omit_one_operand (type, integer_zero_node, arg0);
8637 case LE_EXPR:
8638 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8640 case GE_EXPR:
8641 return omit_one_operand (type, integer_one_node, arg0);
8643 case GT_EXPR:
8644 return fold (build2 (NE_EXPR, type, arg0, arg1));
8646 default:
8647 break;
8649 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
8650 == min_hi
8651 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
8652 switch (code)
8654 case GE_EXPR:
8655 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8656 return fold (build2 (NE_EXPR, type, arg0, arg1));
8657 case LT_EXPR:
8658 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8659 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8660 default:
8661 break;
8664 else if (!in_gimple_form
8665 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
8666 && TREE_INT_CST_LOW (arg1) == signed_max_lo
8667 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8668 /* signed_type does not work on pointer types. */
8669 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8671 /* The following case also applies to X < signed_max+1
8672 and X >= signed_max+1 because of previous transformations. */
8673 if (code == LE_EXPR || code == GT_EXPR)
8675 tree st0, st1;
8676 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8677 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8678 return fold
8679 (build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
8680 type, fold_convert (st0, arg0),
8681 fold_convert (st1, integer_zero_node)));
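/* E.g. with 32-bit operands, (unsigned) x <= 2147483647 holds exactly
   when the sign bit of x is clear, so it folds to (int) x >= 0;
   likewise the GT_EXPR form folds to (int) x < 0.  */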
8687 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8688 a MINUS_EXPR of a constant, we can convert it into a comparison with
8689 a revised constant as long as no overflow occurs. */
8690 if ((code == EQ_EXPR || code == NE_EXPR)
8691 && TREE_CODE (arg1) == INTEGER_CST
8692 && (TREE_CODE (arg0) == PLUS_EXPR
8693 || TREE_CODE (arg0) == MINUS_EXPR)
8694 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8695 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8696 ? MINUS_EXPR : PLUS_EXPR,
8697 arg1, TREE_OPERAND (arg0, 1), 0))
8698 && ! TREE_CONSTANT_OVERFLOW (tem))
8699 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8701 /* Similarly for a NEGATE_EXPR. */
8702 else if ((code == EQ_EXPR || code == NE_EXPR)
8703 && TREE_CODE (arg0) == NEGATE_EXPR
8704 && TREE_CODE (arg1) == INTEGER_CST
8705 && 0 != (tem = negate_expr (arg1))
8706 && TREE_CODE (tem) == INTEGER_CST
8707 && ! TREE_CONSTANT_OVERFLOW (tem))
8708 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8710 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8711 for !=. Don't do this for ordered comparisons due to overflow. */
8712 else if ((code == NE_EXPR || code == EQ_EXPR)
8713 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8714 return fold (build2 (code, type,
8715 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8717 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8718 && TREE_CODE (arg0) == NOP_EXPR)
8720 /* If we are widening one operand of an integer comparison,
8721 see if the other operand is similarly being widened. Perhaps we
8722 can do the comparison in the narrower type. */
8723 tem = fold_widened_comparison (code, type, arg0, arg1);
8724 if (tem)
8725 return tem;
8727 /* Or if we are changing signedness. */
8728 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8729 if (tem)
8730 return tem;
8733 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8734 constant, we can simplify it. */
8735 else if (TREE_CODE (arg1) == INTEGER_CST
8736 && (TREE_CODE (arg0) == MIN_EXPR
8737 || TREE_CODE (arg0) == MAX_EXPR)
8738 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8739 return optimize_minmax_comparison (t);
8741 /* If we are comparing an ABS_EXPR with a constant, we can
8742 convert all the cases into explicit comparisons, but they may
8743 well not be faster than doing the ABS and one comparison.
8744 But ABS (X) <= C is a range comparison, which becomes a subtraction
8745 and a comparison, and is probably faster. */
8746 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8747 && TREE_CODE (arg0) == ABS_EXPR
8748 && ! TREE_SIDE_EFFECTS (arg0)
8749 && (0 != (tem = negate_expr (arg1)))
8750 && TREE_CODE (tem) == INTEGER_CST
8751 && ! TREE_CONSTANT_OVERFLOW (tem))
8752 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8753 build2 (GE_EXPR, type,
8754 TREE_OPERAND (arg0, 0), tem),
8755 build2 (LE_EXPR, type,
8756 TREE_OPERAND (arg0, 0), arg1)));
8758 /* If this is an EQ or NE comparison with zero and ARG0 is
8759 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8760 two operations, but the latter can be done in one less insn
8761 on machines that have only two-operand insns or on which a
8762 constant cannot be the first operand. */
8763 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8764 && TREE_CODE (arg0) == BIT_AND_EXPR)
8766 tree arg00 = TREE_OPERAND (arg0, 0);
8767 tree arg01 = TREE_OPERAND (arg0, 1);
8768 if (TREE_CODE (arg00) == LSHIFT_EXPR
8769 && integer_onep (TREE_OPERAND (arg00, 0)))
8770 return
8771 fold (build2 (code, type,
8772 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8773 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8774 arg01, TREE_OPERAND (arg00, 1)),
8775 fold_convert (TREE_TYPE (arg0),
8776 integer_one_node)),
8777 arg1));
8778 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8779 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8780 return
8781 fold (build2 (code, type,
8782 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8783 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8784 arg00, TREE_OPERAND (arg01, 1)),
8785 fold_convert (TREE_TYPE (arg0),
8786 integer_one_node)),
8787 arg1));
8790 /* If this is an NE or EQ comparison of zero against the result of a
8791 signed MOD operation whose second operand is a power of 2, make
8792 the MOD operation unsigned since it is simpler and equivalent. */
8793 if ((code == NE_EXPR || code == EQ_EXPR)
8794 && integer_zerop (arg1)
8795 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8796 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8797 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8798 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8799 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8800 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8802 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8803 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
8804 fold_convert (newtype,
8805 TREE_OPERAND (arg0, 0)),
8806 fold_convert (newtype,
8807 TREE_OPERAND (arg0, 1))));
8809 return fold (build2 (code, type, newmod,
8810 fold_convert (newtype, arg1)));
8813 /* If this is an NE comparison of zero with an AND of one, remove the
8814 comparison since the AND will give the correct value. */
8815 if (code == NE_EXPR && integer_zerop (arg1)
8816 && TREE_CODE (arg0) == BIT_AND_EXPR
8817 && integer_onep (TREE_OPERAND (arg0, 1)))
8818 return fold_convert (type, arg0);
8820 /* If we have (A & C) == C where C is a power of 2, convert this into
8821 (A & C) != 0. Similarly for NE_EXPR. */
8822 if ((code == EQ_EXPR || code == NE_EXPR)
8823 && TREE_CODE (arg0) == BIT_AND_EXPR
8824 && integer_pow2p (TREE_OPERAND (arg0, 1))
8825 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8826 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8827 arg0, fold_convert (TREE_TYPE (arg0),
8828 integer_zero_node)));
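/* This is valid because C has a single bit set, so A & C is either
   0 or C; comparing it against C therefore asks the same question as
   comparing it against 0 with the opposite code.  */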
8830 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8831 2, then fold the expression into shifts and logical operations. */
8832 tem = fold_single_bit_test (code, arg0, arg1, type);
8833 if (tem)
8834 return tem;
8836 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8837 Similarly for NE_EXPR. */
8838 if ((code == EQ_EXPR || code == NE_EXPR)
8839 && TREE_CODE (arg0) == BIT_AND_EXPR
8840 && TREE_CODE (arg1) == INTEGER_CST
8841 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8843 tree notc = fold (build1 (BIT_NOT_EXPR,
8844 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8845 TREE_OPERAND (arg0, 1)));
8846 tree dandnotc = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8847 arg1, notc));
8848 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8849 if (integer_nonzerop (dandnotc))
8850 return omit_one_operand (type, rslt, arg0);
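      /* Worked example (illustrative):

           (a & 3) == 4

         can never be true because 4 has a bit outside the mask 3, so
         for EQ_EXPR the whole test folds to 0 (and to 1 for NE_EXPR);
         arg0 is still evaluated if it has side effects.  */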
8853 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8854 Similarly for NE_EXPR. */
8855 if ((code == EQ_EXPR || code == NE_EXPR)
8856 && TREE_CODE (arg0) == BIT_IOR_EXPR
8857 && TREE_CODE (arg1) == INTEGER_CST
8858 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8860 tree notd = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8861 tree candnotd = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8862 TREE_OPERAND (arg0, 1), notd));
8863 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8864 if (integer_nonzerop (candnotd))
8865 return omit_one_operand (type, rslt, arg0);
8868 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8869 and similarly for >= into !=. */
8870 if ((code == LT_EXPR || code == GE_EXPR)
8871 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8872 && TREE_CODE (arg1) == LSHIFT_EXPR
8873 && integer_onep (TREE_OPERAND (arg1, 0)))
8874 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8875 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8876 TREE_OPERAND (arg1, 1)),
8877 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8879 else if ((code == LT_EXPR || code == GE_EXPR)
8880 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8881 && (TREE_CODE (arg1) == NOP_EXPR
8882 || TREE_CODE (arg1) == CONVERT_EXPR)
8883 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8884 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8885 return
8886 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8887 fold_convert (TREE_TYPE (arg0),
8888 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8889 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8890 1))),
8891 fold_convert (TREE_TYPE (arg0), integer_zero_node));
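      /* Worked example (illustrative): for unsigned x,

           x < (1 << y)    becomes   (x >> y) == 0
           x >= (1 << y)   becomes   (x >> y) != 0

         which is valid because x < 2**y exactly when no bit at
         position y or above is set in x.  */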
8893 /* Simplify comparison of something with itself. (For IEEE
8894 floating-point, we can only do some of these simplifications.) */
8895 if (operand_equal_p (arg0, arg1, 0))
8897 switch (code)
8899 case EQ_EXPR:
8900 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8901 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8902 return constant_boolean_node (1, type);
8903 break;
8905 case GE_EXPR:
8906 case LE_EXPR:
8907 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8908 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8909 return constant_boolean_node (1, type);
8910 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8912 case NE_EXPR:
 8913 /* For NE, we can only do this simplification if the operands are
 8914 integral or we don't honor IEEE floating point NaNs. */
8915 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8916 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8917 break;
8918 /* ... fall through ... */
8919 case GT_EXPR:
8920 case LT_EXPR:
8921 return constant_boolean_node (0, type);
8922 default:
8923 gcc_unreachable ();
8927 /* If we are comparing an expression that just has comparisons
8928 of two integer values, arithmetic expressions of those comparisons,
8929 and constants, we can simplify it. There are only three cases
8930 to check: the two values can either be equal, the first can be
8931 greater, or the second can be greater. Fold the expression for
8932 those three values. Since each value must be 0 or 1, we have
8933 eight possibilities, each of which corresponds to the constant 0
8934 or 1 or one of the six possible comparisons.
8936 This handles common cases like (a > b) == 0 but also handles
8937 expressions like ((x > y) - (y > x)) > 0, which supposedly
8938 occur in macroized code. */
8940 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8942 tree cval1 = 0, cval2 = 0;
8943 int save_p = 0;
8945 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8946 /* Don't handle degenerate cases here; they should already
8947 have been handled anyway. */
8948 && cval1 != 0 && cval2 != 0
8949 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8950 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8951 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8952 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8953 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8954 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8955 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8957 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8958 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8960 /* We can't just pass T to eval_subst in case cval1 or cval2
8961 was the same as ARG1. */
8963 tree high_result
8964 = fold (build2 (code, type,
8965 eval_subst (arg0, cval1, maxval,
8966 cval2, minval),
8967 arg1));
8968 tree equal_result
8969 = fold (build2 (code, type,
8970 eval_subst (arg0, cval1, maxval,
8971 cval2, maxval),
8972 arg1));
8973 tree low_result
8974 = fold (build2 (code, type,
8975 eval_subst (arg0, cval1, minval,
8976 cval2, maxval),
8977 arg1));
8979 /* All three of these results should be 0 or 1. Confirm they
8980 are. Then use those values to select the proper code
8981 to use. */
8983 if ((integer_zerop (high_result)
8984 || integer_onep (high_result))
8985 && (integer_zerop (equal_result)
8986 || integer_onep (equal_result))
8987 && (integer_zerop (low_result)
8988 || integer_onep (low_result)))
8990 /* Make a 3-bit mask with the high-order bit being the
8991 value for `>', the next for '=', and the low for '<'. */
8992 switch ((integer_onep (high_result) * 4)
8993 + (integer_onep (equal_result) * 2)
8994 + integer_onep (low_result))
8996 case 0:
8997 /* Always false. */
8998 return omit_one_operand (type, integer_zero_node, arg0);
8999 case 1:
9000 code = LT_EXPR;
9001 break;
9002 case 2:
9003 code = EQ_EXPR;
9004 break;
9005 case 3:
9006 code = LE_EXPR;
9007 break;
9008 case 4:
9009 code = GT_EXPR;
9010 break;
9011 case 5:
9012 code = NE_EXPR;
9013 break;
9014 case 6:
9015 code = GE_EXPR;
9016 break;
9017 case 7:
9018 /* Always true. */
9019 return omit_one_operand (type, integer_one_node, arg0);
9022 tem = build2 (code, type, cval1, cval2);
9023 if (save_p)
9024 return save_expr (tem);
9025 else
9026 return fold (tem);
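	  /* Worked example (illustrative): for the expression

	       ((x > y) - (y > x)) > 0

	     the three trial foldings give high_result = 1 (first value
	     greater), equal_result = 0 and low_result = 0, i.e. mask 4,
	     so the whole expression simplifies to x > y.  */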
9031 /* If this is a comparison of a field, we may be able to simplify it. */
9032 if (((TREE_CODE (arg0) == COMPONENT_REF
9033 && lang_hooks.can_use_bit_fields_p ())
9034 || TREE_CODE (arg0) == BIT_FIELD_REF)
9035 && (code == EQ_EXPR || code == NE_EXPR)
9036 /* Handle the constant case even without -O
9037 to make sure the warnings are given. */
9038 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9040 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9041 if (t1)
9042 return t1;
9045 /* If this is a comparison of complex values and either or both sides
9046 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
9047 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
9048 This may prevent needless evaluations. */
9049 if ((code == EQ_EXPR || code == NE_EXPR)
9050 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
9051 && (TREE_CODE (arg0) == COMPLEX_EXPR
9052 || TREE_CODE (arg1) == COMPLEX_EXPR
9053 || TREE_CODE (arg0) == COMPLEX_CST
9054 || TREE_CODE (arg1) == COMPLEX_CST))
9056 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
9057 tree real0, imag0, real1, imag1;
9059 arg0 = save_expr (arg0);
9060 arg1 = save_expr (arg1);
9061 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
9062 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
9063 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
9064 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
9066 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
9067 : TRUTH_ORIF_EXPR),
9068 type,
9069 fold (build2 (code, type, real0, real1)),
9070 fold (build2 (code, type, imag0, imag1))));
9073 /* Optimize comparisons of strlen vs zero to a compare of the
9074 first character of the string vs zero. To wit,
9075 strlen(ptr) == 0 => *ptr == 0
9076 strlen(ptr) != 0 => *ptr != 0
9077 Other cases should reduce to one of these two (or a constant)
9078 due to the return value of strlen being unsigned. */
9079 if ((code == EQ_EXPR || code == NE_EXPR)
9080 && integer_zerop (arg1)
9081 && TREE_CODE (arg0) == CALL_EXPR)
9083 tree fndecl = get_callee_fndecl (arg0);
9084 tree arglist;
9086 if (fndecl
9087 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9088 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9089 && (arglist = TREE_OPERAND (arg0, 1))
9090 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9091 && ! TREE_CHAIN (arglist))
9092 return fold (build2 (code, type,
9093 build1 (INDIRECT_REF, char_type_node,
9094 TREE_VALUE (arglist)),
9095 fold_convert (char_type_node,
9096 integer_zero_node)));
9099 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9100 into a single range test. */
9101 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9102 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9103 && TREE_CODE (arg1) == INTEGER_CST
9104 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9105 && !integer_zerop (TREE_OPERAND (arg0, 1))
9106 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9107 && !TREE_OVERFLOW (arg1))
9109 t1 = fold_div_compare (code, type, arg0, arg1);
9110 if (t1 != NULL_TREE)
9111 return t1;
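      /* Worked example (illustrative): with truncating division,

           x / 10 == 3

         holds exactly for 30 <= x <= 39, so the comparison can be
         folded into a single range test on x.  */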
9114 if ((code == EQ_EXPR || code == NE_EXPR)
9115 && !TREE_SIDE_EFFECTS (arg0)
9116 && integer_zerop (arg1)
9117 && tree_expr_nonzero_p (arg0))
 9118 return constant_boolean_node (code == NE_EXPR, type);
9120 t1 = fold_relational_const (code, type, arg0, arg1);
9121 return t1 == NULL_TREE ? t : t1;
9123 case UNORDERED_EXPR:
9124 case ORDERED_EXPR:
9125 case UNLT_EXPR:
9126 case UNLE_EXPR:
9127 case UNGT_EXPR:
9128 case UNGE_EXPR:
9129 case UNEQ_EXPR:
9130 case LTGT_EXPR:
9131 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9133 t1 = fold_relational_const (code, type, arg0, arg1);
9134 if (t1 != NULL_TREE)
9135 return t1;
9138 /* If the first operand is NaN, the result is constant. */
9139 if (TREE_CODE (arg0) == REAL_CST
9140 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9141 && (code != LTGT_EXPR || ! flag_trapping_math))
9143 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9144 ? integer_zero_node
9145 : integer_one_node;
9146 return omit_one_operand (type, t1, arg1);
9149 /* If the second operand is NaN, the result is constant. */
9150 if (TREE_CODE (arg1) == REAL_CST
9151 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9152 && (code != LTGT_EXPR || ! flag_trapping_math))
9154 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9155 ? integer_zero_node
9156 : integer_one_node;
9157 return omit_one_operand (type, t1, arg0);
9160 /* Simplify unordered comparison of something with itself. */
9161 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9162 && operand_equal_p (arg0, arg1, 0))
9163 return constant_boolean_node (1, type);
9165 if (code == LTGT_EXPR
9166 && !flag_trapping_math
9167 && operand_equal_p (arg0, arg1, 0))
9168 return constant_boolean_node (0, type);
9170 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9172 tree targ0 = strip_float_extensions (arg0);
9173 tree targ1 = strip_float_extensions (arg1);
9174 tree newtype = TREE_TYPE (targ0);
9176 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9177 newtype = TREE_TYPE (targ1);
9179 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9180 return fold (build2 (code, type, fold_convert (newtype, targ0),
9181 fold_convert (newtype, targ1)));
9184 return t;
9186 case COND_EXPR:
9187 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9188 so all simple results must be passed through pedantic_non_lvalue. */
9189 if (TREE_CODE (arg0) == INTEGER_CST)
9191 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
9192 /* Only optimize constant conditions when the selected branch
9193 has the same type as the COND_EXPR. This avoids optimizing
9194 away "c ? x : throw", where the throw has a void type. */
9195 if (! VOID_TYPE_P (TREE_TYPE (tem))
9196 || VOID_TYPE_P (type))
9197 return pedantic_non_lvalue (tem);
9198 return t;
9200 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
9201 return pedantic_omit_one_operand (type, arg1, arg0);
9203 /* If we have A op B ? A : C, we may be able to convert this to a
9204 simpler expression, depending on the operation and the values
9205 of B and C. Signed zeros prevent all of these transformations,
9206 for reasons given above each one.
9208 Also try swapping the arguments and inverting the conditional. */
9209 if (COMPARISON_CLASS_P (arg0)
9210 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9211 arg1, TREE_OPERAND (arg0, 1))
9212 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9214 tem = fold_cond_expr_with_comparison (type, arg0,
9215 TREE_OPERAND (t, 1),
9216 TREE_OPERAND (t, 2));
9217 if (tem)
9218 return tem;
9221 if (COMPARISON_CLASS_P (arg0)
9222 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9223 TREE_OPERAND (t, 2),
9224 TREE_OPERAND (arg0, 1))
9225 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
9227 tem = invert_truthvalue (arg0);
9228 if (COMPARISON_CLASS_P (tem))
9230 tem = fold_cond_expr_with_comparison (type, tem,
9231 TREE_OPERAND (t, 2),
9232 TREE_OPERAND (t, 1));
9233 if (tem)
9234 return tem;
9238 /* If the second operand is simpler than the third, swap them
9239 since that produces better jump optimization results. */
9240 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
9241 TREE_OPERAND (t, 2), false))
9243 /* See if this can be inverted. If it can't, possibly because
9244 it was a floating-point inequality comparison, don't do
9245 anything. */
9246 tem = invert_truthvalue (arg0);
9248 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9249 return fold (build3 (code, type, tem,
9250 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
9253 /* Convert A ? 1 : 0 to simply A. */
9254 if (integer_onep (TREE_OPERAND (t, 1))
9255 && integer_zerop (TREE_OPERAND (t, 2))
9256 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
9257 call to fold will try to move the conversion inside
9258 a COND, which will recurse. In that case, the COND_EXPR
9259 is probably the best choice, so leave it alone. */
9260 && type == TREE_TYPE (arg0))
9261 return pedantic_non_lvalue (arg0);
9263 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
9264 over COND_EXPR in cases such as floating point comparisons. */
9265 if (integer_zerop (TREE_OPERAND (t, 1))
9266 && integer_onep (TREE_OPERAND (t, 2))
9267 && truth_value_p (TREE_CODE (arg0)))
9268 return pedantic_non_lvalue (fold_convert (type,
9269 invert_truthvalue (arg0)));
9271 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
9272 if (TREE_CODE (arg0) == LT_EXPR
9273 && integer_zerop (TREE_OPERAND (arg0, 1))
9274 && integer_zerop (TREE_OPERAND (t, 2))
9275 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
9276 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
9277 TREE_TYPE (tem), tem, arg1)));
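      /* Worked example (illustrative): for a 32-bit int a,

           a < 0 ? 0x80000000 : 0

         folds to a & 0x80000000, since the sign bit of a is set
         exactly when a is negative.  */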
9279 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
9280 already handled above. */
9281 if (TREE_CODE (arg0) == BIT_AND_EXPR
9282 && integer_onep (TREE_OPERAND (arg0, 1))
9283 && integer_zerop (TREE_OPERAND (t, 2))
9284 && integer_pow2p (arg1))
9286 tree tem = TREE_OPERAND (arg0, 0);
9287 STRIP_NOPS (tem);
9288 if (TREE_CODE (tem) == RSHIFT_EXPR
9289 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9290 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
9291 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
9292 return fold (build2 (BIT_AND_EXPR, type,
9293 TREE_OPERAND (tem, 0), arg1));
9296 /* A & N ? N : 0 is simply A & N if N is a power of two. This
9297 is probably obsolete because the first operand should be a
9298 truth value (that's why we have the two cases above), but let's
9299 leave it in until we can confirm this for all front-ends. */
9300 if (integer_zerop (TREE_OPERAND (t, 2))
9301 && TREE_CODE (arg0) == NE_EXPR
9302 && integer_zerop (TREE_OPERAND (arg0, 1))
9303 && integer_pow2p (arg1)
9304 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
9305 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
9306 arg1, OEP_ONLY_CONST))
9307 return pedantic_non_lvalue (fold_convert (type,
9308 TREE_OPERAND (arg0, 0)));
9310 /* Convert A ? B : 0 into A && B if A and B are truth values. */
9311 if (integer_zerop (TREE_OPERAND (t, 2))
9312 && truth_value_p (TREE_CODE (arg0))
9313 && truth_value_p (TREE_CODE (arg1)))
9314 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
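      /* Worked example (illustrative): with p and q both truth values,
	 p ? q : 0 evaluates q only when p is nonzero, exactly as
	 p && q does, hence the rewrite to TRUTH_ANDIF_EXPR.  */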
9316 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
9317 if (integer_onep (TREE_OPERAND (t, 2))
9318 && truth_value_p (TREE_CODE (arg0))
9319 && truth_value_p (TREE_CODE (arg1)))
9321 /* Only perform transformation if ARG0 is easily inverted. */
9322 tem = invert_truthvalue (arg0);
9323 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9324 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
9327 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
9328 if (integer_zerop (arg1)
9329 && truth_value_p (TREE_CODE (arg0))
9330 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9332 /* Only perform transformation if ARG0 is easily inverted. */
9333 tem = invert_truthvalue (arg0);
9334 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9335 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
9336 TREE_OPERAND (t, 2)));
9339 /* Convert A ? 1 : B into A || B if A and B are truth values. */
9340 if (integer_onep (arg1)
9341 && truth_value_p (TREE_CODE (arg0))
9342 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9343 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
9344 TREE_OPERAND (t, 2)));
9346 return t;
9348 case COMPOUND_EXPR:
9349 /* When pedantic, a compound expression can be neither an lvalue
9350 nor an integer constant expression. */
9351 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9352 return t;
 9353 /* Don't let (0, 0) be a null pointer constant. */
9354 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9355 : fold_convert (type, arg1);
9356 return pedantic_non_lvalue (tem);
9358 case COMPLEX_EXPR:
9359 if (wins)
9360 return build_complex (type, arg0, arg1);
9361 return t;
9363 case REALPART_EXPR:
9364 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9365 return t;
9366 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9367 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
9368 TREE_OPERAND (arg0, 1));
9369 else if (TREE_CODE (arg0) == COMPLEX_CST)
9370 return TREE_REALPART (arg0);
9371 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9372 return fold (build2 (TREE_CODE (arg0), type,
9373 fold (build1 (REALPART_EXPR, type,
9374 TREE_OPERAND (arg0, 0))),
9375 fold (build1 (REALPART_EXPR, type,
9376 TREE_OPERAND (arg0, 1)))));
9377 return t;
9379 case IMAGPART_EXPR:
9380 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9381 return fold_convert (type, integer_zero_node);
9382 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9383 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
9384 TREE_OPERAND (arg0, 0));
9385 else if (TREE_CODE (arg0) == COMPLEX_CST)
9386 return TREE_IMAGPART (arg0);
9387 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9388 return fold (build2 (TREE_CODE (arg0), type,
9389 fold (build1 (IMAGPART_EXPR, type,
9390 TREE_OPERAND (arg0, 0))),
9391 fold (build1 (IMAGPART_EXPR, type,
9392 TREE_OPERAND (arg0, 1)))));
9393 return t;
9395 case CALL_EXPR:
9396 /* Check for a built-in function. */
9397 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
9398 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
9399 == FUNCTION_DECL)
9400 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
9402 tree tmp = fold_builtin (t, false);
9403 if (tmp)
9404 return tmp;
9406 return t;
9408 default:
9409 return t;
9410 } /* switch (code) */
9413 #ifdef ENABLE_FOLD_CHECKING
9414 #undef fold
9416 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
9417 static void fold_check_failed (tree, tree);
9418 void print_fold_checksum (tree);
 9420 /* When --enable-checking=fold is in effect, compute a digest of expr
 9421 before and after the actual fold call, to verify that fold did not
 9422 accidentally change the original expr. */
9424 tree
9425 fold (tree expr)
9427 tree ret;
9428 struct md5_ctx ctx;
9429 unsigned char checksum_before[16], checksum_after[16];
9430 htab_t ht;
9432 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9433 md5_init_ctx (&ctx);
9434 fold_checksum_tree (expr, &ctx, ht);
9435 md5_finish_ctx (&ctx, checksum_before);
9436 htab_empty (ht);
9438 ret = fold_1 (expr);
9440 md5_init_ctx (&ctx);
9441 fold_checksum_tree (expr, &ctx, ht);
9442 md5_finish_ctx (&ctx, checksum_after);
9443 htab_delete (ht);
9445 if (memcmp (checksum_before, checksum_after, 16))
9446 fold_check_failed (expr, ret);
9448 return ret;
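/* Note (illustrative): the digests above are computed over every node
   reachable from EXPR, so any in-place modification fold_1 makes to its
   input -- rather than building a fresh tree -- changes the second MD5
   sum and triggers fold_check_failed.  */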
9451 void
9452 print_fold_checksum (tree expr)
9454 struct md5_ctx ctx;
9455 unsigned char checksum[16], cnt;
9456 htab_t ht;
9458 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9459 md5_init_ctx (&ctx);
9460 fold_checksum_tree (expr, &ctx, ht);
9461 md5_finish_ctx (&ctx, checksum);
9462 htab_delete (ht);
9463 for (cnt = 0; cnt < 16; ++cnt)
9464 fprintf (stderr, "%02x", checksum[cnt]);
9465 putc ('\n', stderr);
9468 static void
9469 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
9471 internal_error ("fold check: original tree changed by fold");
9474 static void
9475 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
9477 void **slot;
9478 enum tree_code code;
9479 char buf[sizeof (struct tree_decl)];
9480 int i, len;
9482 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
9483 <= sizeof (struct tree_decl))
9484 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
9485 if (expr == NULL)
9486 return;
9487 slot = htab_find_slot (ht, expr, INSERT);
9488 if (*slot != NULL)
9489 return;
9490 *slot = expr;
9491 code = TREE_CODE (expr);
9492 if (TREE_CODE_CLASS (code) == tcc_declaration
9493 && DECL_ASSEMBLER_NAME_SET_P (expr))
9495 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9496 memcpy (buf, expr, tree_size (expr));
9497 expr = (tree) buf;
9498 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9500 else if (TREE_CODE_CLASS (code) == tcc_type
9501 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
9502 || TYPE_CACHED_VALUES_P (expr)))
9504 /* Allow these fields to be modified. */
9505 memcpy (buf, expr, tree_size (expr));
9506 expr = (tree) buf;
9507 TYPE_POINTER_TO (expr) = NULL;
9508 TYPE_REFERENCE_TO (expr) = NULL;
9509 TYPE_CACHED_VALUES_P (expr) = 0;
9510 TYPE_CACHED_VALUES (expr) = NULL;
9512 md5_process_bytes (expr, tree_size (expr), ctx);
9513 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
9514 if (TREE_CODE_CLASS (code) != tcc_type
9515 && TREE_CODE_CLASS (code) != tcc_declaration)
9516 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
9517 switch (TREE_CODE_CLASS (code))
9519 case tcc_constant:
9520 switch (code)
9522 case STRING_CST:
9523 md5_process_bytes (TREE_STRING_POINTER (expr),
9524 TREE_STRING_LENGTH (expr), ctx);
9525 break;
9526 case COMPLEX_CST:
9527 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
9528 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
9529 break;
9530 case VECTOR_CST:
9531 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
9532 break;
9533 default:
9534 break;
9536 break;
9537 case tcc_exceptional:
9538 switch (code)
9540 case TREE_LIST:
9541 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
9542 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
9543 break;
9544 case TREE_VEC:
9545 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
9546 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
9547 break;
9548 default:
9549 break;
9551 break;
9552 case tcc_expression:
9553 case tcc_reference:
9554 case tcc_comparison:
9555 case tcc_unary:
9556 case tcc_binary:
9557 case tcc_statement:
9558 len = TREE_CODE_LENGTH (code);
9559 for (i = 0; i < len; ++i)
9560 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
9561 break;
9562 case tcc_declaration:
9563 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
9564 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
9565 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
9566 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
9567 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
9568 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
9569 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
9570 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
9571 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
9572 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
9573 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
9574 break;
9575 case tcc_type:
9576 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9577 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9578 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9579 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9580 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9581 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
9582 if (INTEGRAL_TYPE_P (expr)
9583 || SCALAR_FLOAT_TYPE_P (expr))
9585 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9586 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9588 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9589 if (TREE_CODE (expr) == RECORD_TYPE
9590 || TREE_CODE (expr) == UNION_TYPE
9591 || TREE_CODE (expr) == QUAL_UNION_TYPE)
9592 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9593 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9594 break;
9595 default:
9596 break;
9600 #endif
9602 /* Perform constant folding and related simplification of initializer
9603 expression EXPR. This behaves identically to "fold" but ignores
9604 potential run-time traps and exceptions that fold must preserve. */
9606 tree
9607 fold_initializer (tree expr)
9609 int saved_signaling_nans = flag_signaling_nans;
9610 int saved_trapping_math = flag_trapping_math;
9611 int saved_rounding_math = flag_rounding_math;
9612 int saved_trapv = flag_trapv;
9613 tree result;
9615 flag_signaling_nans = 0;
9616 flag_trapping_math = 0;
9617 flag_rounding_math = 0;
9618 flag_trapv = 0;
9620 result = fold (expr);
9622 flag_signaling_nans = saved_signaling_nans;
9623 flag_trapping_math = saved_trapping_math;
9624 flag_rounding_math = saved_rounding_math;
9625 flag_trapv = saved_trapv;
9627 return result;
 9630 /* Determine if the first argument is a multiple of the second argument.
 9631 Return 0 if it is not, or if we cannot easily determine it to be.
9633 An example of the sort of thing we care about (at this point; this routine
9634 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9635 fold cases do now) is discovering that
9637 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9639 is a multiple of
9641 SAVE_EXPR (J * 8)
9643 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9645 This code also handles discovering that
9647 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9649 is a multiple of 8 so we don't have to worry about dealing with a
9650 possible remainder.
9652 Note that we *look* inside a SAVE_EXPR only to determine how it was
9653 calculated; it is not safe for fold to do much of anything else with the
9654 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9655 at run time. For example, the latter example above *cannot* be implemented
9656 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9657 evaluation time of the original SAVE_EXPR is not necessarily the same at
9658 the time the new expression is evaluated. The only optimization of this
9659 sort that would be valid is changing
9661 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9663 divided by 8 to
9665 SAVE_EXPR (I) * SAVE_EXPR (J)
9667 (where the same SAVE_EXPR (J) is used in the original and the
9668 transformed version). */
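/* Worked example (illustrative): multiple_of_p returns 1 for
   TOP = SAVE_EXPR (i) * SAVE_EXPR (j * 8) and BOTTOM = 8, via the
   MULT_EXPR and SAVE_EXPR cases: the right factor j * 8 is itself a
   multiple of 8 because its INTEGER_CST factor is.  */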
9670 static int
9671 multiple_of_p (tree type, tree top, tree bottom)
9673 if (operand_equal_p (top, bottom, 0))
9674 return 1;
9676 if (TREE_CODE (type) != INTEGER_TYPE)
9677 return 0;
9679 switch (TREE_CODE (top))
9681 case BIT_AND_EXPR:
9682 /* Bitwise and provides a power of two multiple. If the mask is
9683 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
9684 if (!integer_pow2p (bottom))
9685 return 0;
9686 /* FALLTHRU */
9688 case MULT_EXPR:
9689 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9690 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9692 case PLUS_EXPR:
9693 case MINUS_EXPR:
9694 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9695 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9697 case LSHIFT_EXPR:
9698 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
9700 tree op1, t1;
9702 op1 = TREE_OPERAND (top, 1);
9703 /* const_binop may not detect overflow correctly,
9704 so check for it explicitly here. */
9705 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9706 > TREE_INT_CST_LOW (op1)
9707 && TREE_INT_CST_HIGH (op1) == 0
9708 && 0 != (t1 = fold_convert (type,
9709 const_binop (LSHIFT_EXPR,
9710 size_one_node,
9711 op1, 0)))
9712 && ! TREE_OVERFLOW (t1))
9713 return multiple_of_p (type, t1, bottom);
9715 return 0;
9717 case NOP_EXPR:
 9718 /* Can't handle conversions from a non-integral or wider integral type. */
9719 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9720 || (TYPE_PRECISION (type)
9721 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9722 return 0;
 9724 /* ... fall through ... */
9726 case SAVE_EXPR:
9727 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
9729 case INTEGER_CST:
9730 if (TREE_CODE (bottom) != INTEGER_CST
9731 || (TYPE_UNSIGNED (type)
9732 && (tree_int_cst_sgn (top) < 0
9733 || tree_int_cst_sgn (bottom) < 0)))
9734 return 0;
9735 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9736 top, bottom, 0));
9738 default:
9739 return 0;
9743 /* Return true if `t' is known to be non-negative. */
9746 tree_expr_nonnegative_p (tree t)
9748 switch (TREE_CODE (t))
9750 case ABS_EXPR:
9751 return 1;
9753 case INTEGER_CST:
9754 return tree_int_cst_sgn (t) >= 0;
9756 case REAL_CST:
9757 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
9759 case PLUS_EXPR:
9760 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9761 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9762 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9764 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9765 both unsigned and at least 2 bits shorter than the result. */
9766 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9767 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9768 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9770 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9771 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9772 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9773 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9775 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9776 TYPE_PRECISION (inner2)) + 1;
9777 return prec < TYPE_PRECISION (TREE_TYPE (t));
9780 break;
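      /* Worked example (illustrative): if x and y are unsigned chars
         widened to int, x + y is at most 255 + 255 = 510, and since
         each inner type is at least 2 bits narrower than int the sum
         can never reach the sign bit, so it is known non-negative.  */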
9782 case MULT_EXPR:
9783 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9785 /* x * x for floating point x is always non-negative. */
9786 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9787 return 1;
9788 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9789 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9792 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9793 both unsigned and their total bits is shorter than the result. */
9794 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9795 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9796 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9798 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9799 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9800 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9801 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9802 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9803 < TYPE_PRECISION (TREE_TYPE (t));
9805 return 0;
9807 case TRUNC_DIV_EXPR:
9808 case CEIL_DIV_EXPR:
9809 case FLOOR_DIV_EXPR:
9810 case ROUND_DIV_EXPR:
9811 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9812 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9814 case TRUNC_MOD_EXPR:
9815 case CEIL_MOD_EXPR:
9816 case FLOOR_MOD_EXPR:
9817 case ROUND_MOD_EXPR:
9818 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9820 case RDIV_EXPR:
9821 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9822 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9824 case BIT_AND_EXPR:
9825 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9826 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9827 case BIT_IOR_EXPR:
9828 case BIT_XOR_EXPR:
9829 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9830 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9832 case NOP_EXPR:
9834 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9835 tree outer_type = TREE_TYPE (t);
9837 if (TREE_CODE (outer_type) == REAL_TYPE)
9839 if (TREE_CODE (inner_type) == REAL_TYPE)
9840 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9841 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9843 if (TYPE_UNSIGNED (inner_type))
9844 return 1;
9845 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9848 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9850 if (TREE_CODE (inner_type) == REAL_TYPE)
 9851 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9852 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9853 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9854 && TYPE_UNSIGNED (inner_type);
9857 break;
9859 case COND_EXPR:
9860 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9861 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9862 case COMPOUND_EXPR:
9863 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9864 case MIN_EXPR:
9865 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9866 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9867 case MAX_EXPR:
9868 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9869 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9870 case MODIFY_EXPR:
9871 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9872 case BIND_EXPR:
9873 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9874 case SAVE_EXPR:
9875 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9876 case NON_LVALUE_EXPR:
9877 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9878 case FLOAT_EXPR:
9879 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9881 case TARGET_EXPR:
9883 tree temp = TARGET_EXPR_SLOT (t);
9884 t = TARGET_EXPR_INITIAL (t);
9886 /* If the initializer is non-void, then it's a normal expression
9887 that will be assigned to the slot. */
9888 if (!VOID_TYPE_P (t))
9889 return tree_expr_nonnegative_p (t);
9891 /* Otherwise, the initializer sets the slot in some way. One common
9892 way is an assignment statement at the end of the initializer. */
9893 while (1)
9895 if (TREE_CODE (t) == BIND_EXPR)
9896 t = expr_last (BIND_EXPR_BODY (t));
9897 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9898 || TREE_CODE (t) == TRY_CATCH_EXPR)
9899 t = expr_last (TREE_OPERAND (t, 0));
9900 else if (TREE_CODE (t) == STATEMENT_LIST)
9901 t = expr_last (t);
9902 else
9903 break;
9905 if (TREE_CODE (t) == MODIFY_EXPR
9906 && TREE_OPERAND (t, 0) == temp)
9907 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9909 return 0;
9912 case CALL_EXPR:
9914 tree fndecl = get_callee_fndecl (t);
9915 tree arglist = TREE_OPERAND (t, 1);
9916 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
9917 switch (DECL_FUNCTION_CODE (fndecl))
9919 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9920 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9921 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9922 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9924 CASE_BUILTIN_F (BUILT_IN_ACOS)
9925 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9926 CASE_BUILTIN_F (BUILT_IN_CABS)
9927 CASE_BUILTIN_F (BUILT_IN_COSH)
9928 CASE_BUILTIN_F (BUILT_IN_ERFC)
9929 CASE_BUILTIN_F (BUILT_IN_EXP)
9930 CASE_BUILTIN_F (BUILT_IN_EXP10)
9931 CASE_BUILTIN_F (BUILT_IN_EXP2)
9932 CASE_BUILTIN_F (BUILT_IN_FABS)
9933 CASE_BUILTIN_F (BUILT_IN_FDIM)
9934 CASE_BUILTIN_F (BUILT_IN_FREXP)
9935 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9936 CASE_BUILTIN_F (BUILT_IN_POW10)
9937 CASE_BUILTIN_I (BUILT_IN_FFS)
9938 CASE_BUILTIN_I (BUILT_IN_PARITY)
9939 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9940 /* Always true. */
9941 return 1;
9943 CASE_BUILTIN_F (BUILT_IN_SQRT)
9944 /* sqrt(-0.0) is -0.0. */
9945 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9946 return 1;
9947 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9949 CASE_BUILTIN_F (BUILT_IN_ASINH)
9950 CASE_BUILTIN_F (BUILT_IN_ATAN)
9951 CASE_BUILTIN_F (BUILT_IN_ATANH)
9952 CASE_BUILTIN_F (BUILT_IN_CBRT)
9953 CASE_BUILTIN_F (BUILT_IN_CEIL)
9954 CASE_BUILTIN_F (BUILT_IN_ERF)
9955 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9956 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9957 CASE_BUILTIN_F (BUILT_IN_FMOD)
9958 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9959 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9960 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9961 CASE_BUILTIN_F (BUILT_IN_LRINT)
9962 CASE_BUILTIN_F (BUILT_IN_LROUND)
9963 CASE_BUILTIN_F (BUILT_IN_MODF)
9964 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9965 CASE_BUILTIN_F (BUILT_IN_POW)
9966 CASE_BUILTIN_F (BUILT_IN_RINT)
9967 CASE_BUILTIN_F (BUILT_IN_ROUND)
9968 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9969 CASE_BUILTIN_F (BUILT_IN_SINH)
9970 CASE_BUILTIN_F (BUILT_IN_TANH)
9971 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9972 /* True if the 1st argument is nonnegative. */
9973 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9975 CASE_BUILTIN_F (BUILT_IN_FMAX)
9976 /* True if the 1st OR 2nd arguments are nonnegative. */
9977 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9978 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9980 CASE_BUILTIN_F (BUILT_IN_FMIN)
9981 /* True if the 1st AND 2nd arguments are nonnegative. */
9982 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9983 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9985 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9986 /* True if the 2nd argument is nonnegative. */
9987 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9989 default:
9990 break;
9991 #undef CASE_BUILTIN_F
9992 #undef CASE_BUILTIN_I
9996 /* ... fall through ... */
9998 default:
9999 if (truth_value_p (TREE_CODE (t)))
10000 /* Truth values evaluate to 0 or 1, which is nonnegative. */
10001 return 1;
 10004 /* We don't know the sign of `t', so be conservative and return false. */
10005 return 0;
10008 /* Return true when T is an address and is known to be nonzero.
10009 For floating point we further ensure that T is not denormal.
 10010 Similar logic is present in nonzero_address_p in rtlanal.c. */
10012 static bool
10013 tree_expr_nonzero_p (tree t)
10015 tree type = TREE_TYPE (t);
10017 /* Doing something useful for floating point would need more work. */
10018 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10019 return false;
10021 switch (TREE_CODE (t))
10023 case ABS_EXPR:
10024 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10025 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10027 case INTEGER_CST:
10028 /* We used to test for !integer_zerop here. This does not work correctly
10029 if TREE_CONSTANT_OVERFLOW (t). */
10030 return (TREE_INT_CST_LOW (t) != 0
10031 || TREE_INT_CST_HIGH (t) != 0);
10033 case PLUS_EXPR:
10034 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10036 /* With the presence of negative values it is hard
10037 to say something. */
10038 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10039 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10040 return false;
 10041 /* One of the operands must be positive and the other non-negative. */
10042 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10043 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10045 break;
10047 case MULT_EXPR:
10048 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10050 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10051 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10053 break;
10055 case NOP_EXPR:
10057 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10058 tree outer_type = TREE_TYPE (t);
10060 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
10061 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
10063 break;
10065 case ADDR_EXPR:
10067 tree base = get_base_address (TREE_OPERAND (t, 0));
10069 if (!base)
10070 return false;
10072 /* Weak declarations may link to NULL. */
10073 if (DECL_P (base))
10074 return !DECL_WEAK (base);
10076 /* Constants are never weak. */
10077 if (CONSTANT_CLASS_P (base))
10078 return true;
10080 return false;
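      /* Illustrative note: the address of an ordinary declaration,
         e.g. "int x;", is known nonzero, but a symbol declared with
         __attribute__ ((weak)) may resolve to a NULL address at link
         time, so nothing is claimed for it.  */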
10083 case COND_EXPR:
10084 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10085 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
10087 case MIN_EXPR:
10088 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10089 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10091 case MAX_EXPR:
10092 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
10094 /* When both operands are nonzero, then MAX must be too. */
10095 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
10096 return true;
10098 /* MAX where operand 0 is positive is positive. */
10099 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10101 /* MAX where operand 1 is positive is positive. */
10102 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10103 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10104 return true;
10105 break;
10107 case COMPOUND_EXPR:
10108 case MODIFY_EXPR:
10109 case BIND_EXPR:
10110 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
10112 case SAVE_EXPR:
10113 case NON_LVALUE_EXPR:
10114 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10116 case BIT_IOR_EXPR:
10117 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10118 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10120 default:
10121 break;
10123 return false;
 10126 /* See if we are applying CODE, a relational operator, to the highest
 10127 or lowest possible integer of TYPE. If so, then the result is a
 10128 compile-time constant. */
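/* Worked example (illustrative): for unsigned char x,

     x <= 255   folds to   1  (always true)
     x > 255    folds to   0  (always false)
     x >= 255   becomes    x == 255
     x < 255    becomes    x != 255

   since 255 is the highest value the type can hold.  */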
10130 static tree
10131 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
10132 tree *op1_p)
10134 tree op0 = *op0_p;
10135 tree op1 = *op1_p;
10136 enum tree_code code = *code_p;
10137 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
10139 if (TREE_CODE (op1) == INTEGER_CST
10140 && ! TREE_CONSTANT_OVERFLOW (op1)
10141 && width <= HOST_BITS_PER_WIDE_INT
10142 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
10143 || POINTER_TYPE_P (TREE_TYPE (op1))))
10145 unsigned HOST_WIDE_INT signed_max;
10146 unsigned HOST_WIDE_INT max, min;
10148 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
10150 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
10152 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10153 min = 0;
10155 else
10157 max = signed_max;
10158 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10161 if (TREE_INT_CST_HIGH (op1) == 0
10162 && TREE_INT_CST_LOW (op1) == max)
10163 switch (code)
10165 case GT_EXPR:
10166 return omit_one_operand (type, integer_zero_node, op0);
10168 case GE_EXPR:
10169 *code_p = EQ_EXPR;
10170 break;
10171 case LE_EXPR:
10172 return omit_one_operand (type, integer_one_node, op0);
10174 case LT_EXPR:
10175 *code_p = NE_EXPR;
10176 break;
10178 /* The GE_EXPR and LT_EXPR cases above are not normally
10179 reached because of previous transformations. */
10181 default:
10182 break;
10184 else if (TREE_INT_CST_HIGH (op1) == 0
10185 && TREE_INT_CST_LOW (op1) == max - 1)
10186 switch (code)
10188 case GT_EXPR:
10189 *code_p = EQ_EXPR;
10190 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10191 break;
10192 case LE_EXPR:
10193 *code_p = NE_EXPR;
10194 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10195 break;
10196 default:
10197 break;
10199 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10200 && TREE_INT_CST_LOW (op1) == min)
10201 switch (code)
10203 case LT_EXPR:
10204 return omit_one_operand (type, integer_zero_node, op0);
10206 case LE_EXPR:
10207 *code_p = EQ_EXPR;
10208 break;
10210 case GE_EXPR:
10211 return omit_one_operand (type, integer_one_node, op0);
10213 case GT_EXPR:
10214 *code_p = NE_EXPR;
10215 break;
10217 default:
10218 break;
10220 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10221 && TREE_INT_CST_LOW (op1) == min + 1)
10222 switch (code)
10224 case GE_EXPR:
10225 *code_p = NE_EXPR;
10226 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10227 break;
10228 case LT_EXPR:
10229 *code_p = EQ_EXPR;
10230 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10231 break;
10232 default:
10233 break;
10236 else if (TREE_INT_CST_HIGH (op1) == 0
10237 && TREE_INT_CST_LOW (op1) == signed_max
10238 && TYPE_UNSIGNED (TREE_TYPE (op1))
10239 /* signed_type does not work on pointer types. */
10240 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
10242 /* The following case also applies to X < signed_max+1
 10243 and X >= signed_max+1 because of previous transformations. */
10244 if (code == LE_EXPR || code == GT_EXPR)
10246 tree st0, st1, exp, retval;
10247 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
10248 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
10250 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10251 type,
10252 fold_convert (st0, op0),
10253 fold_convert (st1, integer_zero_node));
10255 retval = fold_binary_to_constant (TREE_CODE (exp),
10256 TREE_TYPE (exp),
10257 TREE_OPERAND (exp, 0),
10258 TREE_OPERAND (exp, 1));
10260 /* If we are in gimple form, then returning EXP would create
 10261 non-gimple expressions. Clearing it is safe and ensures
10262 we do not allow a non-gimple expression to escape. */
10263 if (in_gimple_form)
10264 exp = NULL;
10266 return (retval ? retval : exp);
10271 return NULL_TREE;
10275 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
10276 attempt to fold the expression to a constant without modifying TYPE,
10277 OP0 or OP1.
10279 If the expression could be simplified to a constant, then return
10280 the constant. If the expression would not be simplified to a
10281 constant, then return NULL_TREE.
10283 Note this is primarily designed to be called after gimplification
10284 of the tree structures and when at least one operand is a constant.
10285 As a result of those simplifying assumptions this routine is far
10286 simpler than the generic fold routine. */
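/* Worked example (illustrative): calling fold_binary_to_constant with
   PLUS_EXPR and two INTEGER_CST operands 2 and 3 yields the
   INTEGER_CST 5 via const_binop; with a non-constant operand it simply
   returns NULL_TREE rather than building a new expression.  */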
10288 tree
10289 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
10291 int wins = 1;
10292 tree subop0;
10293 tree subop1;
10294 tree tem;
10296 /* If this is a commutative operation, and ARG0 is a constant, move it
10297 to ARG1 to reduce the number of tests below. */
10298 if (commutative_tree_code (code)
10299 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
10301 tem = op0;
10302 op0 = op1;
10303 op1 = tem;
10306 /* If either operand is a complex type, extract its real component. */
10307 if (TREE_CODE (op0) == COMPLEX_CST)
10308 subop0 = TREE_REALPART (op0);
10309 else
10310 subop0 = op0;
10312 if (TREE_CODE (op1) == COMPLEX_CST)
10313 subop1 = TREE_REALPART (op1);
10314 else
10315 subop1 = op1;
10317 /* Note if either argument is not a real or integer constant.
10318 With a few exceptions, simplification is limited to cases
10319 where both arguments are constants. */
10320 if ((TREE_CODE (subop0) != INTEGER_CST
10321 && TREE_CODE (subop0) != REAL_CST)
10322 || (TREE_CODE (subop1) != INTEGER_CST
10323 && TREE_CODE (subop1) != REAL_CST))
10324 wins = 0;
10326 switch (code)
10328 case PLUS_EXPR:
10329 /* (plus (address) (const_int)) is a constant. */
10330 if (TREE_CODE (op0) == PLUS_EXPR
10331 && TREE_CODE (op1) == INTEGER_CST
10332 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
10333 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
10334 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
10335 == ADDR_EXPR)))
10336 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
10338 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
10339 const_binop (PLUS_EXPR, op1,
10340 TREE_OPERAND (op0, 1), 0));
10342 case BIT_XOR_EXPR:
10344 binary:
10345 if (!wins)
10346 return NULL_TREE;
10348 /* Both arguments are constants. Simplify. */
10349 tem = const_binop (code, op0, op1, 0);
10350 if (tem != NULL_TREE)
10352 /* The return value should always have the same type as
10353 the original expression. */
10354 if (TREE_TYPE (tem) != type)
10355 tem = fold_convert (type, tem);
10357 return tem;
10359 return NULL_TREE;
10361 case MINUS_EXPR:
10362 /* Fold &x - &x. This can happen from &x.foo - &x.
10363 This is unsafe for certain floats even in non-IEEE formats.
10364 In IEEE, it is unsafe because it does wrong for NaNs.
10365 Also note that operand_equal_p is always false if an
10366 operand is volatile. */
10367 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
10368 return fold_convert (type, integer_zero_node);
10370 goto binary;
10372 case MULT_EXPR:
10373 case BIT_AND_EXPR:
10374 /* Special case multiplication or bitwise AND where one argument
10375 is zero. */
10376 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
10377 return omit_one_operand (type, op1, op0);
10378 else
10379 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
10380 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
10381 && real_zerop (op1))
10382 return omit_one_operand (type, op1, op0);
10384 goto binary;
10386 case BIT_IOR_EXPR:
10387 /* Special case when we know the result will be all ones. */
10388 if (integer_all_onesp (op1))
10389 return omit_one_operand (type, op1, op0);
10391 goto binary;
10393 case TRUNC_DIV_EXPR:
10394 case ROUND_DIV_EXPR:
10395 case FLOOR_DIV_EXPR:
10396 case CEIL_DIV_EXPR:
10397 case EXACT_DIV_EXPR:
10398 case TRUNC_MOD_EXPR:
10399 case ROUND_MOD_EXPR:
10400 case FLOOR_MOD_EXPR:
10401 case CEIL_MOD_EXPR:
10402 case RDIV_EXPR:
10403 /* Division by zero is undefined. */
10404 if (integer_zerop (op1))
10405 return NULL_TREE;
10407 if (TREE_CODE (op1) == REAL_CST
10408 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
10409 && real_zerop (op1))
10410 return NULL_TREE;
10412 goto binary;
10414 case MIN_EXPR:
10415 if (INTEGRAL_TYPE_P (type)
10416 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10417 return omit_one_operand (type, op1, op0);
10419 goto binary;
10421 case MAX_EXPR:
10422 if (INTEGRAL_TYPE_P (type)
10423 && TYPE_MAX_VALUE (type)
10424 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10425 return omit_one_operand (type, op1, op0);
10427 goto binary;
10429 case RSHIFT_EXPR:
10430 /* Optimize -1 >> x for arithmetic right shifts. */
10431 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
10432 return omit_one_operand (type, op0, op1);
10433 /* ... fall through ... */
10435 case LSHIFT_EXPR:
10436 if (integer_zerop (op0))
10437 return omit_one_operand (type, op0, op1);
 10439 /* Since a negative shift count is not well-defined, don't
 10440 try to compute it in the compiler. */
10441 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
10442 return NULL_TREE;
10444 goto binary;
10446 case LROTATE_EXPR:
10447 case RROTATE_EXPR:
10448 /* -1 rotated either direction by any amount is still -1. */
10449 if (integer_all_onesp (op0))
10450 return omit_one_operand (type, op0, op1);
10452 /* 0 rotated either direction by any amount is still zero. */
10453 if (integer_zerop (op0))
10454 return omit_one_operand (type, op0, op1);
10456 goto binary;
10458 case COMPLEX_EXPR:
10459 if (wins)
10460 return build_complex (type, op0, op1);
10461 return NULL_TREE;
10463 case LT_EXPR:
10464 case LE_EXPR:
10465 case GT_EXPR:
10466 case GE_EXPR:
10467 case EQ_EXPR:
10468 case NE_EXPR:
10469 /* If one arg is a real or integer constant, put it last. */
10470 if ((TREE_CODE (op0) == INTEGER_CST
10471 && TREE_CODE (op1) != INTEGER_CST)
10472 || (TREE_CODE (op0) == REAL_CST
 10473 && TREE_CODE (op1) != REAL_CST))
10475 tree temp;
10477 temp = op0;
10478 op0 = op1;
10479 op1 = temp;
10480 code = swap_tree_comparison (code);
10483 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10484 This transformation affects the cases which are handled in later
10485 optimizations involving comparisons with non-negative constants. */
10486 if (TREE_CODE (op1) == INTEGER_CST
10487 && TREE_CODE (op0) != INTEGER_CST
10488 && tree_int_cst_sgn (op1) > 0)
10490 switch (code)
10492 case GE_EXPR:
10493 code = GT_EXPR;
10494 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10495 break;
10497 case LT_EXPR:
10498 code = LE_EXPR;
10499 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10500 break;
10502 default:
10503 break;
10507 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10508 if (tem)
10509 return tem;
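      /* Worked example (illustrative): for integer x, x >= 5 is
	 rewritten as x > 4 and x < 5 as x <= 4, so the hi/lo analysis
	 in fold_relational_hi_lo only needs to recognize one spelling
	 of each boundary test.  */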
10511 /* Fall through. */
10513 case ORDERED_EXPR:
10514 case UNORDERED_EXPR:
10515 case UNLT_EXPR:
10516 case UNLE_EXPR:
10517 case UNGT_EXPR:
10518 case UNGE_EXPR:
10519 case UNEQ_EXPR:
10520 case LTGT_EXPR:
10521 if (!wins)
10522 return NULL_TREE;
10524 return fold_relational_const (code, type, op0, op1);
10526 case RANGE_EXPR:
10527 /* This could probably be handled. */
10528 return NULL_TREE;
10530 case TRUTH_AND_EXPR:
10531 /* If second arg is constant zero, result is zero, but first arg
10532 must be evaluated. */
10533 if (integer_zerop (op1))
10534 return omit_one_operand (type, op1, op0);
10535 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10536 case will be handled here. */
10537 if (integer_zerop (op0))
10538 return omit_one_operand (type, op0, op1);
10539 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10540 return constant_boolean_node (true, type);
10541 return NULL_TREE;
10543 case TRUTH_OR_EXPR:
10544 /* If second arg is constant true, result is true, but we must
10545 evaluate first arg. */
10546 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10547 return omit_one_operand (type, op1, op0);
10548 /* Likewise for first arg, but note this only occurs here for
10549 TRUTH_OR_EXPR. */
10550 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10551 return omit_one_operand (type, op0, op1);
10552 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10553 return constant_boolean_node (false, type);
10554 return NULL_TREE;
10556 case TRUTH_XOR_EXPR:
10557 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10559 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10560 return constant_boolean_node (x, type);
10562 return NULL_TREE;
10564 default:
10565 return NULL_TREE;
10569 /* Given the components of a unary expression CODE, TYPE and OP0,
10570 attempt to fold the expression to a constant without modifying
10571 TYPE or OP0.
10573 If the expression could be simplified to a constant, then return
10574 the constant. If the expression would not be simplified to a
10575 constant, then return NULL_TREE.
10577 Note this is primarily designed to be called after gimplification
10578 of the tree structures and when op0 is a constant. As a result
10579 of those simplifying assumptions this routine is far simpler than
10580 the generic fold routine. */
10582 tree
10583 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
10585 /* Make sure we have a suitable constant argument. */
10586 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
10588 tree subop;
10590 if (TREE_CODE (op0) == COMPLEX_CST)
10591 subop = TREE_REALPART (op0);
10592 else
10593 subop = op0;
10595 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
10596 return NULL_TREE;
10599 switch (code)
10601 case NOP_EXPR:
10602 case FLOAT_EXPR:
10603 case CONVERT_EXPR:
10604 case FIX_TRUNC_EXPR:
10605 case FIX_FLOOR_EXPR:
10606 case FIX_CEIL_EXPR:
10607 return fold_convert_const (code, type, op0);
10609 case NEGATE_EXPR:
10610 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10611 return fold_negate_const (op0, type);
10612 else
10613 return NULL_TREE;
10615 case ABS_EXPR:
10616 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10617 return fold_abs_const (op0, type);
10618 else
10619 return NULL_TREE;
10621 case BIT_NOT_EXPR:
10622 if (TREE_CODE (op0) == INTEGER_CST)
10623 return fold_not_const (op0, type);
10624 else
10625 return NULL_TREE;
10627 case REALPART_EXPR:
10628 if (TREE_CODE (op0) == COMPLEX_CST)
10629 return TREE_REALPART (op0);
10630 else
10631 return NULL_TREE;
10633 case IMAGPART_EXPR:
10634 if (TREE_CODE (op0) == COMPLEX_CST)
10635 return TREE_IMAGPART (op0);
10636 else
10637 return NULL_TREE;
10639 case CONJ_EXPR:
10640 if (TREE_CODE (op0) == COMPLEX_CST
10641 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10642 return build_complex (type, TREE_REALPART (op0),
10643 negate_expr (TREE_IMAGPART (op0)));
10644 return NULL_TREE;
10646 default:
10647 return NULL_TREE;
10651 /* If EXP represents referencing an element in a constant string
10652 (either via pointer arithmetic or array indexing), return the
10653 tree representing the value accessed, otherwise return NULL. */
10655 tree
10656 fold_read_from_constant_string (tree exp)
10658 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10660 tree exp1 = TREE_OPERAND (exp, 0);
10661 tree index;
10662 tree string;
10664 if (TREE_CODE (exp) == INDIRECT_REF)
10665 string = string_constant (exp1, &index);
10666 else
10668 tree low_bound = array_ref_low_bound (exp);
10669 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10671 /* Optimize the special-case of a zero lower bound.
10673 We convert the low_bound to sizetype to avoid some problems
10674 with constant folding. (E.g. suppose the lower bound is 1,
10675 and its mode is QI. Without the conversion, (ARRAY
10676 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10677 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
10678 if (! integer_zerop (low_bound))
10679 index = size_diffop (index, fold_convert (sizetype, low_bound));
10681 string = exp1;
10684 if (string
10685 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10686 && TREE_CODE (string) == STRING_CST
10687 && TREE_CODE (index) == INTEGER_CST
10688 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10689 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10690 == MODE_INT)
10691 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10692 return fold_convert (TREE_TYPE (exp),
10693 build_int_cst (NULL_TREE,
10694 (TREE_STRING_POINTER (string)
10695 [TREE_INT_CST_LOW (index)])));
10697 return NULL;
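/* Illustrative sketch (an assumption, not from the original source):
   reading element 1 of a constant string.  STR is assumed to be a
   STRING_CST "abc" whose type is an array of char with a zero lower
   bound.

     tree ref = build4 (ARRAY_REF, char_type_node, str,
                        size_int (1), NULL_TREE, NULL_TREE);
     tree c = fold_read_from_constant_string (ref);

   C would then be the INTEGER_CST 98 ('b'), converted to the type of
   REF; any case rejected by the checks above yields NULL.  */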
10700 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10701 an integer constant or real constant.
10703 TYPE is the type of the result. */
10705 static tree
10706 fold_negate_const (tree arg0, tree type)
10708 tree t = NULL_TREE;
10710 switch (TREE_CODE (arg0))
10712 case INTEGER_CST:
10714 unsigned HOST_WIDE_INT low;
10715 HOST_WIDE_INT high;
10716 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10717 TREE_INT_CST_HIGH (arg0),
10718 &low, &high);
10719 t = build_int_cst_wide (type, low, high);
10720 t = force_fit_type (t, 1,
10721 (overflow | TREE_OVERFLOW (arg0))
10722 && !TYPE_UNSIGNED (type),
10723 TREE_CONSTANT_OVERFLOW (arg0));
10724 break;
10727 case REAL_CST:
10728 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10729 break;
10731 default:
10732 gcc_unreachable ();
10735 return t;
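/* For example (a sketch within this file, since fold_negate_const is
   static): negating the most negative value of a signed type wraps and
   is flagged by force_fit_type.

     tree m = fold_negate_const (TYPE_MIN_VALUE (integer_type_node),
                                 integer_type_node);

   M has the same bit pattern as the original minimum value, but
   TREE_OVERFLOW (m) is set because neg_double reports the overflow.  */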
10738 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10739 an integer constant or real constant.
10741 TYPE is the type of the result. */
10743 tree
10744 fold_abs_const (tree arg0, tree type)
10746 tree t = NULL_TREE;
10748 switch (TREE_CODE (arg0))
10750 case INTEGER_CST:
10751 /* If the value is unsigned, then the absolute value is
10752 the same as the ordinary value. */
10753 if (TYPE_UNSIGNED (type))
10754 t = arg0;
10755 /* Similarly, if the value is non-negative. */
10756 else if (INT_CST_LT (integer_minus_one_node, arg0))
10757 t = arg0;
10758 /* If the value is negative, then the absolute value is
10759 its negation. */
10760 else
10762 unsigned HOST_WIDE_INT low;
10763 HOST_WIDE_INT high;
10764 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10765 TREE_INT_CST_HIGH (arg0),
10766 &low, &high);
10767 t = build_int_cst_wide (type, low, high);
10768 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
10769 TREE_CONSTANT_OVERFLOW (arg0));
10771 break;
10773 case REAL_CST:
10774 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10775 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10776 else
10777 t = arg0;
10778 break;
10780 default:
10781 gcc_unreachable ();
10784 return t;
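/* Usage sketch (hypothetical values, not from the original source):

     tree a = fold_abs_const (build_int_cst (integer_type_node, -5),
                              integer_type_node);

   A is the INTEGER_CST 5, computed through the neg_double branch above
   because -5 is less than -1.  */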
10787 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10788 constant. TYPE is the type of the result. */
10790 static tree
10791 fold_not_const (tree arg0, tree type)
10793 tree t = NULL_TREE;
10795 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
10797 t = build_int_cst_wide (type,
10798 ~ TREE_INT_CST_LOW (arg0),
10799 ~ TREE_INT_CST_HIGH (arg0));
10800 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
10801 TREE_CONSTANT_OVERFLOW (arg0));
10803 return t;
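/* Sketch (within this file, as fold_not_const is static): complementing
   zero yields the all-ones value, i.e. -1 for a signed type.

     tree t = fold_not_const (build_int_cst (integer_type_node, 0),
                              integer_type_node);

   T is the INTEGER_CST -1, both words complemented by the code above.  */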
10806 /* Given CODE, a relational operator, the target type, TYPE and two
10807 constant operands OP0 and OP1, return the result of the
10808 relational operation. If the result is not a compile time
10809 constant, then return NULL_TREE. */
10811 static tree
10812 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
10814 int result, invert;
10816 /* From here on, the only cases we handle are when the result is
10817 known to be a constant. */
10819 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10821 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
10822 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
10824 /* Handle the cases where either operand is a NaN. */
10825 if (real_isnan (c0) || real_isnan (c1))
10827 switch (code)
10829 case EQ_EXPR:
10830 case ORDERED_EXPR:
10831 result = 0;
10832 break;
10834 case NE_EXPR:
10835 case UNORDERED_EXPR:
10836 case UNLT_EXPR:
10837 case UNLE_EXPR:
10838 case UNGT_EXPR:
10839 case UNGE_EXPR:
10840 case UNEQ_EXPR:
10841 result = 1;
10842 break;
10844 case LT_EXPR:
10845 case LE_EXPR:
10846 case GT_EXPR:
10847 case GE_EXPR:
10848 case LTGT_EXPR:
10849 if (flag_trapping_math)
10850 return NULL_TREE;
10851 result = 0;
10852 break;
10854 default:
10855 gcc_unreachable ();
10858 return constant_boolean_node (result, type);
10861 return constant_boolean_node (real_compare (code, c0, c1), type);
10864 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
10866 To compute GT, swap the arguments and do LT.
10867 To compute GE, do LT and invert the result.
10868 To compute LE, swap the arguments, do LT and invert the result.
10869 To compute NE, do EQ and invert the result.
10871 Therefore, the code below must handle only EQ and LT. */
10873 if (code == LE_EXPR || code == GT_EXPR)
10875 tree tem = op0;
10876 op0 = op1;
10877 op1 = tem;
10878 code = swap_tree_comparison (code);
10881 /* Note that it is safe to invert for real values here because we
10882 have already handled the one case where it matters. */
10884 invert = 0;
10885 if (code == NE_EXPR || code == GE_EXPR)
10887 invert = 1;
10888 code = invert_tree_comparison (code, false);
10891 /* Compute a result for LT or EQ if args permit;
10892 otherwise return NULL_TREE. */
10893 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10895 if (code == EQ_EXPR)
10896 result = tree_int_cst_equal (op0, op1);
10897 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
10898 result = INT_CST_LT_UNSIGNED (op0, op1);
10899 else
10900 result = INT_CST_LT (op0, op1);
10902 else
10903 return NULL_TREE;
10905 if (invert)
10906 result ^= 1;
10907 return constant_boolean_node (result, type);
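/* Sketch of the NaN path above (within this file, since
   fold_relational_const is static; NAN and ONE are hypothetical
   REAL_CST operands built elsewhere):

     tree r = fold_relational_const (LT_EXPR, boolean_type_node, nan, one);

   With flag_trapping_math set, R is NULL_TREE because the ordered
   comparison may trap; otherwise R is boolean_false_node.  */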
10910 /* Build an expression for a cleanup point containing EXPR with type TYPE.
10911 Don't build a cleanup point expression for EXPR if it doesn't have side
10912 effects. */
10914 tree
10915 fold_build_cleanup_point_expr (tree type, tree expr)
10917 /* If the expression does not have side effects then we don't have to wrap
10918 it with a cleanup point expression. */
10919 if (!TREE_SIDE_EFFECTS (expr))
10920 return expr;
10922 /* If the expression is a return, check whether the expression inside the
10923 return, or the right-hand side of the modify expression inside the
10924 return, has side effects. If either has none, we don't need to
10925 wrap the expression in a cleanup point expression. Note we don't check the
10926 left-hand side of the modify because it should always be the return decl. */
10927 if (TREE_CODE (expr) == RETURN_EXPR)
10929 tree op = TREE_OPERAND (expr, 0);
10930 if (!op || !TREE_SIDE_EFFECTS (op))
10931 return expr;
10932 op = TREE_OPERAND (op, 1);
10933 if (!TREE_SIDE_EFFECTS (op))
10934 return expr;
10937 return build1 (CLEANUP_POINT_EXPR, type, expr);
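/* Usage sketch (CALL is a hypothetical CALL_EXPR with side effects):

     tree wrapped = fold_build_cleanup_point_expr (void_type_node, call);

   WRAPPED is CLEANUP_POINT_EXPR <call>; passing a side-effect-free
   expression instead returns it unchanged.  */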
10940 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10941 avoid confusing the gimplify process. */
10943 tree
10944 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10946 /* The size of the object is not relevant when talking about its address. */
10947 if (TREE_CODE (t) == WITH_SIZE_EXPR)
10948 t = TREE_OPERAND (t, 0);
10950 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
10951 if (TREE_CODE (t) == INDIRECT_REF
10952 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
10954 t = TREE_OPERAND (t, 0);
10955 if (TREE_TYPE (t) != ptrtype)
10956 t = build1 (NOP_EXPR, ptrtype, t);
10958 else
10960 tree base = t;
10962 while (handled_component_p (base))
10963 base = TREE_OPERAND (base, 0);
10964 if (DECL_P (base))
10965 TREE_ADDRESSABLE (base) = 1;
10967 t = build1 (ADDR_EXPR, ptrtype, t);
10970 return t;
10973 tree
10974 build_fold_addr_expr (tree t)
10976 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
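/* Sketch (VAR is a hypothetical VAR_DECL): taking the address of an
   indirection folds back to the pointer instead of building a new
   ADDR_EXPR.

     tree ptr = build_fold_addr_expr (var);
     tree round_trip
       = build_fold_addr_expr (build1 (INDIRECT_REF, TREE_TYPE (var), ptr));

   PTR is ADDR_EXPR <var> and ROUND_TRIP is PTR again (possibly behind a
   NOP_EXPR if the pointer types differ); the first call also marks VAR
   with TREE_ADDRESSABLE.  */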
10979 /* Builds an expression for an indirection through T, simplifying some
10980 cases. */
10982 tree
10983 build_fold_indirect_ref (tree t)
10985 tree type = TREE_TYPE (TREE_TYPE (t));
10986 tree sub = t;
10987 tree subtype;
10989 STRIP_NOPS (sub);
10990 if (TREE_CODE (sub) == ADDR_EXPR)
10992 tree op = TREE_OPERAND (sub, 0);
10993 tree optype = TREE_TYPE (op);
10994 /* *&p => p */
10995 if (lang_hooks.types_compatible_p (type, optype))
10996 return op;
10997 /* *(foo *)&fooarray => fooarray[0] */
10998 else if (TREE_CODE (optype) == ARRAY_TYPE
10999 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
11000 return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
11003 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11004 subtype = TREE_TYPE (sub);
11005 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11006 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
11008 sub = build_fold_indirect_ref (sub);
11009 return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
11012 return build1 (INDIRECT_REF, type, t);
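/* Sketch of the *&p case (VAR is a hypothetical VAR_DECL whose type is
   compatible with itself under the language hooks):

     tree deref = build_fold_indirect_ref (build_fold_addr_expr (var));

   DEREF is VAR itself; no INDIRECT_REF node is built.  */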
11015 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11016 whose result is ignored. The type of the returned tree need not be
11017 the same as the original expression. */
11019 tree
11020 fold_ignored_result (tree t)
11022 if (!TREE_SIDE_EFFECTS (t))
11023 return integer_zero_node;
11025 for (;;)
11026 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11028 case tcc_unary:
11029 t = TREE_OPERAND (t, 0);
11030 break;
11032 case tcc_binary:
11033 case tcc_comparison:
11034 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11035 t = TREE_OPERAND (t, 0);
11036 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11037 t = TREE_OPERAND (t, 1);
11038 else
11039 return t;
11040 break;
11042 case tcc_expression:
11043 switch (TREE_CODE (t))
11045 case COMPOUND_EXPR:
11046 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11047 return t;
11048 t = TREE_OPERAND (t, 0);
11049 break;
11051 case COND_EXPR:
11052 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11053 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11054 return t;
11055 t = TREE_OPERAND (t, 0);
11056 break;
11058 default:
11059 return t;
11061 break;
11063 default:
11064 return t;
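/* Sketch (CALL is a hypothetical CALL_EXPR with side effects): a unary
   wrapper around the call is peeled off because its value is ignored.

     tree kept = fold_ignored_result (build1 (NEGATE_EXPR,
                                              TREE_TYPE (call), call));

   KEPT is CALL itself; an expression without side effects would have
   returned integer_zero_node immediately.  */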
11068 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11069 This can only be applied to objects of a sizetype. */
11071 tree
11072 round_up (tree value, int divisor)
11074 tree div = NULL_TREE;
11076 gcc_assert (divisor > 0);
11077 if (divisor == 1)
11078 return value;
11080 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11081 have to do anything. Only do this check when VALUE is not a
11082 constant, because for a constant the check is more expensive than
11083 simply performing the rounding. */
11084 if (TREE_CODE (value) != INTEGER_CST)
11086 div = build_int_cst (TREE_TYPE (value), divisor);
11088 if (multiple_of_p (TREE_TYPE (value), value, div))
11089 return value;
11092 /* If divisor is a power of two, simplify this to bit manipulation. */
11093 if (divisor == (divisor & -divisor))
11095 tree t;
11097 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11098 value = size_binop (PLUS_EXPR, value, t);
11099 t = build_int_cst (TREE_TYPE (value), -divisor);
11100 value = size_binop (BIT_AND_EXPR, value, t);
11102 else
11104 if (!div)
11105 div = build_int_cst (TREE_TYPE (value), divisor);
11106 value = size_binop (CEIL_DIV_EXPR, value, div);
11107 value = size_binop (MULT_EXPR, value, div);
11110 return value;
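/* Worked example: rounding 10 up to a multiple of 8 takes the
   power-of-two path above, computing (10 + 7) & -8.

     tree r = round_up (size_int (10), 8);

   R folds to the constant 16 via size_binop.  */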
11113 /* Likewise, but round down. */
11115 tree
11116 round_down (tree value, int divisor)
11118 tree div = NULL_TREE;
11120 gcc_assert (divisor > 0);
11121 if (divisor == 1)
11122 return value;
11124 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11125 have to do anything. Only do this check when VALUE is not a
11126 constant, because for a constant the check is more expensive than
11127 simply performing the rounding. */
11128 if (TREE_CODE (value) != INTEGER_CST)
11130 div = build_int_cst (TREE_TYPE (value), divisor);
11132 if (multiple_of_p (TREE_TYPE (value), value, div))
11133 return value;
11136 /* If divisor is a power of two, simplify this to bit manipulation. */
11137 if (divisor == (divisor & -divisor))
11139 tree t;
11141 t = build_int_cst (TREE_TYPE (value), -divisor);
11142 value = size_binop (BIT_AND_EXPR, value, t);
11144 else
11146 if (!div)
11147 div = build_int_cst (TREE_TYPE (value), divisor);
11148 value = size_binop (FLOOR_DIV_EXPR, value, div);
11149 value = size_binop (MULT_EXPR, value, div);
11152 return value;
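/* Worked example: rounding 10 down to a multiple of 8 is just the
   masking step, 10 & -8.

     tree r = round_down (size_int (10), 8);

   R folds to the constant 8.  */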
11155 /* Returns the pointer to the base of the object addressed by EXP and
11156 extracts the information about the offset of the access, storing it
11157 in PBITPOS and POFFSET. */
11159 static tree
11160 split_address_to_core_and_offset (tree exp,
11161 HOST_WIDE_INT *pbitpos, tree *poffset)
11163 tree core;
11164 enum machine_mode mode;
11165 int unsignedp, volatilep;
11166 HOST_WIDE_INT bitsize;
11168 if (TREE_CODE (exp) == ADDR_EXPR)
11170 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11171 poffset, &mode, &unsignedp, &volatilep,
11172 false);
11174 if (TREE_CODE (core) == INDIRECT_REF)
11175 core = TREE_OPERAND (core, 0);
11177 else
11179 core = exp;
11180 *pbitpos = 0;
11181 *poffset = NULL_TREE;
11184 return core;
11187 /* Returns true if addresses of E1 and E2 differ by a constant, false
11188 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11190 bool
11191 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11193 tree core1, core2;
11194 HOST_WIDE_INT bitpos1, bitpos2;
11195 tree toffset1, toffset2, tdiff, type;
11197 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11198 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11200 if (bitpos1 % BITS_PER_UNIT != 0
11201 || bitpos2 % BITS_PER_UNIT != 0
11202 || !operand_equal_p (core1, core2, 0))
11203 return false;
11205 if (toffset1 && toffset2)
11207 type = TREE_TYPE (toffset1);
11208 if (type != TREE_TYPE (toffset2))
11209 toffset2 = fold_convert (type, toffset2);
11211 tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
11212 if (!host_integerp (tdiff, 0))
11213 return false;
11215 *diff = tree_low_cst (tdiff, 0);
11217 else if (toffset1 || toffset2)
11219 /* If only one of the offsets is non-constant, the difference cannot
11220 be a constant. */
11221 return false;
11223 else
11224 *diff = 0;
11226 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11227 return true;
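/* Sketch (E1 and E2 are hypothetical ADDR_EXPRs of elements a[3] and
   a[1] of the same int array A):

     HOST_WIDE_INT diff;
     bool ok = ptr_difference_const (e1, e2, &diff);

   Both cores resolve to A, the constant indices land in the bit
   positions, and DIFF is set to 8 assuming a 4-byte int (2 elements
   of 4 bytes each).  */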