2005-01-07 Benjamin Kosnik <bkoz@redhat.com>
[official-gcc.git] / gcc / fold-const.c
blob21ee14cc5635796d6420ec24252c73bfbb707fe0
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
46 #include "config.h"
47 #include "system.h"
48 #include "coretypes.h"
49 #include "tm.h"
50 #include "flags.h"
51 #include "tree.h"
52 #include "real.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "toplev.h"
57 #include "ggc.h"
58 #include "hashtab.h"
59 #include "langhooks.h"
60 #include "md5.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.

   The encoding is: bit 0 = "less than", bit 1 = "equal",
   bit 2 = "greater than", bit 3 = "unordered", so a code is the OR
   of the outcomes for which the comparison is true.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum tree_code invert_tree_comparison (enum tree_code, bool);
93 static enum comparison_code comparison_to_compcode (enum tree_code);
94 static enum tree_code compcode_to_comparison (enum comparison_code);
95 static tree combine_comparisons (enum tree_code, enum tree_code,
96 enum tree_code, tree, tree, tree);
97 static int truth_value_p (enum tree_code);
98 static int operand_equal_for_comparison_p (tree, tree, tree);
99 static int twoval_comparison_p (tree, tree *, tree *, int *);
100 static tree eval_subst (tree, tree, tree, tree, tree);
101 static tree pedantic_omit_one_operand (tree, tree, tree);
102 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
103 static tree make_bit_field_ref (tree, tree, int, int, int);
104 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
105 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
106 enum machine_mode *, int *, int *,
107 tree *, tree *);
108 static int all_ones_mask_p (tree, int);
109 static tree sign_bit_p (tree, tree);
110 static int simple_operand_p (tree);
111 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
112 static tree make_range (tree, int *, tree *, tree *);
113 static tree build_range_check (tree, tree, int, tree, tree);
114 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
115 tree);
116 static tree fold_range_test (tree);
117 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
118 static tree unextend (tree, int, int, tree);
119 static tree fold_truthop (enum tree_code, tree, tree, tree);
120 static tree optimize_minmax_comparison (tree);
121 static tree extract_muldiv (tree, tree, enum tree_code, tree);
122 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
123 static int multiple_of_p (tree, tree, tree);
124 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
125 tree, int);
126 static bool fold_real_zero_addition_p (tree, tree, int);
127 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
128 tree, tree, tree);
129 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
130 static tree fold_div_compare (enum tree_code, tree, tree, tree);
131 static bool reorder_operands_p (tree, tree);
132 static tree fold_negate_const (tree, tree);
133 static tree fold_not_const (tree, tree);
134 static tree fold_relational_const (enum tree_code, tree, tree, tree);
135 static tree fold_relational_hi_lo (enum tree_code *, const tree,
136 tree *, tree *);
137 static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
160 /* Unpack a two-word integer into 4 words.
161 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
162 WORDS points to the array of HOST_WIDE_INTs. */
164 static void
165 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
167 words[0] = LOWPART (low);
168 words[1] = HIGHPART (low);
169 words[2] = LOWPART (hi);
170 words[3] = HIGHPART (hi);
173 /* Pack an array of 4 words into a two-word integer.
174 WORDS points to the array of words.
175 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
177 static void
178 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
179 HOST_WIDE_INT *hi)
181 *low = words[0] + words[1] * BASE;
182 *hi = words[2] + words[3] * BASE;
185 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
186 in overflow of the value, when >0 we are only interested in signed
187 overflow, for <0 we are interested in any overflow. OVERFLOWED
188 indicates whether overflow has already occurred. CONST_OVERFLOWED
189 indicates whether constant overflow has already occurred. We force
190 T's value to be within range of T's type (by setting to 0 or 1 all
191 the bits outside the type's range). We set TREE_OVERFLOWED if,
192 OVERFLOWED is nonzero,
193 or OVERFLOWABLE is >0 and signed overflow occurs
194 or OVERFLOWABLE is <0 and any overflow occurs
195 We set TREE_CONSTANT_OVERFLOWED if,
196 CONST_OVERFLOWED is nonzero
197 or we set TREE_OVERFLOWED.
198 We return either the original T, or a copy. */
200 tree
201 force_fit_type (tree t, int overflowable,
202 bool overflowed, bool overflowed_const)
204 unsigned HOST_WIDE_INT low;
205 HOST_WIDE_INT high;
206 unsigned int prec;
207 int sign_extended_type;
209 gcc_assert (TREE_CODE (t) == INTEGER_CST);
211 low = TREE_INT_CST_LOW (t);
212 high = TREE_INT_CST_HIGH (t);
214 if (POINTER_TYPE_P (TREE_TYPE (t))
215 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
216 prec = POINTER_SIZE;
217 else
218 prec = TYPE_PRECISION (TREE_TYPE (t));
219 /* Size types *are* sign extended. */
220 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
221 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
222 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
224 /* First clear all bits that are beyond the type's precision. */
226 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
228 else if (prec > HOST_BITS_PER_WIDE_INT)
229 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
230 else
232 high = 0;
233 if (prec < HOST_BITS_PER_WIDE_INT)
234 low &= ~((HOST_WIDE_INT) (-1) << prec);
237 if (!sign_extended_type)
238 /* No sign extension */;
239 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
240 /* Correct width already. */;
241 else if (prec > HOST_BITS_PER_WIDE_INT)
243 /* Sign extend top half? */
244 if (high & ((unsigned HOST_WIDE_INT)1
245 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
246 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
248 else if (prec == HOST_BITS_PER_WIDE_INT)
250 if ((HOST_WIDE_INT)low < 0)
251 high = -1;
253 else
255 /* Sign extend bottom half? */
256 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
258 high = -1;
259 low |= (HOST_WIDE_INT)(-1) << prec;
263 /* If the value changed, return a new node. */
264 if (overflowed || overflowed_const
265 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
267 t = build_int_cst_wide (TREE_TYPE (t), low, high);
269 if (overflowed
270 || overflowable < 0
271 || (overflowable > 0 && sign_extended_type))
273 t = copy_node (t);
274 TREE_OVERFLOW (t) = 1;
275 TREE_CONSTANT_OVERFLOW (t) = 1;
277 else if (overflowed_const)
279 t = copy_node (t);
280 TREE_CONSTANT_OVERFLOW (t) = 1;
284 return t;
287 /* Add two doubleword integers with doubleword result.
288 Each argument is given as two `HOST_WIDE_INT' pieces.
289 One argument is L1 and H1; the other, L2 and H2.
290 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
293 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
294 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
295 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
297 unsigned HOST_WIDE_INT l;
298 HOST_WIDE_INT h;
300 l = l1 + l2;
301 h = h1 + h2 + (l < l1);
303 *lv = l;
304 *hv = h;
305 return OVERFLOW_SUM_SIGN (h1, h2, h);
308 /* Negate a doubleword integer with doubleword result.
309 Return nonzero if the operation overflows, assuming it's signed.
310 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
311 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
314 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
315 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
317 if (l1 == 0)
319 *lv = 0;
320 *hv = - h1;
321 return (*hv & h1) < 0;
323 else
325 *lv = -l1;
326 *hv = ~h1;
327 return 0;
331 /* Multiply two doubleword integers with doubleword result.
332 Return nonzero if the operation overflows, assuming it's signed.
333 Each argument is given as two `HOST_WIDE_INT' pieces.
334 One argument is L1 and H1; the other, L2 and H2.
335 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
338 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
339 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
340 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
342 HOST_WIDE_INT arg1[4];
343 HOST_WIDE_INT arg2[4];
344 HOST_WIDE_INT prod[4 * 2];
345 unsigned HOST_WIDE_INT carry;
346 int i, j, k;
347 unsigned HOST_WIDE_INT toplow, neglow;
348 HOST_WIDE_INT tophigh, neghigh;
350 encode (arg1, l1, h1);
351 encode (arg2, l2, h2);
353 memset (prod, 0, sizeof prod);
355 for (i = 0; i < 4; i++)
357 carry = 0;
358 for (j = 0; j < 4; j++)
360 k = i + j;
361 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
362 carry += arg1[i] * arg2[j];
363 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
364 carry += prod[k];
365 prod[k] = LOWPART (carry);
366 carry = HIGHPART (carry);
368 prod[i + 4] = carry;
371 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
373 /* Check for overflow by calculating the top half of the answer in full;
374 it should agree with the low half's sign bit. */
375 decode (prod + 4, &toplow, &tophigh);
376 if (h1 < 0)
378 neg_double (l2, h2, &neglow, &neghigh);
379 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
381 if (h2 < 0)
383 neg_double (l1, h1, &neglow, &neghigh);
384 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
386 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
389 /* Shift the doubleword integer in L1, H1 left by COUNT places
390 keeping only PREC bits of result.
391 Shift right if COUNT is negative.
392 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
393 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
395 void
396 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
397 HOST_WIDE_INT count, unsigned int prec,
398 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
400 unsigned HOST_WIDE_INT signmask;
402 if (count < 0)
404 rshift_double (l1, h1, -count, prec, lv, hv, arith);
405 return;
408 if (SHIFT_COUNT_TRUNCATED)
409 count %= prec;
411 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
413 /* Shifting by the host word size is undefined according to the
414 ANSI standard, so we must handle this as a special case. */
415 *hv = 0;
416 *lv = 0;
418 else if (count >= HOST_BITS_PER_WIDE_INT)
420 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
421 *lv = 0;
423 else
425 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
426 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
427 *lv = l1 << count;
430 /* Sign extend all bits that are beyond the precision. */
432 signmask = -((prec > HOST_BITS_PER_WIDE_INT
433 ? ((unsigned HOST_WIDE_INT) *hv
434 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
435 : (*lv >> (prec - 1))) & 1);
437 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
439 else if (prec >= HOST_BITS_PER_WIDE_INT)
441 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
442 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
444 else
446 *hv = signmask;
447 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
448 *lv |= signmask << prec;
452 /* Shift the doubleword integer in L1, H1 right by COUNT places
453 keeping only PREC bits of result. COUNT must be positive.
454 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
455 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
457 void
458 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
459 HOST_WIDE_INT count, unsigned int prec,
460 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
461 int arith)
463 unsigned HOST_WIDE_INT signmask;
465 signmask = (arith
466 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
467 : 0);
469 if (SHIFT_COUNT_TRUNCATED)
470 count %= prec;
472 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
474 /* Shifting by the host word size is undefined according to the
475 ANSI standard, so we must handle this as a special case. */
476 *hv = 0;
477 *lv = 0;
479 else if (count >= HOST_BITS_PER_WIDE_INT)
481 *hv = 0;
482 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
484 else
486 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
487 *lv = ((l1 >> count)
488 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
491 /* Zero / sign extend all bits that are beyond the precision. */
493 if (count >= (HOST_WIDE_INT)prec)
495 *hv = signmask;
496 *lv = signmask;
498 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
500 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
502 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
503 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
505 else
507 *hv = signmask;
508 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
509 *lv |= signmask << (prec - count);
513 /* Rotate the doubleword integer in L1, H1 left by COUNT places
514 keeping only PREC bits of result.
515 Rotate right if COUNT is negative.
516 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
518 void
519 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
520 HOST_WIDE_INT count, unsigned int prec,
521 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
523 unsigned HOST_WIDE_INT s1l, s2l;
524 HOST_WIDE_INT s1h, s2h;
526 count %= prec;
527 if (count < 0)
528 count += prec;
530 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
531 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
532 *lv = s1l | s2l;
533 *hv = s1h | s2h;
536 /* Rotate the doubleword integer in L1, H1 left by COUNT places
537 keeping only PREC bits of result. COUNT must be positive.
538 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
540 void
541 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
542 HOST_WIDE_INT count, unsigned int prec,
543 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
545 unsigned HOST_WIDE_INT s1l, s2l;
546 HOST_WIDE_INT s1h, s2h;
548 count %= prec;
549 if (count < 0)
550 count += prec;
552 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
553 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
554 *lv = s1l | s2l;
555 *hv = s1h | s2h;
558 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
559 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
560 CODE is a tree code for a kind of division, one of
561 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
562 or EXACT_DIV_EXPR
563 It controls how the quotient is rounded to an integer.
564 Return nonzero if the operation overflows.
565 UNS nonzero says do unsigned division. */
568 div_and_round_double (enum tree_code code, int uns,
569 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
570 HOST_WIDE_INT hnum_orig,
571 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
572 HOST_WIDE_INT hden_orig,
573 unsigned HOST_WIDE_INT *lquo,
574 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
575 HOST_WIDE_INT *hrem)
577 int quo_neg = 0;
578 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
579 HOST_WIDE_INT den[4], quo[4];
580 int i, j;
581 unsigned HOST_WIDE_INT work;
582 unsigned HOST_WIDE_INT carry = 0;
583 unsigned HOST_WIDE_INT lnum = lnum_orig;
584 HOST_WIDE_INT hnum = hnum_orig;
585 unsigned HOST_WIDE_INT lden = lden_orig;
586 HOST_WIDE_INT hden = hden_orig;
587 int overflow = 0;
589 if (hden == 0 && lden == 0)
590 overflow = 1, lden = 1;
592 /* Calculate quotient sign and convert operands to unsigned. */
593 if (!uns)
595 if (hnum < 0)
597 quo_neg = ~ quo_neg;
598 /* (minimum integer) / (-1) is the only overflow case. */
599 if (neg_double (lnum, hnum, &lnum, &hnum)
600 && ((HOST_WIDE_INT) lden & hden) == -1)
601 overflow = 1;
603 if (hden < 0)
605 quo_neg = ~ quo_neg;
606 neg_double (lden, hden, &lden, &hden);
610 if (hnum == 0 && hden == 0)
611 { /* single precision */
612 *hquo = *hrem = 0;
613 /* This unsigned division rounds toward zero. */
614 *lquo = lnum / lden;
615 goto finish_up;
618 if (hnum == 0)
619 { /* trivial case: dividend < divisor */
620 /* hden != 0 already checked. */
621 *hquo = *lquo = 0;
622 *hrem = hnum;
623 *lrem = lnum;
624 goto finish_up;
627 memset (quo, 0, sizeof quo);
629 memset (num, 0, sizeof num); /* to zero 9th element */
630 memset (den, 0, sizeof den);
632 encode (num, lnum, hnum);
633 encode (den, lden, hden);
635 /* Special code for when the divisor < BASE. */
636 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
638 /* hnum != 0 already checked. */
639 for (i = 4 - 1; i >= 0; i--)
641 work = num[i] + carry * BASE;
642 quo[i] = work / lden;
643 carry = work % lden;
646 else
648 /* Full double precision division,
649 with thanks to Don Knuth's "Seminumerical Algorithms". */
650 int num_hi_sig, den_hi_sig;
651 unsigned HOST_WIDE_INT quo_est, scale;
653 /* Find the highest nonzero divisor digit. */
654 for (i = 4 - 1;; i--)
655 if (den[i] != 0)
657 den_hi_sig = i;
658 break;
661 /* Insure that the first digit of the divisor is at least BASE/2.
662 This is required by the quotient digit estimation algorithm. */
664 scale = BASE / (den[den_hi_sig] + 1);
665 if (scale > 1)
666 { /* scale divisor and dividend */
667 carry = 0;
668 for (i = 0; i <= 4 - 1; i++)
670 work = (num[i] * scale) + carry;
671 num[i] = LOWPART (work);
672 carry = HIGHPART (work);
675 num[4] = carry;
676 carry = 0;
677 for (i = 0; i <= 4 - 1; i++)
679 work = (den[i] * scale) + carry;
680 den[i] = LOWPART (work);
681 carry = HIGHPART (work);
682 if (den[i] != 0) den_hi_sig = i;
686 num_hi_sig = 4;
688 /* Main loop */
689 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
691 /* Guess the next quotient digit, quo_est, by dividing the first
692 two remaining dividend digits by the high order quotient digit.
693 quo_est is never low and is at most 2 high. */
694 unsigned HOST_WIDE_INT tmp;
696 num_hi_sig = i + den_hi_sig + 1;
697 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
698 if (num[num_hi_sig] != den[den_hi_sig])
699 quo_est = work / den[den_hi_sig];
700 else
701 quo_est = BASE - 1;
703 /* Refine quo_est so it's usually correct, and at most one high. */
704 tmp = work - quo_est * den[den_hi_sig];
705 if (tmp < BASE
706 && (den[den_hi_sig - 1] * quo_est
707 > (tmp * BASE + num[num_hi_sig - 2])))
708 quo_est--;
710 /* Try QUO_EST as the quotient digit, by multiplying the
711 divisor by QUO_EST and subtracting from the remaining dividend.
712 Keep in mind that QUO_EST is the I - 1st digit. */
714 carry = 0;
715 for (j = 0; j <= den_hi_sig; j++)
717 work = quo_est * den[j] + carry;
718 carry = HIGHPART (work);
719 work = num[i + j] - LOWPART (work);
720 num[i + j] = LOWPART (work);
721 carry += HIGHPART (work) != 0;
724 /* If quo_est was high by one, then num[i] went negative and
725 we need to correct things. */
726 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
728 quo_est--;
729 carry = 0; /* add divisor back in */
730 for (j = 0; j <= den_hi_sig; j++)
732 work = num[i + j] + den[j] + carry;
733 carry = HIGHPART (work);
734 num[i + j] = LOWPART (work);
737 num [num_hi_sig] += carry;
740 /* Store the quotient digit. */
741 quo[i] = quo_est;
745 decode (quo, lquo, hquo);
747 finish_up:
748 /* If result is negative, make it so. */
749 if (quo_neg)
750 neg_double (*lquo, *hquo, lquo, hquo);
752 /* Compute trial remainder: rem = num - (quo * den) */
753 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
754 neg_double (*lrem, *hrem, lrem, hrem);
755 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
757 switch (code)
759 case TRUNC_DIV_EXPR:
760 case TRUNC_MOD_EXPR: /* round toward zero */
761 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
762 return overflow;
764 case FLOOR_DIV_EXPR:
765 case FLOOR_MOD_EXPR: /* round toward negative infinity */
766 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
768 /* quo = quo - 1; */
769 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
770 lquo, hquo);
772 else
773 return overflow;
774 break;
776 case CEIL_DIV_EXPR:
777 case CEIL_MOD_EXPR: /* round toward positive infinity */
778 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
780 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
781 lquo, hquo);
783 else
784 return overflow;
785 break;
787 case ROUND_DIV_EXPR:
788 case ROUND_MOD_EXPR: /* round to closest integer */
790 unsigned HOST_WIDE_INT labs_rem = *lrem;
791 HOST_WIDE_INT habs_rem = *hrem;
792 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
793 HOST_WIDE_INT habs_den = hden, htwice;
795 /* Get absolute values. */
796 if (*hrem < 0)
797 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
798 if (hden < 0)
799 neg_double (lden, hden, &labs_den, &habs_den);
801 /* If (2 * abs (lrem) >= abs (lden)) */
802 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
803 labs_rem, habs_rem, &ltwice, &htwice);
805 if (((unsigned HOST_WIDE_INT) habs_den
806 < (unsigned HOST_WIDE_INT) htwice)
807 || (((unsigned HOST_WIDE_INT) habs_den
808 == (unsigned HOST_WIDE_INT) htwice)
809 && (labs_den < ltwice)))
811 if (*hquo < 0)
812 /* quo = quo - 1; */
813 add_double (*lquo, *hquo,
814 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
815 else
816 /* quo = quo + 1; */
817 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
818 lquo, hquo);
820 else
821 return overflow;
823 break;
825 default:
826 gcc_unreachable ();
829 /* Compute true remainder: rem = num - (quo * den) */
830 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
831 neg_double (*lrem, *hrem, lrem, hrem);
832 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
833 return overflow;
836 /* Return true if built-in mathematical function specified by CODE
837 preserves the sign of it argument, i.e. -f(x) == f(-x). */
839 static bool
840 negate_mathfn_p (enum built_in_function code)
842 switch (code)
844 case BUILT_IN_ASIN:
845 case BUILT_IN_ASINF:
846 case BUILT_IN_ASINL:
847 case BUILT_IN_ATAN:
848 case BUILT_IN_ATANF:
849 case BUILT_IN_ATANL:
850 case BUILT_IN_SIN:
851 case BUILT_IN_SINF:
852 case BUILT_IN_SINL:
853 case BUILT_IN_TAN:
854 case BUILT_IN_TANF:
855 case BUILT_IN_TANL:
856 return true;
858 default:
859 break;
861 return false;
864 /* Check whether we may negate an integer constant T without causing
865 overflow. */
867 bool
868 may_negate_without_overflow_p (tree t)
870 unsigned HOST_WIDE_INT val;
871 unsigned int prec;
872 tree type;
874 gcc_assert (TREE_CODE (t) == INTEGER_CST);
876 type = TREE_TYPE (t);
877 if (TYPE_UNSIGNED (type))
878 return false;
880 prec = TYPE_PRECISION (type);
881 if (prec > HOST_BITS_PER_WIDE_INT)
883 if (TREE_INT_CST_LOW (t) != 0)
884 return true;
885 prec -= HOST_BITS_PER_WIDE_INT;
886 val = TREE_INT_CST_HIGH (t);
888 else
889 val = TREE_INT_CST_LOW (t);
890 if (prec < HOST_BITS_PER_WIDE_INT)
891 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
892 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
895 /* Determine whether an expression T can be cheaply negated using
896 the function negate_expr. */
898 static bool
899 negate_expr_p (tree t)
901 tree type;
903 if (t == 0)
904 return false;
906 type = TREE_TYPE (t);
908 STRIP_SIGN_NOPS (t);
909 switch (TREE_CODE (t))
911 case INTEGER_CST:
912 if (TYPE_UNSIGNED (type) || ! flag_trapv)
913 return true;
915 /* Check that -CST will not overflow type. */
916 return may_negate_without_overflow_p (t);
918 case REAL_CST:
919 case NEGATE_EXPR:
920 return true;
922 case COMPLEX_CST:
923 return negate_expr_p (TREE_REALPART (t))
924 && negate_expr_p (TREE_IMAGPART (t));
926 case PLUS_EXPR:
927 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
928 return false;
929 /* -(A + B) -> (-B) - A. */
930 if (negate_expr_p (TREE_OPERAND (t, 1))
931 && reorder_operands_p (TREE_OPERAND (t, 0),
932 TREE_OPERAND (t, 1)))
933 return true;
934 /* -(A + B) -> (-A) - B. */
935 return negate_expr_p (TREE_OPERAND (t, 0));
937 case MINUS_EXPR:
938 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
939 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
940 && reorder_operands_p (TREE_OPERAND (t, 0),
941 TREE_OPERAND (t, 1));
943 case MULT_EXPR:
944 if (TYPE_UNSIGNED (TREE_TYPE (t)))
945 break;
947 /* Fall through. */
949 case RDIV_EXPR:
950 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
951 return negate_expr_p (TREE_OPERAND (t, 1))
952 || negate_expr_p (TREE_OPERAND (t, 0));
953 break;
955 case NOP_EXPR:
956 /* Negate -((double)float) as (double)(-float). */
957 if (TREE_CODE (type) == REAL_TYPE)
959 tree tem = strip_float_extensions (t);
960 if (tem != t)
961 return negate_expr_p (tem);
963 break;
965 case CALL_EXPR:
966 /* Negate -f(x) as f(-x). */
967 if (negate_mathfn_p (builtin_mathfn_code (t)))
968 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
969 break;
971 case RSHIFT_EXPR:
972 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
973 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
975 tree op1 = TREE_OPERAND (t, 1);
976 if (TREE_INT_CST_HIGH (op1) == 0
977 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
978 == TREE_INT_CST_LOW (op1))
979 return true;
981 break;
983 default:
984 break;
986 return false;
989 /* Given T, an expression, return the negation of T. Allow for T to be
990 null, in which case return null. */
992 static tree
993 negate_expr (tree t)
995 tree type;
996 tree tem;
998 if (t == 0)
999 return 0;
1001 type = TREE_TYPE (t);
1002 STRIP_SIGN_NOPS (t);
1004 switch (TREE_CODE (t))
1006 case INTEGER_CST:
1007 tem = fold_negate_const (t, type);
1008 if (! TREE_OVERFLOW (tem)
1009 || TYPE_UNSIGNED (type)
1010 || ! flag_trapv)
1011 return tem;
1012 break;
1014 case REAL_CST:
1015 tem = fold_negate_const (t, type);
1016 /* Two's complement FP formats, such as c4x, may overflow. */
1017 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1018 return fold_convert (type, tem);
1019 break;
1021 case COMPLEX_CST:
1023 tree rpart = negate_expr (TREE_REALPART (t));
1024 tree ipart = negate_expr (TREE_IMAGPART (t));
1026 if ((TREE_CODE (rpart) == REAL_CST
1027 && TREE_CODE (ipart) == REAL_CST)
1028 || (TREE_CODE (rpart) == INTEGER_CST
1029 && TREE_CODE (ipart) == INTEGER_CST))
1030 return build_complex (type, rpart, ipart);
1032 break;
1034 case NEGATE_EXPR:
1035 return fold_convert (type, TREE_OPERAND (t, 0));
1037 case PLUS_EXPR:
1038 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1040 /* -(A + B) -> (-B) - A. */
1041 if (negate_expr_p (TREE_OPERAND (t, 1))
1042 && reorder_operands_p (TREE_OPERAND (t, 0),
1043 TREE_OPERAND (t, 1)))
1045 tem = negate_expr (TREE_OPERAND (t, 1));
1046 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1047 tem, TREE_OPERAND (t, 0)));
1048 return fold_convert (type, tem);
1051 /* -(A + B) -> (-A) - B. */
1052 if (negate_expr_p (TREE_OPERAND (t, 0)))
1054 tem = negate_expr (TREE_OPERAND (t, 0));
1055 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1056 tem, TREE_OPERAND (t, 1)));
1057 return fold_convert (type, tem);
1060 break;
1062 case MINUS_EXPR:
1063 /* - (A - B) -> B - A */
1064 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1065 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1066 return fold_convert (type,
1067 fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1068 TREE_OPERAND (t, 1),
1069 TREE_OPERAND (t, 0))));
1070 break;
1072 case MULT_EXPR:
1073 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1074 break;
1076 /* Fall through. */
1078 case RDIV_EXPR:
1079 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1081 tem = TREE_OPERAND (t, 1);
1082 if (negate_expr_p (tem))
1083 return fold_convert (type,
1084 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1085 TREE_OPERAND (t, 0),
1086 negate_expr (tem))));
1087 tem = TREE_OPERAND (t, 0);
1088 if (negate_expr_p (tem))
1089 return fold_convert (type,
1090 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1091 negate_expr (tem),
1092 TREE_OPERAND (t, 1))));
1094 break;
1096 case NOP_EXPR:
1097 /* Convert -((double)float) into (double)(-float). */
1098 if (TREE_CODE (type) == REAL_TYPE)
1100 tem = strip_float_extensions (t);
1101 if (tem != t && negate_expr_p (tem))
1102 return fold_convert (type, negate_expr (tem));
1104 break;
1106 case CALL_EXPR:
1107 /* Negate -f(x) as f(-x). */
1108 if (negate_mathfn_p (builtin_mathfn_code (t))
1109 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1111 tree fndecl, arg, arglist;
1113 fndecl = get_callee_fndecl (t);
1114 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1115 arglist = build_tree_list (NULL_TREE, arg);
1116 return build_function_call_expr (fndecl, arglist);
1118 break;
1120 case RSHIFT_EXPR:
1121 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1122 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1124 tree op1 = TREE_OPERAND (t, 1);
1125 if (TREE_INT_CST_HIGH (op1) == 0
1126 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1127 == TREE_INT_CST_LOW (op1))
1129 tree ntype = TYPE_UNSIGNED (type)
1130 ? lang_hooks.types.signed_type (type)
1131 : lang_hooks.types.unsigned_type (type);
1132 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1133 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1134 return fold_convert (type, temp);
1137 break;
1139 default:
1140 break;
1143 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1144 return fold_convert (type, tem);
1147 /* Split a tree IN into a constant, literal and variable parts that could be
1148 combined with CODE to make IN. "constant" means an expression with
1149 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1150 commutative arithmetic operation. Store the constant part into *CONP,
1151 the literal in *LITP and return the variable part. If a part isn't
1152 present, set it to null. If the tree does not decompose in this way,
1153 return the entire tree as the variable part and the other parts as null.
1155 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1156 case, we negate an operand that was subtracted. Except if it is a
1157 literal for which we use *MINUS_LITP instead.
1159 If NEGATE_P is true, we are negating all of IN, again except a literal
1160 for which we use *MINUS_LITP instead.
1162 If IN is itself a literal or constant, return it as appropriate.
1164 Note that we do not guarantee that any of the three values will be the
1165 same type as IN, but they will have the same signedness and mode. */
1167 static tree
1168 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1169 tree *minus_litp, int negate_p)
1171 tree var = 0;
1173 *conp = 0;
1174 *litp = 0;
1175 *minus_litp = 0;
1177 /* Strip any conversions that don't change the machine mode or signedness. */
1178 STRIP_SIGN_NOPS (in);
1180 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1181 *litp = in;
1182 else if (TREE_CODE (in) == code
1183 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1184 /* We can associate addition and subtraction together (even
1185 though the C standard doesn't say so) for integers because
1186 the value is not affected. For reals, the value might be
1187 affected, so we can't. */
1188 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1189 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1191 tree op0 = TREE_OPERAND (in, 0);
1192 tree op1 = TREE_OPERAND (in, 1);
1193 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1194 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1196 /* First see if either of the operands is a literal, then a constant. */
1197 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1198 *litp = op0, op0 = 0;
1199 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1200 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1202 if (op0 != 0 && TREE_CONSTANT (op0))
1203 *conp = op0, op0 = 0;
1204 else if (op1 != 0 && TREE_CONSTANT (op1))
1205 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1207 /* If we haven't dealt with either operand, this is not a case we can
1208 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1209 if (op0 != 0 && op1 != 0)
1210 var = in;
1211 else if (op0 != 0)
1212 var = op0;
1213 else
1214 var = op1, neg_var_p = neg1_p;
1216 /* Now do any needed negations. */
1217 if (neg_litp_p)
1218 *minus_litp = *litp, *litp = 0;
1219 if (neg_conp_p)
1220 *conp = negate_expr (*conp);
1221 if (neg_var_p)
1222 var = negate_expr (var);
1224 else if (TREE_CONSTANT (in))
1225 *conp = in;
1226 else
1227 var = in;
1229 if (negate_p)
1231 if (*litp)
1232 *minus_litp = *litp, *litp = 0;
1233 else if (*minus_litp)
1234 *litp = *minus_litp, *minus_litp = 0;
1235 *conp = negate_expr (*conp);
1236 var = negate_expr (var);
1239 return var;
1242 /* Re-associate trees split by the above function. T1 and T2 are either
1243 expressions to associate or null. Return the new expression, if any. If
1244 we build an operation, do it in TYPE and with CODE. */
1246 static tree
1247 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1249 if (t1 == 0)
1250 return t2;
1251 else if (t2 == 0)
1252 return t1;
1254 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1255 try to fold this since we will have infinite recursion. But do
1256 deal with any NEGATE_EXPRs. */
1257 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1258 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1260 if (code == PLUS_EXPR)
1262 if (TREE_CODE (t1) == NEGATE_EXPR)
1263 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1264 fold_convert (type, TREE_OPERAND (t1, 0)));
1265 else if (TREE_CODE (t2) == NEGATE_EXPR)
1266 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1267 fold_convert (type, TREE_OPERAND (t2, 0)));
1268 else if (integer_zerop (t2))
1269 return fold_convert (type, t1);
1271 else if (code == MINUS_EXPR)
1273 if (integer_zerop (t2))
1274 return fold_convert (type, t1);
1277 return build2 (code, type, fold_convert (type, t1),
1278 fold_convert (type, t2));
1281 return fold (build2 (code, type, fold_convert (type, t1),
1282 fold_convert (type, t2)));
1285 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1286 to produce a new constant.
1288 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1290 tree
1291 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1293 unsigned HOST_WIDE_INT int1l, int2l;
1294 HOST_WIDE_INT int1h, int2h;
1295 unsigned HOST_WIDE_INT low;
1296 HOST_WIDE_INT hi;
1297 unsigned HOST_WIDE_INT garbagel;
1298 HOST_WIDE_INT garbageh;
1299 tree t;
1300 tree type = TREE_TYPE (arg1);
1301 int uns = TYPE_UNSIGNED (type);
1302 int is_sizetype
1303 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1304 int overflow = 0;
1305 int no_overflow = 0;
1307 int1l = TREE_INT_CST_LOW (arg1);
1308 int1h = TREE_INT_CST_HIGH (arg1);
1309 int2l = TREE_INT_CST_LOW (arg2);
1310 int2h = TREE_INT_CST_HIGH (arg2);
1312 switch (code)
1314 case BIT_IOR_EXPR:
1315 low = int1l | int2l, hi = int1h | int2h;
1316 break;
1318 case BIT_XOR_EXPR:
1319 low = int1l ^ int2l, hi = int1h ^ int2h;
1320 break;
1322 case BIT_AND_EXPR:
1323 low = int1l & int2l, hi = int1h & int2h;
1324 break;
1326 case RSHIFT_EXPR:
1327 int2l = -int2l;
1328 case LSHIFT_EXPR:
1329 /* It's unclear from the C standard whether shifts can overflow.
1330 The following code ignores overflow; perhaps a C standard
1331 interpretation ruling is needed. */
1332 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1333 &low, &hi, !uns);
1334 no_overflow = 1;
1335 break;
1337 case RROTATE_EXPR:
1338 int2l = - int2l;
1339 case LROTATE_EXPR:
1340 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1341 &low, &hi);
1342 break;
1344 case PLUS_EXPR:
1345 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1346 break;
1348 case MINUS_EXPR:
1349 neg_double (int2l, int2h, &low, &hi);
1350 add_double (int1l, int1h, low, hi, &low, &hi);
1351 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1352 break;
1354 case MULT_EXPR:
1355 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1356 break;
1358 case TRUNC_DIV_EXPR:
1359 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1360 case EXACT_DIV_EXPR:
1361 /* This is a shortcut for a common special case. */
1362 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1363 && ! TREE_CONSTANT_OVERFLOW (arg1)
1364 && ! TREE_CONSTANT_OVERFLOW (arg2)
1365 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1367 if (code == CEIL_DIV_EXPR)
1368 int1l += int2l - 1;
1370 low = int1l / int2l, hi = 0;
1371 break;
1374 /* ... fall through ... */
1376 case ROUND_DIV_EXPR:
1377 if (int2h == 0 && int2l == 1)
1379 low = int1l, hi = int1h;
1380 break;
1382 if (int1l == int2l && int1h == int2h
1383 && ! (int1l == 0 && int1h == 0))
1385 low = 1, hi = 0;
1386 break;
1388 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1389 &low, &hi, &garbagel, &garbageh);
1390 break;
1392 case TRUNC_MOD_EXPR:
1393 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1394 /* This is a shortcut for a common special case. */
1395 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1396 && ! TREE_CONSTANT_OVERFLOW (arg1)
1397 && ! TREE_CONSTANT_OVERFLOW (arg2)
1398 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1400 if (code == CEIL_MOD_EXPR)
1401 int1l += int2l - 1;
1402 low = int1l % int2l, hi = 0;
1403 break;
1406 /* ... fall through ... */
1408 case ROUND_MOD_EXPR:
1409 overflow = div_and_round_double (code, uns,
1410 int1l, int1h, int2l, int2h,
1411 &garbagel, &garbageh, &low, &hi);
1412 break;
1414 case MIN_EXPR:
1415 case MAX_EXPR:
1416 if (uns)
1417 low = (((unsigned HOST_WIDE_INT) int1h
1418 < (unsigned HOST_WIDE_INT) int2h)
1419 || (((unsigned HOST_WIDE_INT) int1h
1420 == (unsigned HOST_WIDE_INT) int2h)
1421 && int1l < int2l));
1422 else
1423 low = (int1h < int2h
1424 || (int1h == int2h && int1l < int2l));
1426 if (low == (code == MIN_EXPR))
1427 low = int1l, hi = int1h;
1428 else
1429 low = int2l, hi = int2h;
1430 break;
1432 default:
1433 gcc_unreachable ();
1436 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1438 if (notrunc)
1440 /* Propagate overflow flags ourselves. */
1441 if (((!uns || is_sizetype) && overflow)
1442 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1444 t = copy_node (t);
1445 TREE_OVERFLOW (t) = 1;
1446 TREE_CONSTANT_OVERFLOW (t) = 1;
1448 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1450 t = copy_node (t);
1451 TREE_CONSTANT_OVERFLOW (t) = 1;
1454 else
1455 t = force_fit_type (t, 1,
1456 ((!uns || is_sizetype) && overflow)
1457 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1458 TREE_CONSTANT_OVERFLOW (arg1)
1459 | TREE_CONSTANT_OVERFLOW (arg2));
1461 return t;
1464 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1465 constant. We assume ARG1 and ARG2 have the same data type, or at least
1466 are the same kind of constant and the same machine mode.
1468 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1470 static tree
1471 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1473 STRIP_NOPS (arg1);
1474 STRIP_NOPS (arg2);
1476 if (TREE_CODE (arg1) == INTEGER_CST)
1477 return int_const_binop (code, arg1, arg2, notrunc);
1479 if (TREE_CODE (arg1) == REAL_CST)
1481 enum machine_mode mode;
1482 REAL_VALUE_TYPE d1;
1483 REAL_VALUE_TYPE d2;
1484 REAL_VALUE_TYPE value;
1485 tree t, type;
1487 d1 = TREE_REAL_CST (arg1);
1488 d2 = TREE_REAL_CST (arg2);
1490 type = TREE_TYPE (arg1);
1491 mode = TYPE_MODE (type);
1493 /* Don't perform operation if we honor signaling NaNs and
1494 either operand is a NaN. */
1495 if (HONOR_SNANS (mode)
1496 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1497 return NULL_TREE;
1499 /* Don't perform operation if it would raise a division
1500 by zero exception. */
1501 if (code == RDIV_EXPR
1502 && REAL_VALUES_EQUAL (d2, dconst0)
1503 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1504 return NULL_TREE;
1506 /* If either operand is a NaN, just return it. Otherwise, set up
1507 for floating-point trap; we return an overflow. */
1508 if (REAL_VALUE_ISNAN (d1))
1509 return arg1;
1510 else if (REAL_VALUE_ISNAN (d2))
1511 return arg2;
1513 REAL_ARITHMETIC (value, code, d1, d2);
1515 t = build_real (type, real_value_truncate (mode, value));
1517 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1518 TREE_CONSTANT_OVERFLOW (t)
1519 = TREE_OVERFLOW (t)
1520 | TREE_CONSTANT_OVERFLOW (arg1)
1521 | TREE_CONSTANT_OVERFLOW (arg2);
1522 return t;
1524 if (TREE_CODE (arg1) == COMPLEX_CST)
1526 tree type = TREE_TYPE (arg1);
1527 tree r1 = TREE_REALPART (arg1);
1528 tree i1 = TREE_IMAGPART (arg1);
1529 tree r2 = TREE_REALPART (arg2);
1530 tree i2 = TREE_IMAGPART (arg2);
1531 tree t;
1533 switch (code)
1535 case PLUS_EXPR:
1536 t = build_complex (type,
1537 const_binop (PLUS_EXPR, r1, r2, notrunc),
1538 const_binop (PLUS_EXPR, i1, i2, notrunc));
1539 break;
1541 case MINUS_EXPR:
1542 t = build_complex (type,
1543 const_binop (MINUS_EXPR, r1, r2, notrunc),
1544 const_binop (MINUS_EXPR, i1, i2, notrunc));
1545 break;
1547 case MULT_EXPR:
1548 t = build_complex (type,
1549 const_binop (MINUS_EXPR,
1550 const_binop (MULT_EXPR,
1551 r1, r2, notrunc),
1552 const_binop (MULT_EXPR,
1553 i1, i2, notrunc),
1554 notrunc),
1555 const_binop (PLUS_EXPR,
1556 const_binop (MULT_EXPR,
1557 r1, i2, notrunc),
1558 const_binop (MULT_EXPR,
1559 i1, r2, notrunc),
1560 notrunc));
1561 break;
1563 case RDIV_EXPR:
1565 tree magsquared
1566 = const_binop (PLUS_EXPR,
1567 const_binop (MULT_EXPR, r2, r2, notrunc),
1568 const_binop (MULT_EXPR, i2, i2, notrunc),
1569 notrunc);
1571 t = build_complex (type,
1572 const_binop
1573 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1574 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1575 const_binop (PLUS_EXPR,
1576 const_binop (MULT_EXPR, r1, r2,
1577 notrunc),
1578 const_binop (MULT_EXPR, i1, i2,
1579 notrunc),
1580 notrunc),
1581 magsquared, notrunc),
1582 const_binop
1583 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1584 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1585 const_binop (MINUS_EXPR,
1586 const_binop (MULT_EXPR, i1, r2,
1587 notrunc),
1588 const_binop (MULT_EXPR, r1, i2,
1589 notrunc),
1590 notrunc),
1591 magsquared, notrunc));
1593 break;
1595 default:
1596 gcc_unreachable ();
1598 return t;
1600 return 0;
1603 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1604 indicates which particular sizetype to create. */
1606 tree
1607 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1609 return build_int_cst (sizetype_tab[(int) kind], number);
1612 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1613 is a tree code. The type of the result is taken from the operands.
1614 Both must be the same type integer type and it must be a size type.
1615 If the operands are constant, so is the result. */
1617 tree
1618 size_binop (enum tree_code code, tree arg0, tree arg1)
1620 tree type = TREE_TYPE (arg0);
1622 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1623 && type == TREE_TYPE (arg1));
1625 /* Handle the special case of two integer constants faster. */
1626 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1628 /* And some specific cases even faster than that. */
1629 if (code == PLUS_EXPR && integer_zerop (arg0))
1630 return arg1;
1631 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1632 && integer_zerop (arg1))
1633 return arg0;
1634 else if (code == MULT_EXPR && integer_onep (arg0))
1635 return arg1;
1637 /* Handle general case of two integer constants. */
1638 return int_const_binop (code, arg0, arg1, 0);
1641 if (arg0 == error_mark_node || arg1 == error_mark_node)
1642 return error_mark_node;
1644 return fold (build2 (code, type, arg0, arg1));
1647 /* Given two values, either both of sizetype or both of bitsizetype,
1648 compute the difference between the two values. Return the value
1649 in signed type corresponding to the type of the operands. */
1651 tree
1652 size_diffop (tree arg0, tree arg1)
1654 tree type = TREE_TYPE (arg0);
1655 tree ctype;
1657 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1658 && type == TREE_TYPE (arg1));
1660 /* If the type is already signed, just do the simple thing. */
1661 if (!TYPE_UNSIGNED (type))
1662 return size_binop (MINUS_EXPR, arg0, arg1);
1664 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1666 /* If either operand is not a constant, do the conversions to the signed
1667 type and subtract. The hardware will do the right thing with any
1668 overflow in the subtraction. */
1669 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1670 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1671 fold_convert (ctype, arg1));
1673 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1674 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1675 overflow) and negate (which can't either). Special-case a result
1676 of zero while we're here. */
1677 if (tree_int_cst_equal (arg0, arg1))
1678 return fold_convert (ctype, integer_zero_node);
1679 else if (tree_int_cst_lt (arg1, arg0))
1680 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1681 else
1682 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1683 fold_convert (ctype, size_binop (MINUS_EXPR,
1684 arg1, arg0)));
1687 /* A subroutine of fold_convert_const handling conversions of an
1688 INTEGER_CST to another integer type. */
1690 static tree
1691 fold_convert_const_int_from_int (tree type, tree arg1)
1693 tree t;
1695 /* Given an integer constant, make new constant with new type,
1696 appropriately sign-extended or truncated. */
1697 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1698 TREE_INT_CST_HIGH (arg1));
1700 t = force_fit_type (t,
1701 /* Don't set the overflow when
1702 converting a pointer */
1703 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1704 (TREE_INT_CST_HIGH (arg1) < 0
1705 && (TYPE_UNSIGNED (type)
1706 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1707 | TREE_OVERFLOW (arg1),
1708 TREE_CONSTANT_OVERFLOW (arg1));
1710 return t;
1713 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1714 to an integer type. */
1716 static tree
1717 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1719 int overflow = 0;
1720 tree t;
1722 /* The following code implements the floating point to integer
1723 conversion rules required by the Java Language Specification,
1724 that IEEE NaNs are mapped to zero and values that overflow
1725 the target precision saturate, i.e. values greater than
1726 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1727 are mapped to INT_MIN. These semantics are allowed by the
1728 C and C++ standards that simply state that the behavior of
1729 FP-to-integer conversion is unspecified upon overflow. */
1731 HOST_WIDE_INT high, low;
1732 REAL_VALUE_TYPE r;
1733 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1735 switch (code)
1737 case FIX_TRUNC_EXPR:
1738 real_trunc (&r, VOIDmode, &x);
1739 break;
1741 case FIX_CEIL_EXPR:
1742 real_ceil (&r, VOIDmode, &x);
1743 break;
1745 case FIX_FLOOR_EXPR:
1746 real_floor (&r, VOIDmode, &x);
1747 break;
1749 case FIX_ROUND_EXPR:
1750 real_round (&r, VOIDmode, &x);
1751 break;
1753 default:
1754 gcc_unreachable ();
1757 /* If R is NaN, return zero and show we have an overflow. */
1758 if (REAL_VALUE_ISNAN (r))
1760 overflow = 1;
1761 high = 0;
1762 low = 0;
1765 /* See if R is less than the lower bound or greater than the
1766 upper bound. */
1768 if (! overflow)
1770 tree lt = TYPE_MIN_VALUE (type);
1771 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1772 if (REAL_VALUES_LESS (r, l))
1774 overflow = 1;
1775 high = TREE_INT_CST_HIGH (lt);
1776 low = TREE_INT_CST_LOW (lt);
1780 if (! overflow)
1782 tree ut = TYPE_MAX_VALUE (type);
1783 if (ut)
1785 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1786 if (REAL_VALUES_LESS (u, r))
1788 overflow = 1;
1789 high = TREE_INT_CST_HIGH (ut);
1790 low = TREE_INT_CST_LOW (ut);
1795 if (! overflow)
1796 REAL_VALUE_TO_INT (&low, &high, r);
1798 t = build_int_cst_wide (type, low, high);
1800 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1801 TREE_CONSTANT_OVERFLOW (arg1));
1802 return t;
1805 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1806 to another floating point type. */
1808 static tree
1809 fold_convert_const_real_from_real (tree type, tree arg1)
1811 tree t;
1813 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1815 /* We make a copy of ARG1 so that we don't modify an
1816 existing constant tree. */
1817 t = copy_node (arg1);
1818 TREE_TYPE (t) = type;
1819 return t;
1822 t = build_real (type,
1823 real_value_truncate (TYPE_MODE (type),
1824 TREE_REAL_CST (arg1)));
1826 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1827 TREE_CONSTANT_OVERFLOW (t)
1828 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1829 return t;
1832 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1833 type TYPE. If no simplification can be done return NULL_TREE. */
1835 static tree
1836 fold_convert_const (enum tree_code code, tree type, tree arg1)
1838 if (TREE_TYPE (arg1) == type)
1839 return arg1;
1841 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1843 if (TREE_CODE (arg1) == INTEGER_CST)
1844 return fold_convert_const_int_from_int (type, arg1);
1845 else if (TREE_CODE (arg1) == REAL_CST)
1846 return fold_convert_const_int_from_real (code, type, arg1);
1848 else if (TREE_CODE (type) == REAL_TYPE)
1850 if (TREE_CODE (arg1) == INTEGER_CST)
1851 return build_real_from_int_cst (type, arg1);
1852 if (TREE_CODE (arg1) == REAL_CST)
1853 return fold_convert_const_real_from_real (type, arg1);
1855 return NULL_TREE;
1858 /* Construct a vector of zero elements of vector type TYPE. */
1860 static tree
1861 build_zero_vector (tree type)
1863 tree elem, list;
1864 int i, units;
1866 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1867 units = TYPE_VECTOR_SUBPARTS (type);
1869 list = NULL_TREE;
1870 for (i = 0; i < units; i++)
1871 list = tree_cons (NULL_TREE, elem, list);
1872 return build_vector (type, list);
1875 /* Convert expression ARG to type TYPE. Used by the middle-end for
1876 simple conversions in preference to calling the front-end's convert. */
1878 tree
1879 fold_convert (tree type, tree arg)
1881 tree orig = TREE_TYPE (arg);
1882 tree tem;
1884 if (type == orig)
1885 return arg;
1887 if (TREE_CODE (arg) == ERROR_MARK
1888 || TREE_CODE (type) == ERROR_MARK
1889 || TREE_CODE (orig) == ERROR_MARK)
1890 return error_mark_node;
1892 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1893 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1894 TYPE_MAIN_VARIANT (orig)))
1895 return fold (build1 (NOP_EXPR, type, arg));
1897 switch (TREE_CODE (type))
1899 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1900 case POINTER_TYPE: case REFERENCE_TYPE:
1901 case OFFSET_TYPE:
1902 if (TREE_CODE (arg) == INTEGER_CST)
1904 tem = fold_convert_const (NOP_EXPR, type, arg);
1905 if (tem != NULL_TREE)
1906 return tem;
1908 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1909 || TREE_CODE (orig) == OFFSET_TYPE)
1910 return fold (build1 (NOP_EXPR, type, arg));
1911 if (TREE_CODE (orig) == COMPLEX_TYPE)
1913 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1914 return fold_convert (type, tem);
1916 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1917 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1918 return fold (build1 (NOP_EXPR, type, arg));
1920 case REAL_TYPE:
1921 if (TREE_CODE (arg) == INTEGER_CST)
1923 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1924 if (tem != NULL_TREE)
1925 return tem;
1927 else if (TREE_CODE (arg) == REAL_CST)
1929 tem = fold_convert_const (NOP_EXPR, type, arg);
1930 if (tem != NULL_TREE)
1931 return tem;
1934 switch (TREE_CODE (orig))
1936 case INTEGER_TYPE: case CHAR_TYPE:
1937 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1938 case POINTER_TYPE: case REFERENCE_TYPE:
1939 return fold (build1 (FLOAT_EXPR, type, arg));
1941 case REAL_TYPE:
1942 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1943 type, arg));
1945 case COMPLEX_TYPE:
1946 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1947 return fold_convert (type, tem);
1949 default:
1950 gcc_unreachable ();
1953 case COMPLEX_TYPE:
1954 switch (TREE_CODE (orig))
1956 case INTEGER_TYPE: case CHAR_TYPE:
1957 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1958 case POINTER_TYPE: case REFERENCE_TYPE:
1959 case REAL_TYPE:
1960 return build2 (COMPLEX_EXPR, type,
1961 fold_convert (TREE_TYPE (type), arg),
1962 fold_convert (TREE_TYPE (type), integer_zero_node));
1963 case COMPLEX_TYPE:
1965 tree rpart, ipart;
1967 if (TREE_CODE (arg) == COMPLEX_EXPR)
1969 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1970 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1971 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1974 arg = save_expr (arg);
1975 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1976 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1977 rpart = fold_convert (TREE_TYPE (type), rpart);
1978 ipart = fold_convert (TREE_TYPE (type), ipart);
1979 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1982 default:
1983 gcc_unreachable ();
1986 case VECTOR_TYPE:
1987 if (integer_zerop (arg))
1988 return build_zero_vector (type);
1989 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1990 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1991 || TREE_CODE (orig) == VECTOR_TYPE);
1992 return fold (build1 (NOP_EXPR, type, arg));
1994 case VOID_TYPE:
1995 return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
1997 default:
1998 gcc_unreachable ();
2002 /* Return an expr equal to X but certainly not valid as an lvalue. */
2004 tree
2005 non_lvalue (tree x)
2007 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2008 us. */
2009 if (in_gimple_form)
2010 return x;
2012 /* We only need to wrap lvalue tree codes. */
2013 switch (TREE_CODE (x))
2015 case VAR_DECL:
2016 case PARM_DECL:
2017 case RESULT_DECL:
2018 case LABEL_DECL:
2019 case FUNCTION_DECL:
2020 case SSA_NAME:
2022 case COMPONENT_REF:
2023 case INDIRECT_REF:
2024 case ALIGN_INDIRECT_REF:
2025 case MISALIGNED_INDIRECT_REF:
2026 case ARRAY_REF:
2027 case ARRAY_RANGE_REF:
2028 case BIT_FIELD_REF:
2029 case OBJ_TYPE_REF:
2031 case REALPART_EXPR:
2032 case IMAGPART_EXPR:
2033 case PREINCREMENT_EXPR:
2034 case PREDECREMENT_EXPR:
2035 case SAVE_EXPR:
2036 case TRY_CATCH_EXPR:
2037 case WITH_CLEANUP_EXPR:
2038 case COMPOUND_EXPR:
2039 case MODIFY_EXPR:
2040 case TARGET_EXPR:
2041 case COND_EXPR:
2042 case BIND_EXPR:
2043 case MIN_EXPR:
2044 case MAX_EXPR:
2045 break;
2047 default:
2048 /* Assume the worst for front-end tree codes. */
2049 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2050 break;
2051 return x;
2053 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2056 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2057 Zero means allow extended lvalues. */
2059 int pedantic_lvalues;
2061 /* When pedantic, return an expr equal to X but certainly not valid as a
2062 pedantic lvalue. Otherwise, return X. */
2064 static tree
2065 pedantic_non_lvalue (tree x)
2067 if (pedantic_lvalues)
2068 return non_lvalue (x);
2069 else
2070 return x;
2073 /* Given a tree comparison code, return the code that is the logical inverse
2074 of the given code. It is not safe to do this for floating-point
2075 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2076 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2078 static enum tree_code
2079 invert_tree_comparison (enum tree_code code, bool honor_nans)
2081 if (honor_nans && flag_trapping_math)
2082 return ERROR_MARK;
2084 switch (code)
2086 case EQ_EXPR:
2087 return NE_EXPR;
2088 case NE_EXPR:
2089 return EQ_EXPR;
2090 case GT_EXPR:
2091 return honor_nans ? UNLE_EXPR : LE_EXPR;
2092 case GE_EXPR:
2093 return honor_nans ? UNLT_EXPR : LT_EXPR;
2094 case LT_EXPR:
2095 return honor_nans ? UNGE_EXPR : GE_EXPR;
2096 case LE_EXPR:
2097 return honor_nans ? UNGT_EXPR : GT_EXPR;
2098 case LTGT_EXPR:
2099 return UNEQ_EXPR;
2100 case UNEQ_EXPR:
2101 return LTGT_EXPR;
2102 case UNGT_EXPR:
2103 return LE_EXPR;
2104 case UNGE_EXPR:
2105 return LT_EXPR;
2106 case UNLT_EXPR:
2107 return GE_EXPR;
2108 case UNLE_EXPR:
2109 return GT_EXPR;
2110 case ORDERED_EXPR:
2111 return UNORDERED_EXPR;
2112 case UNORDERED_EXPR:
2113 return ORDERED_EXPR;
2114 default:
2115 gcc_unreachable ();
2119 /* Similar, but return the comparison that results if the operands are
2120 swapped. This is safe for floating-point. */
2122 enum tree_code
2123 swap_tree_comparison (enum tree_code code)
2125 switch (code)
2127 case EQ_EXPR:
2128 case NE_EXPR:
2129 return code;
2130 case GT_EXPR:
2131 return LT_EXPR;
2132 case GE_EXPR:
2133 return LE_EXPR;
2134 case LT_EXPR:
2135 return GT_EXPR;
2136 case LE_EXPR:
2137 return GE_EXPR;
2138 default:
2139 gcc_unreachable ();
2144 /* Convert a comparison tree code from an enum tree_code representation
2145 into a compcode bit-based encoding. This function is the inverse of
2146 compcode_to_comparison. */
2148 static enum comparison_code
2149 comparison_to_compcode (enum tree_code code)
2151 switch (code)
2153 case LT_EXPR:
2154 return COMPCODE_LT;
2155 case EQ_EXPR:
2156 return COMPCODE_EQ;
2157 case LE_EXPR:
2158 return COMPCODE_LE;
2159 case GT_EXPR:
2160 return COMPCODE_GT;
2161 case NE_EXPR:
2162 return COMPCODE_NE;
2163 case GE_EXPR:
2164 return COMPCODE_GE;
2165 case ORDERED_EXPR:
2166 return COMPCODE_ORD;
2167 case UNORDERED_EXPR:
2168 return COMPCODE_UNORD;
2169 case UNLT_EXPR:
2170 return COMPCODE_UNLT;
2171 case UNEQ_EXPR:
2172 return COMPCODE_UNEQ;
2173 case UNLE_EXPR:
2174 return COMPCODE_UNLE;
2175 case UNGT_EXPR:
2176 return COMPCODE_UNGT;
2177 case LTGT_EXPR:
2178 return COMPCODE_LTGT;
2179 case UNGE_EXPR:
2180 return COMPCODE_UNGE;
2181 default:
2182 gcc_unreachable ();
2186 /* Convert a compcode bit-based encoding of a comparison operator back
2187 to GCC's enum tree_code representation. This function is the
2188 inverse of comparison_to_compcode. */
2190 static enum tree_code
2191 compcode_to_comparison (enum comparison_code code)
2193 switch (code)
2195 case COMPCODE_LT:
2196 return LT_EXPR;
2197 case COMPCODE_EQ:
2198 return EQ_EXPR;
2199 case COMPCODE_LE:
2200 return LE_EXPR;
2201 case COMPCODE_GT:
2202 return GT_EXPR;
2203 case COMPCODE_NE:
2204 return NE_EXPR;
2205 case COMPCODE_GE:
2206 return GE_EXPR;
2207 case COMPCODE_ORD:
2208 return ORDERED_EXPR;
2209 case COMPCODE_UNORD:
2210 return UNORDERED_EXPR;
2211 case COMPCODE_UNLT:
2212 return UNLT_EXPR;
2213 case COMPCODE_UNEQ:
2214 return UNEQ_EXPR;
2215 case COMPCODE_UNLE:
2216 return UNLE_EXPR;
2217 case COMPCODE_UNGT:
2218 return UNGT_EXPR;
2219 case COMPCODE_LTGT:
2220 return LTGT_EXPR;
2221 case COMPCODE_UNGE:
2222 return UNGE_EXPR;
2223 default:
2224 gcc_unreachable ();
2228 /* Return a tree for the comparison which is the combination of
2229 doing the AND or OR (depending on CODE) of the two operations LCODE
2230 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2231 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2232 if this makes the transformation invalid. */
2234 tree
2235 combine_comparisons (enum tree_code code, enum tree_code lcode,
2236 enum tree_code rcode, tree truth_type,
2237 tree ll_arg, tree lr_arg)
2239 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2240 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2241 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2242 enum comparison_code compcode;
2244 switch (code)
2246 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2247 compcode = lcompcode & rcompcode;
2248 break;
2250 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2251 compcode = lcompcode | rcompcode;
2252 break;
2254 default:
2255 return NULL_TREE;
2258 if (!honor_nans)
2260 /* Eliminate unordered comparisons, as well as LTGT and ORD
2261 which are not used unless the mode has NaNs. */
2262 compcode &= ~COMPCODE_UNORD;
2263 if (compcode == COMPCODE_LTGT)
2264 compcode = COMPCODE_NE;
2265 else if (compcode == COMPCODE_ORD)
2266 compcode = COMPCODE_TRUE;
2268 else if (flag_trapping_math)
2270 /* Check that the original operation and the optimized ones will trap
2271 under the same condition. */
2272 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2273 && (lcompcode != COMPCODE_EQ)
2274 && (lcompcode != COMPCODE_ORD);
2275 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2276 && (rcompcode != COMPCODE_EQ)
2277 && (rcompcode != COMPCODE_ORD);
2278 bool trap = (compcode & COMPCODE_UNORD) == 0
2279 && (compcode != COMPCODE_EQ)
2280 && (compcode != COMPCODE_ORD);
2282 /* In a short-circuited boolean expression the LHS might be
2283 such that the RHS, if evaluated, will never trap. For
2284 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2285 if neither x nor y is NaN. (This is a mixed blessing: for
2286 example, the expression above will never trap, hence
2287 optimizing it to x < y would be invalid). */
2288 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2289 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2290 rtrap = false;
2292 /* If the comparison was short-circuited, and only the RHS
2293 trapped, we may now generate a spurious trap. */
2294 if (rtrap && !ltrap
2295 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2296 return NULL_TREE;
2298 /* If we changed the conditions that cause a trap, we lose. */
2299 if ((ltrap || rtrap) != trap)
2300 return NULL_TREE;
2303 if (compcode == COMPCODE_TRUE)
2304 return constant_boolean_node (true, truth_type);
2305 else if (compcode == COMPCODE_FALSE)
2306 return constant_boolean_node (false, truth_type);
2307 else
2308 return fold (build2 (compcode_to_comparison (compcode),
2309 truth_type, ll_arg, lr_arg));
2312 /* Return nonzero if CODE is a tree code that represents a truth value. */
2314 static int
2315 truth_value_p (enum tree_code code)
2317 return (TREE_CODE_CLASS (code) == tcc_comparison
2318 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2319 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2320 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2323 /* Return nonzero if two operands (typically of the same tree node)
2324 are necessarily equal. If either argument has side-effects this
2325 function returns zero. FLAGS modifies behavior as follows:
2327 If OEP_ONLY_CONST is set, only return nonzero for constants.
2328 This function tests whether the operands are indistinguishable;
2329 it does not test whether they are equal using C's == operation.
2330 The distinction is important for IEEE floating point, because
2331 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2332 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2334 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2335 even though it may hold multiple values during a function.
2336 This is because a GCC tree node guarantees that nothing else is
2337 executed between the evaluation of its "operands" (which may often
2338 be evaluated in arbitrary order). Hence if the operands themselves
2339 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2340 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2341 unset means assuming isochronic (or instantaneous) tree equivalence.
2342 Unless comparing arbitrary expression trees, such as from different
2343 statements, this flag can usually be left unset.
2345 If OEP_PURE_SAME is set, then pure functions with identical arguments
2346 are considered the same. It is used when the caller has other ways
2347 to ensure that global memory is unchanged in between. */
2350 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2352 /* If either is ERROR_MARK, they aren't equal. */
2353 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2354 return 0;
2356 /* If both types don't have the same signedness, then we can't consider
2357 them equal. We must check this before the STRIP_NOPS calls
2358 because they may change the signedness of the arguments. */
2359 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2360 return 0;
2362 STRIP_NOPS (arg0);
2363 STRIP_NOPS (arg1);
2365 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2366 /* This is needed for conversions and for COMPONENT_REF.
2367 Might as well play it safe and always test this. */
2368 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2369 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2370 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2371 return 0;
2373 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2374 We don't care about side effects in that case because the SAVE_EXPR
2375 takes care of that for us. In all other cases, two expressions are
2376 equal if they have no side effects. If we have two identical
2377 expressions with side effects that should be treated the same due
2378 to the only side effects being identical SAVE_EXPR's, that will
2379 be detected in the recursive calls below. */
2380 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2381 && (TREE_CODE (arg0) == SAVE_EXPR
2382 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2383 return 1;
2385 /* Next handle constant cases, those for which we can return 1 even
2386 if ONLY_CONST is set. */
2387 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2388 switch (TREE_CODE (arg0))
2390 case INTEGER_CST:
2391 return (! TREE_CONSTANT_OVERFLOW (arg0)
2392 && ! TREE_CONSTANT_OVERFLOW (arg1)
2393 && tree_int_cst_equal (arg0, arg1));
2395 case REAL_CST:
2396 return (! TREE_CONSTANT_OVERFLOW (arg0)
2397 && ! TREE_CONSTANT_OVERFLOW (arg1)
2398 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2399 TREE_REAL_CST (arg1)));
2401 case VECTOR_CST:
2403 tree v1, v2;
2405 if (TREE_CONSTANT_OVERFLOW (arg0)
2406 || TREE_CONSTANT_OVERFLOW (arg1))
2407 return 0;
2409 v1 = TREE_VECTOR_CST_ELTS (arg0);
2410 v2 = TREE_VECTOR_CST_ELTS (arg1);
2411 while (v1 && v2)
2413 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2414 flags))
2415 return 0;
2416 v1 = TREE_CHAIN (v1);
2417 v2 = TREE_CHAIN (v2);
2420 return 1;
2423 case COMPLEX_CST:
2424 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2425 flags)
2426 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2427 flags));
2429 case STRING_CST:
2430 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2431 && ! memcmp (TREE_STRING_POINTER (arg0),
2432 TREE_STRING_POINTER (arg1),
2433 TREE_STRING_LENGTH (arg0)));
2435 case ADDR_EXPR:
2436 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2438 default:
2439 break;
2442 if (flags & OEP_ONLY_CONST)
2443 return 0;
2445 /* Define macros to test an operand from arg0 and arg1 for equality and a
2446 variant that allows null and views null as being different from any
2447 non-null value. In the latter case, if either is null, the both
2448 must be; otherwise, do the normal comparison. */
2449 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2450 TREE_OPERAND (arg1, N), flags)
2452 #define OP_SAME_WITH_NULL(N) \
2453 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2454 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2456 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2458 case tcc_unary:
2459 /* Two conversions are equal only if signedness and modes match. */
2460 switch (TREE_CODE (arg0))
2462 case NOP_EXPR:
2463 case CONVERT_EXPR:
2464 case FIX_CEIL_EXPR:
2465 case FIX_TRUNC_EXPR:
2466 case FIX_FLOOR_EXPR:
2467 case FIX_ROUND_EXPR:
2468 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2469 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2470 return 0;
2471 break;
2472 default:
2473 break;
2476 return OP_SAME (0);
2479 case tcc_comparison:
2480 case tcc_binary:
2481 if (OP_SAME (0) && OP_SAME (1))
2482 return 1;
2484 /* For commutative ops, allow the other order. */
2485 return (commutative_tree_code (TREE_CODE (arg0))
2486 && operand_equal_p (TREE_OPERAND (arg0, 0),
2487 TREE_OPERAND (arg1, 1), flags)
2488 && operand_equal_p (TREE_OPERAND (arg0, 1),
2489 TREE_OPERAND (arg1, 0), flags));
2491 case tcc_reference:
2492 /* If either of the pointer (or reference) expressions we are
2493 dereferencing contain a side effect, these cannot be equal. */
2494 if (TREE_SIDE_EFFECTS (arg0)
2495 || TREE_SIDE_EFFECTS (arg1))
2496 return 0;
2498 switch (TREE_CODE (arg0))
2500 case INDIRECT_REF:
2501 case ALIGN_INDIRECT_REF:
2502 case MISALIGNED_INDIRECT_REF:
2503 case REALPART_EXPR:
2504 case IMAGPART_EXPR:
2505 return OP_SAME (0);
2507 case ARRAY_REF:
2508 case ARRAY_RANGE_REF:
2509 /* Operands 2 and 3 may be null. */
2510 return (OP_SAME (0)
2511 && OP_SAME (1)
2512 && OP_SAME_WITH_NULL (2)
2513 && OP_SAME_WITH_NULL (3));
2515 case COMPONENT_REF:
2516 /* Handle operand 2 the same as for ARRAY_REF. */
2517 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2519 case BIT_FIELD_REF:
2520 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2522 default:
2523 return 0;
2526 case tcc_expression:
2527 switch (TREE_CODE (arg0))
2529 case ADDR_EXPR:
2530 case TRUTH_NOT_EXPR:
2531 return OP_SAME (0);
2533 case TRUTH_ANDIF_EXPR:
2534 case TRUTH_ORIF_EXPR:
2535 return OP_SAME (0) && OP_SAME (1);
2537 case TRUTH_AND_EXPR:
2538 case TRUTH_OR_EXPR:
2539 case TRUTH_XOR_EXPR:
2540 if (OP_SAME (0) && OP_SAME (1))
2541 return 1;
2543 /* Otherwise take into account this is a commutative operation. */
2544 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2545 TREE_OPERAND (arg1, 1), flags)
2546 && operand_equal_p (TREE_OPERAND (arg0, 1),
2547 TREE_OPERAND (arg1, 0), flags));
2549 case CALL_EXPR:
2550 /* If the CALL_EXPRs call different functions, then they
2551 clearly can not be equal. */
2552 if (!OP_SAME (0))
2553 return 0;
2556 unsigned int cef = call_expr_flags (arg0);
2557 if (flags & OEP_PURE_SAME)
2558 cef &= ECF_CONST | ECF_PURE;
2559 else
2560 cef &= ECF_CONST;
2561 if (!cef)
2562 return 0;
2565 /* Now see if all the arguments are the same. operand_equal_p
2566 does not handle TREE_LIST, so we walk the operands here
2567 feeding them to operand_equal_p. */
2568 arg0 = TREE_OPERAND (arg0, 1);
2569 arg1 = TREE_OPERAND (arg1, 1);
2570 while (arg0 && arg1)
2572 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2573 flags))
2574 return 0;
2576 arg0 = TREE_CHAIN (arg0);
2577 arg1 = TREE_CHAIN (arg1);
2580 /* If we get here and both argument lists are exhausted
2581 then the CALL_EXPRs are equal. */
2582 return ! (arg0 || arg1);
2584 default:
2585 return 0;
2588 case tcc_declaration:
2589 /* Consider __builtin_sqrt equal to sqrt. */
2590 return (TREE_CODE (arg0) == FUNCTION_DECL
2591 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2592 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2593 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2595 default:
2596 return 0;
2599 #undef OP_SAME
2600 #undef OP_SAME_WITH_NULL
2603 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2604 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2606 When in doubt, return 0. */
2608 static int
2609 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2611 int unsignedp1, unsignedpo;
2612 tree primarg0, primarg1, primother;
2613 unsigned int correct_width;
2615 if (operand_equal_p (arg0, arg1, 0))
2616 return 1;
2618 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2619 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2620 return 0;
2622 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2623 and see if the inner values are the same. This removes any
2624 signedness comparison, which doesn't matter here. */
2625 primarg0 = arg0, primarg1 = arg1;
2626 STRIP_NOPS (primarg0);
2627 STRIP_NOPS (primarg1);
2628 if (operand_equal_p (primarg0, primarg1, 0))
2629 return 1;
2631 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2632 actual comparison operand, ARG0.
2634 First throw away any conversions to wider types
2635 already present in the operands. */
2637 primarg1 = get_narrower (arg1, &unsignedp1);
2638 primother = get_narrower (other, &unsignedpo);
2640 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2641 if (unsignedp1 == unsignedpo
2642 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2643 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2645 tree type = TREE_TYPE (arg0);
2647 /* Make sure shorter operand is extended the right way
2648 to match the longer operand. */
2649 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2650 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2652 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2653 return 1;
2656 return 0;
2659 /* See if ARG is an expression that is either a comparison or is performing
2660 arithmetic on comparisons. The comparisons must only be comparing
2661 two different values, which will be stored in *CVAL1 and *CVAL2; if
2662 they are nonzero it means that some operands have already been found.
2663 No variables may be used anywhere else in the expression except in the
2664 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2665 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2667 If this is true, return 1. Otherwise, return zero. */
2669 static int
2670 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2672 enum tree_code code = TREE_CODE (arg);
2673 enum tree_code_class class = TREE_CODE_CLASS (code);
2675 /* We can handle some of the tcc_expression cases here. */
2676 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2677 class = tcc_unary;
2678 else if (class == tcc_expression
2679 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2680 || code == COMPOUND_EXPR))
2681 class = tcc_binary;
2683 else if (class == tcc_expression && code == SAVE_EXPR
2684 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2686 /* If we've already found a CVAL1 or CVAL2, this expression is
2687 two complex to handle. */
2688 if (*cval1 || *cval2)
2689 return 0;
2691 class = tcc_unary;
2692 *save_p = 1;
2695 switch (class)
2697 case tcc_unary:
2698 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2700 case tcc_binary:
2701 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2702 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2703 cval1, cval2, save_p));
2705 case tcc_constant:
2706 return 1;
2708 case tcc_expression:
2709 if (code == COND_EXPR)
2710 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2711 cval1, cval2, save_p)
2712 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2713 cval1, cval2, save_p)
2714 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2715 cval1, cval2, save_p));
2716 return 0;
2718 case tcc_comparison:
2719 /* First see if we can handle the first operand, then the second. For
2720 the second operand, we know *CVAL1 can't be zero. It must be that
2721 one side of the comparison is each of the values; test for the
2722 case where this isn't true by failing if the two operands
2723 are the same. */
2725 if (operand_equal_p (TREE_OPERAND (arg, 0),
2726 TREE_OPERAND (arg, 1), 0))
2727 return 0;
2729 if (*cval1 == 0)
2730 *cval1 = TREE_OPERAND (arg, 0);
2731 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2733 else if (*cval2 == 0)
2734 *cval2 = TREE_OPERAND (arg, 0);
2735 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2737 else
2738 return 0;
2740 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2742 else if (*cval2 == 0)
2743 *cval2 = TREE_OPERAND (arg, 1);
2744 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2746 else
2747 return 0;
2749 return 1;
2751 default:
2752 return 0;
2756 /* ARG is a tree that is known to contain just arithmetic operations and
2757 comparisons. Evaluate the operations in the tree substituting NEW0 for
2758 any occurrence of OLD0 as an operand of a comparison and likewise for
2759 NEW1 and OLD1. */
2761 static tree
2762 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2764 tree type = TREE_TYPE (arg);
2765 enum tree_code code = TREE_CODE (arg);
2766 enum tree_code_class class = TREE_CODE_CLASS (code);
2768 /* We can handle some of the tcc_expression cases here. */
2769 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2770 class = tcc_unary;
2771 else if (class == tcc_expression
2772 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2773 class = tcc_binary;
2775 switch (class)
2777 case tcc_unary:
2778 return fold (build1 (code, type,
2779 eval_subst (TREE_OPERAND (arg, 0),
2780 old0, new0, old1, new1)));
2782 case tcc_binary:
2783 return fold (build2 (code, type,
2784 eval_subst (TREE_OPERAND (arg, 0),
2785 old0, new0, old1, new1),
2786 eval_subst (TREE_OPERAND (arg, 1),
2787 old0, new0, old1, new1)));
2789 case tcc_expression:
2790 switch (code)
2792 case SAVE_EXPR:
2793 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2795 case COMPOUND_EXPR:
2796 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2798 case COND_EXPR:
2799 return fold (build3 (code, type,
2800 eval_subst (TREE_OPERAND (arg, 0),
2801 old0, new0, old1, new1),
2802 eval_subst (TREE_OPERAND (arg, 1),
2803 old0, new0, old1, new1),
2804 eval_subst (TREE_OPERAND (arg, 2),
2805 old0, new0, old1, new1)));
2806 default:
2807 break;
2809 /* Fall through - ??? */
2811 case tcc_comparison:
2813 tree arg0 = TREE_OPERAND (arg, 0);
2814 tree arg1 = TREE_OPERAND (arg, 1);
2816 /* We need to check both for exact equality and tree equality. The
2817 former will be true if the operand has a side-effect. In that
2818 case, we know the operand occurred exactly once. */
2820 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2821 arg0 = new0;
2822 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2823 arg0 = new1;
2825 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2826 arg1 = new0;
2827 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2828 arg1 = new1;
2830 return fold (build2 (code, type, arg0, arg1));
2833 default:
2834 return arg;
2838 /* Return a tree for the case when the result of an expression is RESULT
2839 converted to TYPE and OMITTED was previously an operand of the expression
2840 but is now not needed (e.g., we folded OMITTED * 0).
2842 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2843 the conversion of RESULT to TYPE. */
2845 tree
2846 omit_one_operand (tree type, tree result, tree omitted)
2848 tree t = fold_convert (type, result);
2850 if (TREE_SIDE_EFFECTS (omitted))
2851 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2853 return non_lvalue (t);
2856 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2858 static tree
2859 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2861 tree t = fold_convert (type, result);
2863 if (TREE_SIDE_EFFECTS (omitted))
2864 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2866 return pedantic_non_lvalue (t);
2869 /* Return a tree for the case when the result of an expression is RESULT
2870 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2871 of the expression but are now not needed.
2873 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2874 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2875 evaluated before OMITTED2. Otherwise, if neither has side effects,
2876 just do the conversion of RESULT to TYPE. */
2878 tree
2879 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2881 tree t = fold_convert (type, result);
2883 if (TREE_SIDE_EFFECTS (omitted2))
2884 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2885 if (TREE_SIDE_EFFECTS (omitted1))
2886 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2888 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2892 /* Return a simplified tree node for the truth-negation of ARG. This
2893 never alters ARG itself. We assume that ARG is an operation that
2894 returns a truth value (0 or 1).
2896 FIXME: one would think we would fold the result, but it causes
2897 problems with the dominator optimizer. */
2898 tree
2899 invert_truthvalue (tree arg)
2901 tree type = TREE_TYPE (arg);
2902 enum tree_code code = TREE_CODE (arg);
2904 if (code == ERROR_MARK)
2905 return arg;
2907 /* If this is a comparison, we can simply invert it, except for
2908 floating-point non-equality comparisons, in which case we just
2909 enclose a TRUTH_NOT_EXPR around what we have. */
2911 if (TREE_CODE_CLASS (code) == tcc_comparison)
2913 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2914 if (FLOAT_TYPE_P (op_type)
2915 && flag_trapping_math
2916 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2917 && code != NE_EXPR && code != EQ_EXPR)
2918 return build1 (TRUTH_NOT_EXPR, type, arg);
2919 else
2921 code = invert_tree_comparison (code,
2922 HONOR_NANS (TYPE_MODE (op_type)));
2923 if (code == ERROR_MARK)
2924 return build1 (TRUTH_NOT_EXPR, type, arg);
2925 else
2926 return build2 (code, type,
2927 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2931 switch (code)
2933 case INTEGER_CST:
2934 return fold_convert (type,
2935 build_int_cst (NULL_TREE, integer_zerop (arg)));
2937 case TRUTH_AND_EXPR:
2938 return build2 (TRUTH_OR_EXPR, type,
2939 invert_truthvalue (TREE_OPERAND (arg, 0)),
2940 invert_truthvalue (TREE_OPERAND (arg, 1)));
2942 case TRUTH_OR_EXPR:
2943 return build2 (TRUTH_AND_EXPR, type,
2944 invert_truthvalue (TREE_OPERAND (arg, 0)),
2945 invert_truthvalue (TREE_OPERAND (arg, 1)));
2947 case TRUTH_XOR_EXPR:
2948 /* Here we can invert either operand. We invert the first operand
2949 unless the second operand is a TRUTH_NOT_EXPR in which case our
2950 result is the XOR of the first operand with the inside of the
2951 negation of the second operand. */
2953 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2954 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2955 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2956 else
2957 return build2 (TRUTH_XOR_EXPR, type,
2958 invert_truthvalue (TREE_OPERAND (arg, 0)),
2959 TREE_OPERAND (arg, 1));
2961 case TRUTH_ANDIF_EXPR:
2962 return build2 (TRUTH_ORIF_EXPR, type,
2963 invert_truthvalue (TREE_OPERAND (arg, 0)),
2964 invert_truthvalue (TREE_OPERAND (arg, 1)));
2966 case TRUTH_ORIF_EXPR:
2967 return build2 (TRUTH_ANDIF_EXPR, type,
2968 invert_truthvalue (TREE_OPERAND (arg, 0)),
2969 invert_truthvalue (TREE_OPERAND (arg, 1)));
2971 case TRUTH_NOT_EXPR:
2972 return TREE_OPERAND (arg, 0);
2974 case COND_EXPR:
2975 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2976 invert_truthvalue (TREE_OPERAND (arg, 1)),
2977 invert_truthvalue (TREE_OPERAND (arg, 2)));
2979 case COMPOUND_EXPR:
2980 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2981 invert_truthvalue (TREE_OPERAND (arg, 1)));
2983 case NON_LVALUE_EXPR:
2984 return invert_truthvalue (TREE_OPERAND (arg, 0));
2986 case NOP_EXPR:
2987 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2988 break;
2990 case CONVERT_EXPR:
2991 case FLOAT_EXPR:
2992 return build1 (TREE_CODE (arg), type,
2993 invert_truthvalue (TREE_OPERAND (arg, 0)));
2995 case BIT_AND_EXPR:
2996 if (!integer_onep (TREE_OPERAND (arg, 1)))
2997 break;
2998 return build2 (EQ_EXPR, type, arg,
2999 fold_convert (type, integer_zero_node));
3001 case SAVE_EXPR:
3002 return build1 (TRUTH_NOT_EXPR, type, arg);
3004 case CLEANUP_POINT_EXPR:
3005 return build1 (CLEANUP_POINT_EXPR, type,
3006 invert_truthvalue (TREE_OPERAND (arg, 0)));
3008 default:
3009 break;
3011 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3012 return build1 (TRUTH_NOT_EXPR, type, arg);
3015 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3016 operands are another bit-wise operation with a common input. If so,
3017 distribute the bit operations to save an operation and possibly two if
3018 constants are involved. For example, convert
3019 (A | B) & (A | C) into A | (B & C)
3020 Further simplification will occur if B and C are constants.
3022 If this optimization cannot be done, 0 will be returned. */
3024 static tree
3025 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3027 tree common;
3028 tree left, right;
3030 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3031 || TREE_CODE (arg0) == code
3032 || (TREE_CODE (arg0) != BIT_AND_EXPR
3033 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3034 return 0;
3036 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3038 common = TREE_OPERAND (arg0, 0);
3039 left = TREE_OPERAND (arg0, 1);
3040 right = TREE_OPERAND (arg1, 1);
3042 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3044 common = TREE_OPERAND (arg0, 0);
3045 left = TREE_OPERAND (arg0, 1);
3046 right = TREE_OPERAND (arg1, 0);
3048 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3050 common = TREE_OPERAND (arg0, 1);
3051 left = TREE_OPERAND (arg0, 0);
3052 right = TREE_OPERAND (arg1, 1);
3054 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3056 common = TREE_OPERAND (arg0, 1);
3057 left = TREE_OPERAND (arg0, 0);
3058 right = TREE_OPERAND (arg1, 0);
3060 else
3061 return 0;
3063 return fold (build2 (TREE_CODE (arg0), type, common,
3064 fold (build2 (code, type, left, right))));
3067 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3068 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3070 static tree
3071 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3072 int unsignedp)
3074 tree result = build3 (BIT_FIELD_REF, type, inner,
3075 size_int (bitsize), bitsize_int (bitpos));
3077 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3079 return result;
3082 /* Optimize a bit-field compare.
3084 There are two cases: First is a compare against a constant and the
3085 second is a comparison of two items where the fields are at the same
3086 bit position relative to the start of a chunk (byte, halfword, word)
3087 large enough to contain it. In these cases we can avoid the shift
3088 implicit in bitfield extractions.
3090 For constants, we emit a compare of the shifted constant with the
3091 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3092 compared. For two fields at the same position, we do the ANDs with the
3093 similar mask and compare the result of the ANDs.
3095 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3096 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3097 are the left and right operands of the comparison, respectively.
3099 If the optimization described above can be done, we return the resulting
3100 tree. Otherwise we return zero. */
3102 static tree
3103 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3104 tree lhs, tree rhs)
3106 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3107 tree type = TREE_TYPE (lhs);
3108 tree signed_type, unsigned_type;
3109 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3110 enum machine_mode lmode, rmode, nmode;
3111 int lunsignedp, runsignedp;
3112 int lvolatilep = 0, rvolatilep = 0;
3113 tree linner, rinner = NULL_TREE;
3114 tree mask;
3115 tree offset;
3117 /* Get all the information about the extractions being done. If the bit size
3118 if the same as the size of the underlying object, we aren't doing an
3119 extraction at all and so can do nothing. We also don't want to
3120 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3121 then will no longer be able to replace it. */
3122 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3123 &lunsignedp, &lvolatilep, false);
3124 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3125 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3126 return 0;
3128 if (!const_p)
3130 /* If this is not a constant, we can only do something if bit positions,
3131 sizes, and signedness are the same. */
3132 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3133 &runsignedp, &rvolatilep, false);
3135 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3136 || lunsignedp != runsignedp || offset != 0
3137 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3138 return 0;
3141 /* See if we can find a mode to refer to this field. We should be able to,
3142 but fail if we can't. */
3143 nmode = get_best_mode (lbitsize, lbitpos,
3144 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3145 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3146 TYPE_ALIGN (TREE_TYPE (rinner))),
3147 word_mode, lvolatilep || rvolatilep);
3148 if (nmode == VOIDmode)
3149 return 0;
3151 /* Set signed and unsigned types of the precision of this mode for the
3152 shifts below. */
3153 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3154 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3156 /* Compute the bit position and size for the new reference and our offset
3157 within it. If the new reference is the same size as the original, we
3158 won't optimize anything, so return zero. */
3159 nbitsize = GET_MODE_BITSIZE (nmode);
3160 nbitpos = lbitpos & ~ (nbitsize - 1);
3161 lbitpos -= nbitpos;
3162 if (nbitsize == lbitsize)
3163 return 0;
3165 if (BYTES_BIG_ENDIAN)
3166 lbitpos = nbitsize - lbitsize - lbitpos;
3168 /* Make the mask to be used against the extracted field. */
3169 mask = build_int_cst (unsigned_type, -1);
3170 mask = force_fit_type (mask, 0, false, false);
3171 mask = fold_convert (unsigned_type, mask);
3172 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3173 mask = const_binop (RSHIFT_EXPR, mask,
3174 size_int (nbitsize - lbitsize - lbitpos), 0);
3176 if (! const_p)
3177 /* If not comparing with constant, just rework the comparison
3178 and return. */
3179 return build2 (code, compare_type,
3180 build2 (BIT_AND_EXPR, unsigned_type,
3181 make_bit_field_ref (linner, unsigned_type,
3182 nbitsize, nbitpos, 1),
3183 mask),
3184 build2 (BIT_AND_EXPR, unsigned_type,
3185 make_bit_field_ref (rinner, unsigned_type,
3186 nbitsize, nbitpos, 1),
3187 mask));
3189 /* Otherwise, we are handling the constant case. See if the constant is too
3190 big for the field. Warn and return a tree of for 0 (false) if so. We do
3191 this not only for its own sake, but to avoid having to test for this
3192 error case below. If we didn't, we might generate wrong code.
3194 For unsigned fields, the constant shifted right by the field length should
3195 be all zero. For signed fields, the high-order bits should agree with
3196 the sign bit. */
3198 if (lunsignedp)
3200 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3201 fold_convert (unsigned_type, rhs),
3202 size_int (lbitsize), 0)))
3204 warning ("comparison is always %d due to width of bit-field",
3205 code == NE_EXPR);
3206 return constant_boolean_node (code == NE_EXPR, compare_type);
3209 else
3211 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3212 size_int (lbitsize - 1), 0);
3213 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3215 warning ("comparison is always %d due to width of bit-field",
3216 code == NE_EXPR);
3217 return constant_boolean_node (code == NE_EXPR, compare_type);
3221 /* Single-bit compares should always be against zero. */
3222 if (lbitsize == 1 && ! integer_zerop (rhs))
3224 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3225 rhs = fold_convert (type, integer_zero_node);
3228 /* Make a new bitfield reference, shift the constant over the
3229 appropriate number of bits and mask it with the computed mask
3230 (in case this was a signed field). If we changed it, make a new one. */
3231 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3232 if (lvolatilep)
3234 TREE_SIDE_EFFECTS (lhs) = 1;
3235 TREE_THIS_VOLATILE (lhs) = 1;
3238 rhs = fold (const_binop (BIT_AND_EXPR,
3239 const_binop (LSHIFT_EXPR,
3240 fold_convert (unsigned_type, rhs),
3241 size_int (lbitpos), 0),
3242 mask, 0));
3244 return build2 (code, compare_type,
3245 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3246 rhs);
3249 /* Subroutine for fold_truthop: decode a field reference.
3251 If EXP is a comparison reference, we return the innermost reference.
3253 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3254 set to the starting bit number.
3256 If the innermost field can be completely contained in a mode-sized
3257 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3259 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3260 otherwise it is not changed.
3262 *PUNSIGNEDP is set to the signedness of the field.
3264 *PMASK is set to the mask used. This is either contained in a
3265 BIT_AND_EXPR or derived from the width of the field.
3267 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3269 Return 0 if this is not a component reference or is one that we can't
3270 do anything with. */
3272 static tree
3273 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3274 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3275 int *punsignedp, int *pvolatilep,
3276 tree *pmask, tree *pand_mask)
3278 tree outer_type = 0;
3279 tree and_mask = 0;
3280 tree mask, inner, offset;
3281 tree unsigned_type;
3282 unsigned int precision;
3284 /* All the optimizations using this function assume integer fields.
3285 There are problems with FP fields since the type_for_size call
3286 below can fail for, e.g., XFmode. */
3287 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3288 return 0;
3290 /* We are interested in the bare arrangement of bits, so strip everything
3291 that doesn't affect the machine mode. However, record the type of the
3292 outermost expression if it may matter below. */
3293 if (TREE_CODE (exp) == NOP_EXPR
3294 || TREE_CODE (exp) == CONVERT_EXPR
3295 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3296 outer_type = TREE_TYPE (exp);
3297 STRIP_NOPS (exp);
/* Peel a masking operation off EXP, remembering the mask so it can be
   merged into the field-width mask below.  Only constant masks are
   usable here.  */
3299 if (TREE_CODE (exp) == BIT_AND_EXPR)
3301 and_mask = TREE_OPERAND (exp, 1);
3302 exp = TREE_OPERAND (exp, 0);
3303 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3304 if (TREE_CODE (and_mask) != INTEGER_CST)
3305 return 0;
3308 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3309 punsignedp, pvolatilep, false);
/* Give up on a bare object with no mask, a negative bit size, a variable
   offset, or a PLACEHOLDER_EXPR -- nothing useful can be done with those.  */
3310 if ((inner == exp && and_mask == 0)
3311 || *pbitsize < 0 || offset != 0
3312 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3313 return 0;
3315 /* If the number of bits in the reference is the same as the bitsize of
3316 the outer type, then the outer type gives the signedness. Otherwise
3317 (in case of a small bitfield) the signedness is unchanged. */
3318 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3319 *punsignedp = TYPE_UNSIGNED (outer_type);
3321 /* Compute the mask to access the bitfield. */
3322 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3323 precision = TYPE_PRECISION (unsigned_type);
/* Build an all-ones constant, then shift left and back right so that
   exactly *PBITSIZE low-order one bits remain.  */
3325 mask = build_int_cst (unsigned_type, -1);
3326 mask = force_fit_type (mask, 0, false, false);
3328 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3329 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3331 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3332 if (and_mask != 0)
3333 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3334 fold_convert (unsigned_type, and_mask), mask));
3336 *pmask = mask;
3337 *pand_mask = and_mask;
3338 return inner;
3341 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3342 bit positions. */
3344 static int
3345 all_ones_mask_p (tree mask, int size)
3347 tree type = TREE_TYPE (mask);
3348 unsigned int precision = TYPE_PRECISION (type);
3349 tree tmask;
3351 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3352 tmask = force_fit_type (tmask, 0, false, false);
3354 return
3355 tree_int_cst_equal (mask,
3356 const_binop (RSHIFT_EXPR,
3357 const_binop (LSHIFT_EXPR, tmask,
3358 size_int (precision - size),
3360 size_int (precision - size), 0));
3363 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3364 represents the sign bit of EXP's type. If EXP represents a sign
3365 or zero extension, also test VAL against the unextended type.
3366 The return value is the (sub)expression whose sign bit is VAL,
3367 or NULL_TREE otherwise. */
3369 static tree
3370 sign_bit_p (tree exp, tree val)
3372 unsigned HOST_WIDE_INT mask_lo, lo;
3373 HOST_WIDE_INT mask_hi, hi;
3374 int width;
3375 tree t;
3377 /* Tree EXP must have an integral type. */
3378 t = TREE_TYPE (exp);
3379 if (! INTEGRAL_TYPE_P (t))
3380 return NULL_TREE;
3382 /* Tree VAL must be an integer constant. */
3383 if (TREE_CODE (val) != INTEGER_CST
3384 || TREE_CONSTANT_OVERFLOW (val))
3385 return NULL_TREE;
3387 width = TYPE_PRECISION (t);
3388 if (width > HOST_BITS_PER_WIDE_INT)
3390 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3391 lo = 0;
3393 mask_hi = ((unsigned HOST_WIDE_INT) -1
3394 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3395 mask_lo = -1;
3397 else
3399 hi = 0;
3400 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3402 mask_hi = 0;
3403 mask_lo = ((unsigned HOST_WIDE_INT) -1
3404 >> (HOST_BITS_PER_WIDE_INT - width));
3407 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3408 treat VAL as if it were unsigned. */
3409 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3410 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3411 return exp;
3413 /* Handle extension from a narrower type. */
3414 if (TREE_CODE (exp) == NOP_EXPR
3415 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3416 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3418 return NULL_TREE;
3421 /* Subroutine for fold_truthop: determine if an operand is simple enough
3422 to be evaluated unconditionally. */
3424 static int
3425 simple_operand_p (tree exp)
3427 /* Strip any conversions that don't change the machine mode. */
3428 STRIP_NOPS (exp);
3430 return (CONSTANT_CLASS_P (exp)
3431 || TREE_CODE (exp) == SSA_NAME
3432 || (DECL_P (exp)
3433 && ! TREE_ADDRESSABLE (exp)
3434 && ! TREE_THIS_VOLATILE (exp)
3435 && ! DECL_NONLOCAL (exp)
3436 /* Don't regard global variables as simple. They may be
3437 allocated in ways unknown to the compiler (shared memory,
3438 #pragma weak, etc). */
3439 && ! TREE_PUBLIC (exp)
3440 && ! DECL_EXTERNAL (exp)
3441 /* Loading a static variable is unduly expensive, but global
3442 registers aren't expensive. */
3443 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3446 /* The following functions are subroutines to fold_range_test and allow it to
3447 try to change a logical combination of comparisons into a range test.
3449 For example, both
3450 X == 2 || X == 3 || X == 4 || X == 5
3451 and
3452 X >= 2 && X <= 5
3453 are converted to
3454 (unsigned) (X - 2) <= 3
3456 We describe each set of comparisons as being either inside or outside
3457 a range, using a variable named like IN_P, and then describe the
3458 range with a lower and upper bound. If one of the bounds is omitted,
3459 it represents either the highest or lowest value of the type.
3461 In the comments below, we represent a range by two numbers in brackets
3462 preceded by a "+" to designate being inside that range, or a "-" to
3463 designate being outside that range, so the condition can be inverted by
3464 flipping the prefix. An omitted bound is represented by a "-". For
3465 example, "- [-, 10]" means being outside the range starting at the lowest
3466 possible value and ending at 10, in other words, being greater than 10.
3467 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3468 always false.
3470 We set up things so that the missing bounds are handled in a consistent
3471 manner so neither a missing bound nor "true" and "false" need to be
3472 handled using a special case. */
3474 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3475 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3476 and UPPER1_P are nonzero if the respective argument is an upper bound
3477 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3478 must be specified for a comparison. ARG1 will be converted to ARG0's
3479 type if both are specified. */
3481 static tree
3482 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3483 tree arg1, int upper1_p)
3485 tree tem;
3486 int result;
3487 int sgn0, sgn1;
3489 /* If neither arg represents infinity, do the normal operation.
3490 Else, if not a comparison, return infinity. Else handle the special
3491 comparison rules. Note that most of the cases below won't occur, but
3492 are handled for consistency. */
3494 if (arg0 != 0 && arg1 != 0)
3496 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3497 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3498 STRIP_NOPS (tem);
3499 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3502 if (TREE_CODE_CLASS (code) != tcc_comparison)
3503 return 0;
3505 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3506 for neither. In real maths, we cannot assume open ended ranges are
3507 the same. But, this is computer arithmetic, where numbers are finite.
3508 We can therefore make the transformation of any unbounded range with
3509 the value Z, Z being greater than any representable number. This permits
3510 us to treat unbounded ranges as equal. */
3511 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3512 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3513 switch (code)
3515 case EQ_EXPR:
3516 result = sgn0 == sgn1;
3517 break;
3518 case NE_EXPR:
3519 result = sgn0 != sgn1;
3520 break;
3521 case LT_EXPR:
3522 result = sgn0 < sgn1;
3523 break;
3524 case LE_EXPR:
3525 result = sgn0 <= sgn1;
3526 break;
3527 case GT_EXPR:
3528 result = sgn0 > sgn1;
3529 break;
3530 case GE_EXPR:
3531 result = sgn0 >= sgn1;
3532 break;
3533 default:
3534 gcc_unreachable ();
3537 return constant_boolean_node (result, type);
3540 /* Given EXP, a logical expression, set the range it is testing into
3541 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3542 actually being tested. *PLOW and *PHIGH will be made of the same type
3543 as the returned expression. If EXP is not a comparison, we will most
3544 likely not be returning a useful value and range. */
3546 static tree
3547 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3549 enum tree_code code;
3550 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3551 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3552 int in_p, n_in_p;
3553 tree low, high, n_low, n_high;
3555 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3556 and see if we can refine the range. Some of the cases below may not
3557 happen, but it doesn't seem worth worrying about this. We "continue"
3558 the outer loop when we've changed something; otherwise we "break"
3559 the switch, which will "break" the while. */
3561 in_p = 0;
3562 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3564 while (1)
3566 code = TREE_CODE (exp);
3567 exp_type = TREE_TYPE (exp);
/* Pick up the operands of EXP up front; which ones exist depends on
   the code class of EXP.  */
3569 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3571 if (TREE_CODE_LENGTH (code) > 0)
3572 arg0 = TREE_OPERAND (exp, 0);
3573 if (TREE_CODE_CLASS (code) == tcc_comparison
3574 || TREE_CODE_CLASS (code) == tcc_unary
3575 || TREE_CODE_CLASS (code) == tcc_binary)
3576 arg0_type = TREE_TYPE (arg0);
3577 if (TREE_CODE_CLASS (code) == tcc_binary
3578 || TREE_CODE_CLASS (code) == tcc_comparison
3579 || (TREE_CODE_CLASS (code) == tcc_expression
3580 && TREE_CODE_LENGTH (code) > 1))
3581 arg1 = TREE_OPERAND (exp, 1);
3584 switch (code)
3586 case TRUTH_NOT_EXPR:
3587 in_p = ! in_p, exp = arg0;
3588 continue;
3590 case EQ_EXPR: case NE_EXPR:
3591 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3592 /* We can only do something if the range is testing for zero
3593 and if the second operand is an integer constant. Note that
3594 saying something is "in" the range we make is done by
3595 complementing IN_P since it will set in the initial case of
3596 being not equal to zero; "out" is leaving it alone. */
3597 if (low == 0 || high == 0
3598 || ! integer_zerop (low) || ! integer_zerop (high)
3599 || TREE_CODE (arg1) != INTEGER_CST)
3600 break;
3602 switch (code)
3604 case NE_EXPR: /* - [c, c] */
3605 low = high = arg1;
3606 break;
3607 case EQ_EXPR: /* + [c, c] */
3608 in_p = ! in_p, low = high = arg1;
3609 break;
3610 case GT_EXPR: /* - [-, c] */
3611 low = 0, high = arg1;
3612 break;
3613 case GE_EXPR: /* + [c, -] */
3614 in_p = ! in_p, low = arg1, high = 0;
3615 break;
3616 case LT_EXPR: /* - [c, -] */
3617 low = arg1, high = 0;
3618 break;
3619 case LE_EXPR: /* + [-, c] */
3620 in_p = ! in_p, low = 0, high = arg1;
3621 break;
3622 default:
3623 gcc_unreachable ();
3626 /* If this is an unsigned comparison, we also know that EXP is
3627 greater than or equal to zero. We base the range tests we make
3628 on that fact, so we record it here so we can parse existing
3629 range tests. We test arg0_type since often the return type
3630 of, e.g. EQ_EXPR, is boolean. */
3631 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3633 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3634 in_p, low, high, 1,
3635 fold_convert (arg0_type, integer_zero_node),
3636 NULL_TREE))
3637 break;
3639 in_p = n_in_p, low = n_low, high = n_high;
3641 /* If the high bound is missing, but we have a nonzero low
3642 bound, reverse the range so it goes from zero to the low bound
3643 minus 1. */
3644 if (high == 0 && low && ! integer_zerop (low))
3646 in_p = ! in_p;
3647 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3648 integer_one_node, 0);
3649 low = fold_convert (arg0_type, integer_zero_node);
3653 exp = arg0;
3654 continue;
3656 case NEGATE_EXPR:
3657 /* (-x) IN [a,b] -> x in [-b, -a] */
3658 n_low = range_binop (MINUS_EXPR, exp_type,
3659 fold_convert (exp_type, integer_zero_node),
3660 0, high, 1);
3661 n_high = range_binop (MINUS_EXPR, exp_type,
3662 fold_convert (exp_type, integer_zero_node),
3663 0, low, 0);
3664 low = n_low, high = n_high;
3665 exp = arg0;
3666 continue;
3668 case BIT_NOT_EXPR:
3669 /* ~ X -> -X - 1 */
3670 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3671 fold_convert (exp_type, integer_one_node));
3672 continue;
3674 case PLUS_EXPR: case MINUS_EXPR:
3675 if (TREE_CODE (arg1) != INTEGER_CST)
3676 break;
3678 /* If EXP is signed, any overflow in the computation is undefined,
3679 so we don't worry about it so long as our computations on
3680 the bounds don't overflow. For unsigned, overflow is defined
3681 and this is exactly the right thing. */
3682 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3683 arg0_type, low, 0, arg1, 0);
3684 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3685 arg0_type, high, 1, arg1, 0);
3686 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3687 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3688 break;
3690 /* Check for an unsigned range which has wrapped around the maximum
3691 value thus making n_high < n_low, and normalize it. */
3692 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3694 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3695 integer_one_node, 0);
3696 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3697 integer_one_node, 0);
3699 /* If the range is of the form +/- [ x+1, x ], we won't
3700 be able to normalize it. But then, it represents the
3701 whole range or the empty set, so make it
3702 +/- [ -, - ]. */
3703 if (tree_int_cst_equal (n_low, low)
3704 && tree_int_cst_equal (n_high, high))
3705 low = high = 0;
3706 else
3707 in_p = ! in_p;
3709 else
3710 low = n_low, high = n_high;
3712 exp = arg0;
3713 continue;
3715 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
/* If EXP truncates ARG0, we cannot look through the conversion.  */
3716 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3717 break;
3719 if (! INTEGRAL_TYPE_P (arg0_type)
3720 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3721 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3722 break;
3724 n_low = low, n_high = high;
3726 if (n_low != 0)
3727 n_low = fold_convert (arg0_type, n_low);
3729 if (n_high != 0)
3730 n_high = fold_convert (arg0_type, n_high);
3733 /* If we're converting arg0 from an unsigned type, to exp,
3734 a signed type, we will be doing the comparison as unsigned.
3735 The tests above have already verified that LOW and HIGH
3736 are both positive.
3738 So we have to ensure that we will handle large unsigned
3739 values the same way that the current signed bounds treat
3740 negative values. */
3742 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3744 tree high_positive;
3745 tree equiv_type = lang_hooks.types.type_for_mode
3746 (TYPE_MODE (arg0_type), 1);
3748 /* A range without an upper bound is, naturally, unbounded.
3749 Since convert would have cropped a very large value, use
3750 the max value for the destination type. */
3751 high_positive
3752 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3753 : TYPE_MAX_VALUE (arg0_type);
3755 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3756 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3757 fold_convert (arg0_type,
3758 high_positive),
3759 fold_convert (arg0_type,
3760 integer_one_node)));
3762 /* If the low bound is specified, "and" the range with the
3763 range for which the original unsigned value will be
3764 positive. */
3765 if (low != 0)
3767 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3768 1, n_low, n_high, 1,
3769 fold_convert (arg0_type,
3770 integer_zero_node),
3771 high_positive))
3772 break;
3774 in_p = (n_in_p == in_p);
3776 else
3778 /* Otherwise, "or" the range with the range of the input
3779 that will be interpreted as negative. */
3780 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3781 0, n_low, n_high, 1,
3782 fold_convert (arg0_type,
3783 integer_zero_node),
3784 high_positive))
3785 break;
3787 in_p = (in_p != n_in_p);
3791 exp = arg0;
3792 low = n_low, high = n_high;
3793 continue;
3795 default:
3796 break;
3799 break;
3802 /* If EXP is a constant, we can evaluate whether this is true or false. */
3803 if (TREE_CODE (exp) == INTEGER_CST)
3805 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3806 exp, 0, low, 0))
3807 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3808 exp, 1, high, 1)));
3809 low = high = 0;
3810 exp = 0;
3813 *pin_p = in_p, *plow = low, *phigh = high;
3814 return exp;
3817 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3818 type, TYPE, return an expression to test if EXP is in (or out of, depending
3819 on IN_P) the range. Return 0 if the test couldn't be created. */
3821 static tree
3822 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3824 tree etype = TREE_TYPE (exp);
3825 tree value;
/* For an "out of range" test, build the "in range" test and invert it.  */
3827 if (! in_p)
3829 value = build_range_check (type, exp, 1, low, high);
3830 if (value != 0)
3831 return invert_truthvalue (value);
3833 return 0;
/* No bounds at all: the range test is always true.  */
3836 if (low == 0 && high == 0)
3837 return fold_convert (type, integer_one_node);
3839 if (low == 0)
3840 return fold (build2 (LE_EXPR, type, exp, high));
3842 if (high == 0)
3843 return fold (build2 (GE_EXPR, type, exp, low));
3845 if (operand_equal_p (low, high, 0))
3846 return fold (build2 (EQ_EXPR, type, exp, low));
/* A range starting at zero reduces to one unsigned comparison
   against HIGH.  */
3848 if (integer_zerop (low))
3850 if (! TYPE_UNSIGNED (etype))
3852 etype = lang_hooks.types.unsigned_type (etype);
3853 high = fold_convert (etype, high);
3854 exp = fold_convert (etype, exp);
3856 return build_range_check (type, exp, 1, 0, high);
3859 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3860 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3862 unsigned HOST_WIDE_INT lo;
3863 HOST_WIDE_INT hi;
3864 int prec;
3866 prec = TYPE_PRECISION (etype);
3867 if (prec <= HOST_BITS_PER_WIDE_INT)
3869 hi = 0;
3870 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3872 else
3874 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3875 lo = (unsigned HOST_WIDE_INT) -1;
/* HI/LO now hold the maximum signed value for this precision; if HIGH
   equals it, the whole test collapses to a sign test.  */
3878 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3880 if (TYPE_UNSIGNED (etype))
3882 etype = lang_hooks.types.signed_type (etype);
3883 exp = fold_convert (etype, exp);
3885 return fold (build2 (GT_EXPR, type, exp,
3886 fold_convert (etype, integer_zero_node)));
/* General case: express the test as (EXP - LOW) in [0, HIGH - LOW],
   provided HIGH - LOW can be computed without overflow.  */
3890 value = const_binop (MINUS_EXPR, high, low, 0);
3891 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3893 tree utype, minv, maxv;
3895 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3896 for the type in question, as we rely on this here. */
3897 switch (TREE_CODE (etype))
3899 case INTEGER_TYPE:
3900 case ENUMERAL_TYPE:
3901 case CHAR_TYPE:
3902 utype = lang_hooks.types.unsigned_type (etype);
3903 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3904 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3905 integer_one_node, 1);
3906 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3907 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3908 minv, 1, maxv, 1)))
3910 etype = utype;
3911 high = fold_convert (etype, high);
3912 low = fold_convert (etype, low);
3913 exp = fold_convert (etype, exp);
3914 value = const_binop (MINUS_EXPR, high, low, 0);
3916 break;
3917 default:
3918 break;
3922 if (value != 0 && ! TREE_OVERFLOW (value))
3923 return build_range_check (type,
3924 fold (build2 (MINUS_EXPR, etype, exp, low)),
3925 1, fold_convert (etype, integer_zero_node),
3926 value);
3928 return 0;
3931 /* Given two ranges, see if we can merge them into one. Return 1 if we
3932 can, 0 if we can't. Set the output range into the specified parameters. */
3934 static int
3935 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3936 tree high0, int in1_p, tree low1, tree high1)
3938 int no_overlap;
3939 int subset;
3940 int temp;
3941 tree tem;
3942 int in_p;
3943 tree low, high;
/* LOWEQUAL/HIGHEQUAL record whether the corresponding bounds compare
   equal; two omitted bounds count as equal.  */
3944 int lowequal = ((low0 == 0 && low1 == 0)
3945 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3946 low0, 0, low1, 0)));
3947 int highequal = ((high0 == 0 && high1 == 0)
3948 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3949 high0, 1, high1, 1)));
3951 /* Make range 0 be the range that starts first, or ends last if they
3952 start at the same value. Swap them if it isn't. */
3953 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3954 low0, 0, low1, 0))
3955 || (lowequal
3956 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3957 high1, 1, high0, 1))))
3959 temp = in0_p, in0_p = in1_p, in1_p = temp;
3960 tem = low0, low0 = low1, low1 = tem;
3961 tem = high0, high0 = high1, high1 = tem;
3964 /* Now flag two cases, whether the ranges are disjoint or whether the
3965 second range is totally subsumed in the first. Note that the tests
3966 below are simplified by the ones above. */
3967 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3968 high0, 1, low1, 0));
3969 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3970 high1, 1, high0, 1));
3972 /* We now have four cases, depending on whether we are including or
3973 excluding the two ranges. */
3974 if (in0_p && in1_p)
3976 /* If they don't overlap, the result is false. If the second range
3977 is a subset it is the result. Otherwise, the range is from the start
3978 of the second to the end of the first. */
3979 if (no_overlap)
3980 in_p = 0, low = high = 0;
3981 else if (subset)
3982 in_p = 1, low = low1, high = high1;
3983 else
3984 in_p = 1, low = low1, high = high0;
3987 else if (in0_p && ! in1_p)
3989 /* If they don't overlap, the result is the first range. If they are
3990 equal, the result is false. If the second range is a subset of the
3991 first, and the ranges begin at the same place, we go from just after
3992 the end of the first range to the end of the second. If the second
3993 range is not a subset of the first, or if it is a subset and both
3994 ranges end at the same place, the range starts at the start of the
3995 first range and ends just before the second range.
3996 Otherwise, we can't describe this as a single range. */
3997 if (no_overlap)
3998 in_p = 1, low = low0, high = high0;
3999 else if (lowequal && highequal)
4000 in_p = 0, low = high = 0;
4001 else if (subset && lowequal)
4003 in_p = 1, high = high0;
4004 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4005 integer_one_node, 0);
4007 else if (! subset || highequal)
4009 in_p = 1, low = low0;
4010 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4011 integer_one_node, 0);
4013 else
4014 return 0;
4017 else if (! in0_p && in1_p)
4019 /* If they don't overlap, the result is the second range. If the second
4020 is a subset of the first, the result is false. Otherwise,
4021 the range starts just after the first range and ends at the
4022 end of the second. */
4023 if (no_overlap)
4024 in_p = 1, low = low1, high = high1;
4025 else if (subset || highequal)
4026 in_p = 0, low = high = 0;
4027 else
4029 in_p = 1, high = high1;
4030 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4031 integer_one_node, 0);
4035 else
4037 /* The case where we are excluding both ranges. Here the complex case
4038 is if they don't overlap. In that case, the only time we have a
4039 range is if they are adjacent. If the second is a subset of the
4040 first, the result is the first. Otherwise, the range to exclude
4041 starts at the beginning of the first range and ends at the end of the
4042 second. */
4043 if (no_overlap)
4045 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4046 range_binop (PLUS_EXPR, NULL_TREE,
4047 high0, 1,
4048 integer_one_node, 1),
4049 1, low1, 0)))
4050 in_p = 0, low = low0, high = high1;
4051 else
4053 /* Canonicalize - [min, x] into - [-, x]. */
4054 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4055 switch (TREE_CODE (TREE_TYPE (low0)))
4057 case ENUMERAL_TYPE:
4058 if (TYPE_PRECISION (TREE_TYPE (low0))
4059 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4060 break;
4061 /* FALLTHROUGH */
4062 case INTEGER_TYPE:
4063 case CHAR_TYPE:
4064 if (tree_int_cst_equal (low0,
4065 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4066 low0 = 0;
4067 break;
4068 case POINTER_TYPE:
4069 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4070 && integer_zerop (low0))
4071 low0 = 0;
4072 break;
4073 default:
4074 break;
4077 /* Canonicalize - [x, max] into - [x, -]. */
4078 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4079 switch (TREE_CODE (TREE_TYPE (high1)))
4081 case ENUMERAL_TYPE:
4082 if (TYPE_PRECISION (TREE_TYPE (high1))
4083 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4084 break;
4085 /* FALLTHROUGH */
4086 case INTEGER_TYPE:
4087 case CHAR_TYPE:
4088 if (tree_int_cst_equal (high1,
4089 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4090 high1 = 0;
4091 break;
4092 case POINTER_TYPE:
4093 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4094 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4095 high1, 1,
4096 integer_one_node, 1)))
4097 high1 = 0;
4098 break;
4099 default:
4100 break;
4103 /* The ranges might be also adjacent between the maximum and
4104 minimum values of the given type. For
4105 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4106 return + [x + 1, y - 1]. */
4107 if (low0 == 0 && high1 == 0)
4109 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4110 integer_one_node, 1);
4111 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4112 integer_one_node, 0);
4113 if (low == 0 || high == 0)
4114 return 0;
4116 in_p = 1;
4118 else
4119 return 0;
4122 else if (subset)
4123 in_p = 0, low = low0, high = high0;
4124 else
4125 in_p = 0, low = low0, high = high1;
4128 *pin_p = in_p, *plow = low, *phigh = high;
4129 return 1;
4133 /* Subroutine of fold, looking inside expressions of the form
4134 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4135 of the COND_EXPR. This function is being used also to optimize
4136 A op B ? C : A, by reversing the comparison first.
4138 Return a folded expression whose code is not a COND_EXPR
4139 anymore, or NULL_TREE if no folding opportunity is found. */
4141 static tree
4142 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4144 enum tree_code comp_code = TREE_CODE (arg0);
4145 tree arg00 = TREE_OPERAND (arg0, 0);
4146 tree arg01 = TREE_OPERAND (arg0, 1);
4147 tree arg1_type = TREE_TYPE (arg1);
4148 tree tem;
4150 STRIP_NOPS (arg1);
4151 STRIP_NOPS (arg2);
4153 /* If we have A op 0 ? A : -A, consider applying the following
4154 transformations:
4156 A == 0? A : -A same as -A
4157 A != 0? A : -A same as A
4158 A >= 0? A : -A same as abs (A)
4159 A > 0? A : -A same as abs (A)
4160 A <= 0? A : -A same as -abs (A)
4161 A < 0? A : -A same as -abs (A)
4163 None of these transformations work for modes with signed
4164 zeros. If A is +/-0, the first two transformations will
4165 change the sign of the result (from +0 to -0, or vice
4166 versa). The last four will fix the sign of the result,
4167 even though the original expressions could be positive or
4168 negative, depending on the sign of A.
4170 Note that all these transformations are correct if A is
4171 NaN, since the two alternatives (A and -A) are also NaNs. */
4172 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4173 ? real_zerop (arg01)
4174 : integer_zerop (arg01))
4175 && TREE_CODE (arg2) == NEGATE_EXPR
4176 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4177 switch (comp_code)
4179 case EQ_EXPR:
4180 case UNEQ_EXPR:
4181 tem = fold_convert (arg1_type, arg1);
4182 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4183 case NE_EXPR:
4184 case LTGT_EXPR:
4185 return pedantic_non_lvalue (fold_convert (type, arg1));
4186 case UNGE_EXPR:
4187 case UNGT_EXPR:
4188 if (flag_trapping_math)
4189 break;
4190 /* Fall through. */
4191 case GE_EXPR:
4192 case GT_EXPR:
4193 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4194 arg1 = fold_convert (lang_hooks.types.signed_type
4195 (TREE_TYPE (arg1)), arg1);
4196 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4197 return pedantic_non_lvalue (fold_convert (type, tem));
4198 case UNLE_EXPR:
4199 case UNLT_EXPR:
4200 if (flag_trapping_math)
4201 break;
4202 case LE_EXPR:
4203 case LT_EXPR:
4204 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4205 arg1 = fold_convert (lang_hooks.types.signed_type
4206 (TREE_TYPE (arg1)), arg1);
4207 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4208 return negate_expr (fold_convert (type, tem));
4209 default:
4210 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4211 break;
4214 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4215 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4216 both transformations are correct when A is NaN: A != 0
4217 is then true, and A == 0 is false. */
4219 if (integer_zerop (arg01) && integer_zerop (arg2))
4221 if (comp_code == NE_EXPR)
4222 return pedantic_non_lvalue (fold_convert (type, arg1));
4223 else if (comp_code == EQ_EXPR)
4224 return fold_convert (type, integer_zero_node);
4227 /* Try some transformations of A op B ? A : B.
4229 A == B? A : B same as B
4230 A != B? A : B same as A
4231 A >= B? A : B same as max (A, B)
4232 A > B? A : B same as max (B, A)
4233 A <= B? A : B same as min (A, B)
4234 A < B? A : B same as min (B, A)
4236 As above, these transformations don't work in the presence
4237 of signed zeros. For example, if A and B are zeros of
4238 opposite sign, the first two transformations will change
4239 the sign of the result. In the last four, the original
4240 expressions give different results for (A=+0, B=-0) and
4241 (A=-0, B=+0), but the transformed expressions do not.
4243 The first two transformations are correct if either A or B
4244 is a NaN. In the first transformation, the condition will
4245 be false, and B will indeed be chosen. In the case of the
4246 second transformation, the condition A != B will be true,
4247 and A will be chosen.
4249 The conversions to max() and min() are not correct if B is
4250 a number and A is not. The conditions in the original
4251 expressions will be false, so all four give B. The min()
4252 and max() versions would give a NaN instead. */
4253 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4255 tree comp_op0 = arg00;
4256 tree comp_op1 = arg01;
4257 tree comp_type = TREE_TYPE (comp_op0);
4259 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4260 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4262 comp_type = type;
4263 comp_op0 = arg1;
4264 comp_op1 = arg2;
4267 switch (comp_code)
4269 case EQ_EXPR:
4270 return pedantic_non_lvalue (fold_convert (type, arg2));
4271 case NE_EXPR:
4272 return pedantic_non_lvalue (fold_convert (type, arg1));
4273 case LE_EXPR:
4274 case LT_EXPR:
4275 case UNLE_EXPR:
4276 case UNLT_EXPR:
4277 /* In C++ a ?: expression can be an lvalue, so put the
4278 operand which will be used if they are equal first
4279 so that we can convert this back to the
4280 corresponding COND_EXPR. */
4281 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4283 comp_op0 = fold_convert (comp_type, comp_op0);
4284 comp_op1 = fold_convert (comp_type, comp_op1);
4285 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4286 ? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
4287 : fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
4288 return pedantic_non_lvalue (fold_convert (type, tem));
4290 break;
4291 case GE_EXPR:
4292 case GT_EXPR:
4293 case UNGE_EXPR:
4294 case UNGT_EXPR:
4295 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4297 comp_op0 = fold_convert (comp_type, comp_op0);
4298 comp_op1 = fold_convert (comp_type, comp_op1);
4299 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4300 ? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
4301 : fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
4302 return pedantic_non_lvalue (fold_convert (type, tem));
4304 break;
4305 case UNEQ_EXPR:
4306 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4307 return pedantic_non_lvalue (fold_convert (type, arg2));
4308 break;
4309 case LTGT_EXPR:
4310 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4311 return pedantic_non_lvalue (fold_convert (type, arg1));
4312 break;
4313 default:
4314 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4315 break;
4319 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4320 we might still be able to simplify this. For example,
4321 if C1 is one less or one more than C2, this might have started
4322 out as a MIN or MAX and been transformed by this function.
4323 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4325 if (INTEGRAL_TYPE_P (type)
4326 && TREE_CODE (arg01) == INTEGER_CST
4327 && TREE_CODE (arg2) == INTEGER_CST)
4328 switch (comp_code)
4330 case EQ_EXPR:
4331 /* We can replace A with C1 in this case. */
4332 arg1 = fold_convert (type, arg01);
4333 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4335 case LT_EXPR:
4336 /* If C1 is C2 + 1, this is min(A, C2). */
4337 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4338 OEP_ONLY_CONST)
4339 && operand_equal_p (arg01,
4340 const_binop (PLUS_EXPR, arg2,
4341 integer_one_node, 0),
4342 OEP_ONLY_CONST))
4343 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4344 type, arg1, arg2)));
4345 break;
4347 case LE_EXPR:
4348 /* If C1 is C2 - 1, this is min(A, C2). */
4349 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4350 OEP_ONLY_CONST)
4351 && operand_equal_p (arg01,
4352 const_binop (MINUS_EXPR, arg2,
4353 integer_one_node, 0),
4354 OEP_ONLY_CONST))
4355 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4356 type, arg1, arg2)));
4357 break;
4359 case GT_EXPR:
4360 /* If C1 is C2 - 1, this is max(A, C2). */
4361 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4362 OEP_ONLY_CONST)
4363 && operand_equal_p (arg01,
4364 const_binop (MINUS_EXPR, arg2,
4365 integer_one_node, 0),
4366 OEP_ONLY_CONST))
4367 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4368 type, arg1, arg2)));
4369 break;
4371 case GE_EXPR:
4372 /* If C1 is C2 + 1, this is max(A, C2). */
4373 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4374 OEP_ONLY_CONST)
4375 && operand_equal_p (arg01,
4376 const_binop (PLUS_EXPR, arg2,
4377 integer_one_node, 0),
4378 OEP_ONLY_CONST))
4379 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4380 type, arg1, arg2)));
4381 break;
4382 case NE_EXPR:
4383 break;
4384 default:
4385 gcc_unreachable ();
4388 return NULL_TREE;
/* Nonzero means it is worth rewriting short-circuit TRUTH_ANDIF/ORIF
   expressions into their non-short-circuit TRUTH_AND/OR forms when both
   operands are simple (see fold_range_test and fold_truthop below).
   Defaults to "branch cost at least 2"; targets may override.  */
4393 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4394 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4395 #endif
4397 /* EXP is some logical combination of boolean tests. See if we can
4398 merge it into some range test. Return the new tree if so. */
4400 static tree
4401 fold_range_test (tree exp)
4403 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4404 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4405 int in0_p, in1_p, in_p;
4406 tree low0, low1, low, high0, high1, high;
/* Decompose each operand into an equivalent range test: the operand is
   true iff its value lies inside (inN_p nonzero) or outside the
   interval [lowN, highN].  A null lhs/rhs means that operand could not
   be decomposed.  */
4407 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4408 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4409 tree tem;
4411 /* If this is an OR operation, invert both sides; we will invert
4412 again at the end. */
4413 if (or_op)
4414 in0_p = ! in0_p, in1_p = ! in1_p;
4416 /* If both expressions are the same, if we can merge the ranges, and we
4417 can build the range test, return it or it inverted. If one of the
4418 ranges is always true or always false, consider it to be the same
4419 expression as the other. */
4420 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4421 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4422 in1_p, low1, high1)
4423 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4424 lhs != 0 ? lhs
4425 : rhs != 0 ? rhs : integer_zero_node,
4426 in_p, low, high))))
4427 return or_op ? invert_truthvalue (tem) : tem;
4429 /* On machines where the branch cost is expensive, if this is a
4430 short-circuited branch and the underlying object on both sides
4431 is the same, make a non-short-circuit operation. */
4432 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4433 && lhs != 0 && rhs != 0
4434 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4435 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4436 && operand_equal_p (lhs, rhs, 0))
4438 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4439 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4440 which cases we can't do this. */
4441 if (simple_operand_p (lhs))
4442 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4443 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4444 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4445 TREE_OPERAND (exp, 1));
4447 else if (lang_hooks.decls.global_bindings_p () == 0
4448 && ! CONTAINS_PLACEHOLDER_P (lhs))
4450 tree common = save_expr (lhs);
/* Rebuild each side as a range check against the shared SAVE_EXPR,
   undoing the inversion performed above for OR operations.  */
4452 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4453 or_op ? ! in0_p : in0_p,
4454 low0, high0))
4455 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4456 or_op ? ! in1_p : in1_p,
4457 low1, high1))))
4458 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4459 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4460 TREE_TYPE (exp), lhs, rhs);
/* No merge was possible.  */
4464 return 0;
4467 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4468 bit value. Arrange things so the extra bits will be set to zero if and
4469 only if C is signed-extended to its full width. If MASK is nonzero,
4470 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4472 static tree
4473 unextend (tree c, int p, int unsignedp, tree mask)
4475 tree type = TREE_TYPE (c);
4476 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4477 tree temp;
/* Nothing to do when the field fills its entire mode, or when it is
   unsigned (zero extension already leaves the extra bits zero).  */
4479 if (p == modesize || unsignedp)
4480 return c;
4482 /* We work by getting just the sign bit into the low-order bit, then
4483 into the high-order bit, then sign-extend. We then XOR that value
4484 with C. */
4485 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4486 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4488 /* We must use a signed type in order to get an arithmetic right shift.
4489 However, we must also avoid introducing accidental overflows, so that
4490 a subsequent call to integer_zerop will work. Hence we must
4491 do the type conversion here. At this point, the constant is either
4492 zero or one, and the conversion to a signed type can never overflow.
4493 We could get an overflow if this conversion is done anywhere else. */
4494 if (TYPE_UNSIGNED (type))
4495 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
/* Shift the isolated sign bit to the top, then arithmetic-shift it back
   down so that it smears across all bits above position P-1.  */
4497 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4498 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4499 if (mask != 0)
4500 temp = const_binop (BIT_AND_EXPR, temp,
4501 fold_convert (TREE_TYPE (c), mask), 0);
4502 /* If necessary, convert the type back to match the type of C. */
4503 if (TYPE_UNSIGNED (type))
4504 temp = fold_convert (type, temp);
4506 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4509 /* Find ways of folding logical expressions of LHS and RHS:
4510 Try to merge two comparisons to the same innermost item.
4511 Look for range tests like "ch >= '0' && ch <= '9'".
4512 Look for combinations of simple terms on machines with expensive branches
4513 and evaluate the RHS unconditionally.
4515 For example, if we have p->a == 2 && p->b == 4 and we can make an
4516 object large enough to span both A and B, we can do this with a comparison
4517 against the object ANDed with the a mask.
4519 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4520 operations to do this with one comparison.
4522 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4523 function and the one above.
4525 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4526 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4528 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4529 two operands.
4531 We return the simplified tree or 0 if no optimization is possible. */
4533 static tree
4534 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4536 /* If this is the "or" of two comparisons, we can do something if
4537 the comparisons are NE_EXPR. If this is the "and", we can do something
4538 if the comparisons are EQ_EXPR. I.e.,
4539 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4541 WANTED_CODE is this operation code. For single bit fields, we can
4542 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4543 comparison for one-bit fields. */
/* Naming key for the locals below: the first letter selects the LHS or
   RHS comparison, the second selects that comparison's first (l) or
   second (r) operand; e.g. rl_arg is operand 0 of RHS (see the
   TREE_OPERAND extractions below).  The ln/rn prefixes describe the
   "new" wider field chosen by get_best_mode to span both fields on the
   left resp. right side.  */
4545 enum tree_code wanted_code;
4546 enum tree_code lcode, rcode;
4547 tree ll_arg, lr_arg, rl_arg, rr_arg;
4548 tree ll_inner, lr_inner, rl_inner, rr_inner;
4549 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4550 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4551 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4552 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4553 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4554 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4555 enum machine_mode lnmode, rnmode;
4556 tree ll_mask, lr_mask, rl_mask, rr_mask;
4557 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4558 tree l_const, r_const;
4559 tree lntype, rntype, result;
4560 int first_bit, end_bit;
4561 int volatilep;
4563 /* Start by getting the comparison codes. Fail if anything is volatile.
4564 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4565 it were surrounded with a NE_EXPR. */
4567 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4568 return 0;
4570 lcode = TREE_CODE (lhs);
4571 rcode = TREE_CODE (rhs);
4573 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4575 lhs = build2 (NE_EXPR, truth_type, lhs,
4576 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4577 lcode = NE_EXPR;
4580 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4582 rhs = build2 (NE_EXPR, truth_type, rhs,
4583 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4584 rcode = NE_EXPR;
4587 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4588 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4589 return 0;
4591 ll_arg = TREE_OPERAND (lhs, 0);
4592 lr_arg = TREE_OPERAND (lhs, 1);
4593 rl_arg = TREE_OPERAND (rhs, 0);
4594 rr_arg = TREE_OPERAND (rhs, 1);
4596 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4597 if (simple_operand_p (ll_arg)
4598 && simple_operand_p (lr_arg))
4600 tree result;
4601 if (operand_equal_p (ll_arg, rl_arg, 0)
4602 && operand_equal_p (lr_arg, rr_arg, 0))
4604 result = combine_comparisons (code, lcode, rcode,
4605 truth_type, ll_arg, lr_arg);
4606 if (result)
4607 return result;
/* Same, but with the second comparison's operands swapped, so the
   comparison code must be swapped too.  */
4609 else if (operand_equal_p (ll_arg, rr_arg, 0)
4610 && operand_equal_p (lr_arg, rl_arg, 0))
4612 result = combine_comparisons (code, lcode,
4613 swap_tree_comparison (rcode),
4614 truth_type, ll_arg, lr_arg);
4615 if (result)
4616 return result;
/* Canonicalize the short-circuit codes to their plain forms.  */
4620 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4621 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4623 /* If the RHS can be evaluated unconditionally and its operands are
4624 simple, it wins to evaluate the RHS unconditionally on machines
4625 with expensive branches. In this case, this isn't a comparison
4626 that can be merged. Avoid doing this if the RHS is a floating-point
4627 comparison since those can trap. */
4629 if (BRANCH_COST >= 2
4630 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4631 && simple_operand_p (rl_arg)
4632 && simple_operand_p (rr_arg))
4634 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4635 if (code == TRUTH_OR_EXPR
4636 && lcode == NE_EXPR && integer_zerop (lr_arg)
4637 && rcode == NE_EXPR && integer_zerop (rr_arg)
4638 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4639 return build2 (NE_EXPR, truth_type,
4640 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4641 ll_arg, rl_arg),
4642 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4644 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4645 if (code == TRUTH_AND_EXPR
4646 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4647 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4648 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4649 return build2 (EQ_EXPR, truth_type,
4650 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4651 ll_arg, rl_arg),
4652 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4654 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4655 return build2 (code, truth_type, lhs, rhs);
4658 /* See if the comparisons can be merged. Then get all the parameters for
4659 each side. */
4661 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4662 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4663 return 0;
4665 volatilep = 0;
4666 ll_inner = decode_field_reference (ll_arg,
4667 &ll_bitsize, &ll_bitpos, &ll_mode,
4668 &ll_unsignedp, &volatilep, &ll_mask,
4669 &ll_and_mask);
4670 lr_inner = decode_field_reference (lr_arg,
4671 &lr_bitsize, &lr_bitpos, &lr_mode,
4672 &lr_unsignedp, &volatilep, &lr_mask,
4673 &lr_and_mask);
4674 rl_inner = decode_field_reference (rl_arg,
4675 &rl_bitsize, &rl_bitpos, &rl_mode,
4676 &rl_unsignedp, &volatilep, &rl_mask,
4677 &rl_and_mask);
4678 rr_inner = decode_field_reference (rr_arg,
4679 &rr_bitsize, &rr_bitpos, &rr_mode,
4680 &rr_unsignedp, &volatilep, &rr_mask,
4681 &rr_and_mask);
4683 /* It must be true that the inner operation on the lhs of each
4684 comparison must be the same if we are to be able to do anything.
4685 Then see if we have constants. If not, the same must be true for
4686 the rhs's. */
4687 if (volatilep || ll_inner == 0 || rl_inner == 0
4688 || ! operand_equal_p (ll_inner, rl_inner, 0))
4689 return 0;
4691 if (TREE_CODE (lr_arg) == INTEGER_CST
4692 && TREE_CODE (rr_arg) == INTEGER_CST)
4693 l_const = lr_arg, r_const = rr_arg;
4694 else if (lr_inner == 0 || rr_inner == 0
4695 || ! operand_equal_p (lr_inner, rr_inner, 0))
4696 return 0;
4697 else
4698 l_const = r_const = 0;
4700 /* If either comparison code is not correct for our logical operation,
4701 fail. However, we can convert a one-bit comparison against zero into
4702 the opposite comparison against that bit being set in the field. */
4704 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4705 if (lcode != wanted_code)
4707 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4709 /* Make the left operand unsigned, since we are only interested
4710 in the value of one bit. Otherwise we are doing the wrong
4711 thing below. */
4712 ll_unsignedp = 1;
4713 l_const = ll_mask;
4715 else
4716 return 0;
4719 /* This is analogous to the code for l_const above. */
4720 if (rcode != wanted_code)
4722 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4724 rl_unsignedp = 1;
4725 r_const = rl_mask;
4727 else
4728 return 0;
4731 /* After this point all optimizations will generate bit-field
4732 references, which we might not want. */
4733 if (! lang_hooks.can_use_bit_fields_p ())
4734 return 0;
4736 /* See if we can find a mode that contains both fields being compared on
4737 the left. If we can't, fail. Otherwise, update all constants and masks
4738 to be relative to a field of that size. */
4739 first_bit = MIN (ll_bitpos, rl_bitpos);
4740 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4741 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4742 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4743 volatilep);
4744 if (lnmode == VOIDmode)
4745 return 0;
4747 lnbitsize = GET_MODE_BITSIZE (lnmode);
4748 lnbitpos = first_bit & ~ (lnbitsize - 1);
4749 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4750 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4752 if (BYTES_BIG_ENDIAN)
4754 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4755 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4758 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4759 size_int (xll_bitpos), 0);
4760 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4761 size_int (xrl_bitpos), 0);
/* Widen each constant to the new field.  If any bits fall outside its
   field's mask, the whole comparison is decided at compile time.  */
4763 if (l_const)
4765 l_const = fold_convert (lntype, l_const);
4766 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4767 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4768 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4769 fold (build1 (BIT_NOT_EXPR,
4770 lntype, ll_mask)),
4771 0)))
4773 warning ("comparison is always %d", wanted_code == NE_EXPR);
4775 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4778 if (r_const)
4780 r_const = fold_convert (lntype, r_const);
4781 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4782 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4783 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4784 fold (build1 (BIT_NOT_EXPR,
4785 lntype, rl_mask)),
4786 0)))
4788 warning ("comparison is always %d", wanted_code == NE_EXPR);
4790 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4794 /* If the right sides are not constant, do the same for it. Also,
4795 disallow this optimization if a size or signedness mismatch occurs
4796 between the left and right sides. */
4797 if (l_const == 0)
4799 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4800 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4801 /* Make sure the two fields on the right
4802 correspond to the left without being swapped. */
4803 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4804 return 0;
4806 first_bit = MIN (lr_bitpos, rr_bitpos);
4807 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4808 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4809 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4810 volatilep);
4811 if (rnmode == VOIDmode)
4812 return 0;
4814 rnbitsize = GET_MODE_BITSIZE (rnmode);
4815 rnbitpos = first_bit & ~ (rnbitsize - 1);
4816 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4817 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4819 if (BYTES_BIG_ENDIAN)
4821 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4822 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4825 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4826 size_int (xlr_bitpos), 0);
4827 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4828 size_int (xrr_bitpos), 0);
4830 /* Make a mask that corresponds to both fields being compared.
4831 Do this for both items being compared. If the operands are the
4832 same size and the bits being compared are in the same position
4833 then we can do this by masking both and comparing the masked
4834 results. */
4835 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4836 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4837 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4839 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4840 ll_unsignedp || rl_unsignedp);
4841 if (! all_ones_mask_p (ll_mask, lnbitsize))
4842 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4844 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4845 lr_unsignedp || rr_unsignedp);
4846 if (! all_ones_mask_p (lr_mask, rnbitsize))
4847 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4849 return build2 (wanted_code, truth_type, lhs, rhs);
4852 /* There is still another way we can do something: If both pairs of
4853 fields being compared are adjacent, we may be able to make a wider
4854 field containing them both.
4856 Note that we still must mask the lhs/rhs expressions. Furthermore,
4857 the mask must be shifted to account for the shift done by
4858 make_bit_field_ref. */
4859 if ((ll_bitsize + ll_bitpos == rl_bitpos
4860 && lr_bitsize + lr_bitpos == rr_bitpos)
4861 || (ll_bitpos == rl_bitpos + rl_bitsize
4862 && lr_bitpos == rr_bitpos + rr_bitsize))
4864 tree type;
4866 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4867 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4868 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4869 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4871 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4872 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4873 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4874 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4876 /* Convert to the smaller type before masking out unwanted bits. */
4877 type = lntype;
4878 if (lntype != rntype)
4880 if (lnbitsize > rnbitsize)
4882 lhs = fold_convert (rntype, lhs);
4883 ll_mask = fold_convert (rntype, ll_mask);
4884 type = rntype;
4886 else if (lnbitsize < rnbitsize)
4888 rhs = fold_convert (lntype, rhs);
4889 lr_mask = fold_convert (lntype, lr_mask);
4890 type = lntype;
4894 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4895 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4897 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4898 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4900 return build2 (wanted_code, truth_type, lhs, rhs);
4903 return 0;
4906 /* Handle the case of comparisons with constants. If there is something in
4907 common between the masks, those bits of the constants must be the same.
4908 If not, the condition is always false. Test for this to avoid generating
4909 incorrect code below. */
4910 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4911 if (! integer_zerop (result)
4912 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4913 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4915 if (wanted_code == NE_EXPR)
4917 warning ("%<or%> of unmatched not-equal tests is always 1");
4918 return constant_boolean_node (true, truth_type);
4920 else
4922 warning ("%<and%> of mutually exclusive equal-tests is always 0");
4923 return constant_boolean_node (false, truth_type);
4927 /* Construct the expression we will return. First get the component
4928 reference we will make. Unless the mask is all ones the width of
4929 that field, perform the mask operation. Then compare with the
4930 merged constant. */
4931 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4932 ll_unsignedp || rl_unsignedp);
4934 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4935 if (! all_ones_mask_p (ll_mask, lnbitsize))
4936 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4938 return build2 (wanted_code, truth_type, result,
4939 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4942 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4943 constant. */
4945 static tree
4946 optimize_minmax_comparison (tree t)
4948 tree type = TREE_TYPE (t);
4949 tree arg0 = TREE_OPERAND (t, 0);
4950 enum tree_code op_code;
4951 tree comp_const = TREE_OPERAND (t, 1);
4952 tree minmax_const;
4953 int consts_equal, consts_lt;
4954 tree inner;
4956 STRIP_SIGN_NOPS (arg0);
4958 op_code = TREE_CODE (arg0);
/* NOTE(review): arg0's operands are read before op_code is validated
   below; this assumes T's first operand is always a binary node when
   this function is reached -- confirm against the callers.  */
4959 minmax_const = TREE_OPERAND (arg0, 1);
/* consts_lt is nonzero when minmax_const < comp_const.  */
4960 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4961 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4962 inner = TREE_OPERAND (arg0, 0);
4964 /* If something does not permit us to optimize, return the original tree. */
4965 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4966 || TREE_CODE (comp_const) != INTEGER_CST
4967 || TREE_CONSTANT_OVERFLOW (comp_const)
4968 || TREE_CODE (minmax_const) != INTEGER_CST
4969 || TREE_CONSTANT_OVERFLOW (minmax_const))
4970 return t;
4972 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4973 and GT_EXPR, doing the rest with recursive calls using logical
4974 simplifications. */
4975 switch (TREE_CODE (t))
4977 case NE_EXPR: case LT_EXPR: case LE_EXPR:
/* Fold the inverted comparison, then invert back.  */
4978 return
4979 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4981 case GE_EXPR:
/* a >= b is (a == b) || (a > b); fold each half separately.  */
4982 return
4983 fold (build2 (TRUTH_ORIF_EXPR, type,
4984 optimize_minmax_comparison
4985 (build2 (EQ_EXPR, type, arg0, comp_const)),
4986 optimize_minmax_comparison
4987 (build2 (GT_EXPR, type, arg0, comp_const))));
4989 case EQ_EXPR:
4990 if (op_code == MAX_EXPR && consts_equal)
4991 /* MAX (X, 0) == 0 -> X <= 0 */
4992 return fold (build2 (LE_EXPR, type, inner, comp_const));
4994 else if (op_code == MAX_EXPR && consts_lt)
4995 /* MAX (X, 0) == 5 -> X == 5 */
4996 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4998 else if (op_code == MAX_EXPR)
4999 /* MAX (X, 0) == -1 -> false */
5000 return omit_one_operand (type, integer_zero_node, inner);
5002 else if (consts_equal)
5003 /* MIN (X, 0) == 0 -> X >= 0 */
5004 return fold (build2 (GE_EXPR, type, inner, comp_const));
5006 else if (consts_lt)
5007 /* MIN (X, 0) == 5 -> false */
5008 return omit_one_operand (type, integer_zero_node, inner);
5010 else
5011 /* MIN (X, 0) == -1 -> X == -1 */
5012 return fold (build2 (EQ_EXPR, type, inner, comp_const));
5014 case GT_EXPR:
5015 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5016 /* MAX (X, 0) > 0 -> X > 0
5017 MAX (X, 0) > 5 -> X > 5 */
5018 return fold (build2 (GT_EXPR, type, inner, comp_const));
5020 else if (op_code == MAX_EXPR)
5021 /* MAX (X, 0) > -1 -> true */
5022 return omit_one_operand (type, integer_one_node, inner);
5024 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5025 /* MIN (X, 0) > 0 -> false
5026 MIN (X, 0) > 5 -> false */
5027 return omit_one_operand (type, integer_zero_node, inner);
5029 else
5030 /* MIN (X, 0) > -1 -> X > -1 */
5031 return fold (build2 (GT_EXPR, type, inner, comp_const));
5033 default:
5034 return t;
5038 /* T is an integer expression that is being multiplied, divided, or taken a
5039 modulus (CODE says which and what kind of divide or modulus) by a
5040 constant C. See if we can eliminate that operation by folding it with
5041 other operations already in T. WIDE_TYPE, if non-null, is a type that
5042 should be used for the computation if wider than our type.
5044 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5045 (X * 2) + (Y * 4). We must, however, be assured that either the original
5046 expression would not overflow or that overflow is undefined for the type
5047 in the language in question.
5049 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5050 the machine has a multiply-accumulate insn or that this is part of an
5051 addressing calculation.
5053 If we return a non-null expression, it is an equivalent form of the
5054 original computation, but need not be in the original type. */
5056 static tree
5057 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5059 /* To avoid exponential search depth, refuse to allow recursion past
5060 three levels. Beyond that (1) it's highly unlikely that we'll find
5061 something interesting and (2) we've probably processed it before
5062 when we built the inner expression. */
5064 static int depth;
5065 tree ret;
5067 if (depth > 3)
5068 return NULL;
5070 depth++;
5071 ret = extract_muldiv_1 (t, c, code, wide_type);
5072 depth--;
5074 return ret;
5077 static tree
5078 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5080 tree type = TREE_TYPE (t);
5081 enum tree_code tcode = TREE_CODE (t);
5082 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5083 > GET_MODE_SIZE (TYPE_MODE (type)))
5084 ? wide_type : type);
5085 tree t1, t2;
5086 int same_p = tcode == code;
5087 tree op0 = NULL_TREE, op1 = NULL_TREE;
5089 /* Don't deal with constants of zero here; they confuse the code below. */
5090 if (integer_zerop (c))
5091 return NULL_TREE;
5093 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5094 op0 = TREE_OPERAND (t, 0);
5096 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5097 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5099 /* Note that we need not handle conditional operations here since fold
5100 already handles those cases. So just do arithmetic here. */
5101 switch (tcode)
5103 case INTEGER_CST:
5104 /* For a constant, we can always simplify if we are a multiply
5105 or (for divide and modulus) if it is a multiple of our constant. */
5106 if (code == MULT_EXPR
5107 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5108 return const_binop (code, fold_convert (ctype, t),
5109 fold_convert (ctype, c), 0);
5110 break;
5112 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5113 /* If op0 is an expression ... */
5114 if ((COMPARISON_CLASS_P (op0)
5115 || UNARY_CLASS_P (op0)
5116 || BINARY_CLASS_P (op0)
5117 || EXPRESSION_CLASS_P (op0))
5118 /* ... and is unsigned, and its type is smaller than ctype,
5119 then we cannot pass through as widening. */
5120 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5121 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5122 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5123 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5124 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5125 /* ... or this is a truncation (t is narrower than op0),
5126 then we cannot pass through this narrowing. */
5127 || (GET_MODE_SIZE (TYPE_MODE (type))
5128 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5129 /* ... or signedness changes for division or modulus,
5130 then we cannot pass through this conversion. */
5131 || (code != MULT_EXPR
5132 && (TYPE_UNSIGNED (ctype)
5133 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5134 break;
5136 /* Pass the constant down and see if we can make a simplification. If
5137 we can, replace this expression with the inner simplification for
5138 possible later conversion to our or some other type. */
5139 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5140 && TREE_CODE (t2) == INTEGER_CST
5141 && ! TREE_CONSTANT_OVERFLOW (t2)
5142 && (0 != (t1 = extract_muldiv (op0, t2, code,
5143 code == MULT_EXPR
5144 ? ctype : NULL_TREE))))
5145 return t1;
5146 break;
5148 case ABS_EXPR:
5149 /* If widening the type changes it from signed to unsigned, then we
5150 must avoid building ABS_EXPR itself as unsigned. */
5151 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5153 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5154 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5156 t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
5157 return fold_convert (ctype, t1);
5159 break;
5161 /* FALLTHROUGH */
5162 case NEGATE_EXPR:
5163 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5164 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5165 break;
5167 case MIN_EXPR: case MAX_EXPR:
5168 /* If widening the type changes the signedness, then we can't perform
5169 this optimization as that changes the result. */
5170 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5171 break;
5173 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5174 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5175 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5177 if (tree_int_cst_sgn (c) < 0)
5178 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5180 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5181 fold_convert (ctype, t2)));
5183 break;
5185 case LSHIFT_EXPR: case RSHIFT_EXPR:
5186 /* If the second operand is constant, this is a multiplication
5187 or floor division, by a power of two, so we can treat it that
5188 way unless the multiplier or divisor overflows. Signed
5189 left-shift overflow is implementation-defined rather than
5190 undefined in C90, so do not convert signed left shift into
5191 multiplication. */
5192 if (TREE_CODE (op1) == INTEGER_CST
5193 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5194 /* const_binop may not detect overflow correctly,
5195 so check for it explicitly here. */
5196 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5197 && TREE_INT_CST_HIGH (op1) == 0
5198 && 0 != (t1 = fold_convert (ctype,
5199 const_binop (LSHIFT_EXPR,
5200 size_one_node,
5201 op1, 0)))
5202 && ! TREE_OVERFLOW (t1))
5203 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5204 ? MULT_EXPR : FLOOR_DIV_EXPR,
5205 ctype, fold_convert (ctype, op0), t1),
5206 c, code, wide_type);
5207 break;
5209 case PLUS_EXPR: case MINUS_EXPR:
5210 /* See if we can eliminate the operation on both sides. If we can, we
5211 can return a new PLUS or MINUS. If we can't, the only remaining
5212 cases where we can do anything are if the second operand is a
5213 constant. */
5214 t1 = extract_muldiv (op0, c, code, wide_type);
5215 t2 = extract_muldiv (op1, c, code, wide_type);
5216 if (t1 != 0 && t2 != 0
5217 && (code == MULT_EXPR
5218 /* If not multiplication, we can only do this if both operands
5219 are divisible by c. */
5220 || (multiple_of_p (ctype, op0, c)
5221 && multiple_of_p (ctype, op1, c))))
5222 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5223 fold_convert (ctype, t2)));
5225 /* If this was a subtraction, negate OP1 and set it to be an addition.
5226 This simplifies the logic below. */
5227 if (tcode == MINUS_EXPR)
5228 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5230 if (TREE_CODE (op1) != INTEGER_CST)
5231 break;
5233 /* If either OP1 or C are negative, this optimization is not safe for
5234 some of the division and remainder types while for others we need
5235 to change the code. */
5236 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5238 if (code == CEIL_DIV_EXPR)
5239 code = FLOOR_DIV_EXPR;
5240 else if (code == FLOOR_DIV_EXPR)
5241 code = CEIL_DIV_EXPR;
5242 else if (code != MULT_EXPR
5243 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5244 break;
5247 /* If it's a multiply or a division/modulus operation of a multiple
5248 of our constant, do the operation and verify it doesn't overflow. */
5249 if (code == MULT_EXPR
5250 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5252 op1 = const_binop (code, fold_convert (ctype, op1),
5253 fold_convert (ctype, c), 0);
5254 /* We allow the constant to overflow with wrapping semantics. */
5255 if (op1 == 0
5256 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5257 break;
5259 else
5260 break;
5262 /* If we have an unsigned type that is not a sizetype, we cannot widen
5263 the operation since it will change the result if the original
5264 computation overflowed. */
5265 if (TYPE_UNSIGNED (ctype)
5266 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5267 && ctype != type)
5268 break;
5270 /* If we were able to eliminate our operation from the first side,
5271 apply our operation to the second side and reform the PLUS. */
5272 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5273 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5275 /* The last case is if we are a multiply. In that case, we can
5276 apply the distributive law to commute the multiply and addition
5277 if the multiplication of the constants doesn't overflow. */
5278 if (code == MULT_EXPR)
5279 return fold (build2 (tcode, ctype,
5280 fold (build2 (code, ctype,
5281 fold_convert (ctype, op0),
5282 fold_convert (ctype, c))),
5283 op1));
5285 break;
5287 case MULT_EXPR:
5288 /* We have a special case here if we are doing something like
5289 (C * 8) % 4 since we know that's zero. */
5290 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5291 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5292 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5293 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5294 return omit_one_operand (type, integer_zero_node, op0);
5296 /* ... fall through ... */
5298 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5299 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5300 /* If we can extract our operation from the LHS, do so and return a
5301 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5302 do something only if the second operand is a constant. */
5303 if (same_p
5304 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5305 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5306 fold_convert (ctype, op1)));
5307 else if (tcode == MULT_EXPR && code == MULT_EXPR
5308 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5309 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5310 fold_convert (ctype, t1)));
5311 else if (TREE_CODE (op1) != INTEGER_CST)
5312 return 0;
5314 /* If these are the same operation types, we can associate them
5315 assuming no overflow. */
5316 if (tcode == code
5317 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5318 fold_convert (ctype, c), 0))
5319 && ! TREE_OVERFLOW (t1))
5320 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5322 /* If these operations "cancel" each other, we have the main
5323 optimizations of this pass, which occur when either constant is a
5324 multiple of the other, in which case we replace this with either an
5325 operation or CODE or TCODE.
5327 If we have an unsigned type that is not a sizetype, we cannot do
5328 this since it will change the result if the original computation
5329 overflowed. */
5330 if ((! TYPE_UNSIGNED (ctype)
5331 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5332 && ! flag_wrapv
5333 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5334 || (tcode == MULT_EXPR
5335 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5336 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5338 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5339 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5340 fold_convert (ctype,
5341 const_binop (TRUNC_DIV_EXPR,
5342 op1, c, 0))));
5343 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5344 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5345 fold_convert (ctype,
5346 const_binop (TRUNC_DIV_EXPR,
5347 c, op1, 0))));
5349 break;
5351 default:
5352 break;
5355 return 0;
5358 /* Return a node which has the indicated constant VALUE (either 0 or
5359 1), and is of the indicated TYPE. */
5361 tree
5362 constant_boolean_node (int value, tree type)
5364 if (type == integer_type_node)
5365 return value ? integer_one_node : integer_zero_node;
5366 else if (type == boolean_type_node)
5367 return value ? boolean_true_node : boolean_false_node;
5368 else if (TREE_CODE (type) == BOOLEAN_TYPE)
5369 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5370 : integer_zero_node);
5371 else
5372 return build_int_cst (type, value);
5375 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5376 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5377 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5378 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5379 COND is the first argument to CODE; otherwise (as in the example
5380 given here), it is the second argument. TYPE is the type of the
5381 original expression. Return NULL_TREE if no simplification is
5382 possible. */
5384 static tree
5385 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
5386 tree cond, tree arg, int cond_first_p)
5388 tree test, true_value, false_value;
5389 tree lhs = NULL_TREE;
5390 tree rhs = NULL_TREE;
5392 /* This transformation is only worthwhile if we don't have to wrap
5393 arg in a SAVE_EXPR, and the operation can be simplified on atleast
5394 one of the branches once its pushed inside the COND_EXPR. */
5395 if (!TREE_CONSTANT (arg))
5396 return NULL_TREE;
5398 if (TREE_CODE (cond) == COND_EXPR)
5400 test = TREE_OPERAND (cond, 0);
5401 true_value = TREE_OPERAND (cond, 1);
5402 false_value = TREE_OPERAND (cond, 2);
5403 /* If this operand throws an expression, then it does not make
5404 sense to try to perform a logical or arithmetic operation
5405 involving it. */
5406 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5407 lhs = true_value;
5408 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5409 rhs = false_value;
5411 else
5413 tree testtype = TREE_TYPE (cond);
5414 test = cond;
5415 true_value = constant_boolean_node (true, testtype);
5416 false_value = constant_boolean_node (false, testtype);
5419 if (lhs == 0)
5420 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5421 : build2 (code, type, arg, true_value));
5422 if (rhs == 0)
5423 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5424 : build2 (code, type, arg, false_value));
5426 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5427 return fold_convert (type, test);
5431 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5433 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5434 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5435 ADDEND is the same as X.
5437 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5438 and finite. The problematic cases are when X is zero, and its mode
5439 has signed zeros. In the case of rounding towards -infinity,
5440 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5441 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5443 static bool
5444 fold_real_zero_addition_p (tree type, tree addend, int negate)
5446 if (!real_zerop (addend))
5447 return false;
5449 /* Don't allow the fold with -fsignaling-nans. */
5450 if (HONOR_SNANS (TYPE_MODE (type)))
5451 return false;
5453 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5454 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5455 return true;
5457 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5458 if (TREE_CODE (addend) == REAL_CST
5459 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5460 negate = !negate;
5462 /* The mode has signed zeros, and we have to honor their sign.
5463 In this situation, there is only one case we can return true for.
5464 X - 0 is the same as X unless rounding towards -infinity is
5465 supported. */
5466 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5469 /* Subroutine of fold() that checks comparisons of built-in math
5470 functions against real constants.
5472 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5473 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5474 is the type of the result and ARG0 and ARG1 are the operands of the
5475 comparison. ARG1 must be a TREE_REAL_CST.
5477 The function returns the constant folded tree if a simplification
5478 can be made, and NULL_TREE otherwise. */
5480 static tree
5481 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5482 tree type, tree arg0, tree arg1)
5484 REAL_VALUE_TYPE c;
5486 if (BUILTIN_SQRT_P (fcode))
5488 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5489 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5491 c = TREE_REAL_CST (arg1);
5492 if (REAL_VALUE_NEGATIVE (c))
5494 /* sqrt(x) < y is always false, if y is negative. */
5495 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5496 return omit_one_operand (type, integer_zero_node, arg);
5498 /* sqrt(x) > y is always true, if y is negative and we
5499 don't care about NaNs, i.e. negative values of x. */
5500 if (code == NE_EXPR || !HONOR_NANS (mode))
5501 return omit_one_operand (type, integer_one_node, arg);
5503 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5504 return fold (build2 (GE_EXPR, type, arg,
5505 build_real (TREE_TYPE (arg), dconst0)));
5507 else if (code == GT_EXPR || code == GE_EXPR)
5509 REAL_VALUE_TYPE c2;
5511 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5512 real_convert (&c2, mode, &c2);
5514 if (REAL_VALUE_ISINF (c2))
5516 /* sqrt(x) > y is x == +Inf, when y is very large. */
5517 if (HONOR_INFINITIES (mode))
5518 return fold (build2 (EQ_EXPR, type, arg,
5519 build_real (TREE_TYPE (arg), c2)));
5521 /* sqrt(x) > y is always false, when y is very large
5522 and we don't care about infinities. */
5523 return omit_one_operand (type, integer_zero_node, arg);
5526 /* sqrt(x) > c is the same as x > c*c. */
5527 return fold (build2 (code, type, arg,
5528 build_real (TREE_TYPE (arg), c2)));
5530 else if (code == LT_EXPR || code == LE_EXPR)
5532 REAL_VALUE_TYPE c2;
5534 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5535 real_convert (&c2, mode, &c2);
5537 if (REAL_VALUE_ISINF (c2))
5539 /* sqrt(x) < y is always true, when y is a very large
5540 value and we don't care about NaNs or Infinities. */
5541 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5542 return omit_one_operand (type, integer_one_node, arg);
5544 /* sqrt(x) < y is x != +Inf when y is very large and we
5545 don't care about NaNs. */
5546 if (! HONOR_NANS (mode))
5547 return fold (build2 (NE_EXPR, type, arg,
5548 build_real (TREE_TYPE (arg), c2)));
5550 /* sqrt(x) < y is x >= 0 when y is very large and we
5551 don't care about Infinities. */
5552 if (! HONOR_INFINITIES (mode))
5553 return fold (build2 (GE_EXPR, type, arg,
5554 build_real (TREE_TYPE (arg), dconst0)));
5556 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5557 if (lang_hooks.decls.global_bindings_p () != 0
5558 || CONTAINS_PLACEHOLDER_P (arg))
5559 return NULL_TREE;
5561 arg = save_expr (arg);
5562 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5563 fold (build2 (GE_EXPR, type, arg,
5564 build_real (TREE_TYPE (arg),
5565 dconst0))),
5566 fold (build2 (NE_EXPR, type, arg,
5567 build_real (TREE_TYPE (arg),
5568 c2)))));
5571 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5572 if (! HONOR_NANS (mode))
5573 return fold (build2 (code, type, arg,
5574 build_real (TREE_TYPE (arg), c2)));
5576 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5577 if (lang_hooks.decls.global_bindings_p () == 0
5578 && ! CONTAINS_PLACEHOLDER_P (arg))
5580 arg = save_expr (arg);
5581 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5582 fold (build2 (GE_EXPR, type, arg,
5583 build_real (TREE_TYPE (arg),
5584 dconst0))),
5585 fold (build2 (code, type, arg,
5586 build_real (TREE_TYPE (arg),
5587 c2)))));
5592 return NULL_TREE;
5595 /* Subroutine of fold() that optimizes comparisons against Infinities,
5596 either +Inf or -Inf.
5598 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5599 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5600 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5602 The function returns the constant folded tree if a simplification
5603 can be made, and NULL_TREE otherwise. */
5605 static tree
5606 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5608 enum machine_mode mode;
5609 REAL_VALUE_TYPE max;
5610 tree temp;
5611 bool neg;
5613 mode = TYPE_MODE (TREE_TYPE (arg0));
5615 /* For negative infinity swap the sense of the comparison. */
5616 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5617 if (neg)
5618 code = swap_tree_comparison (code);
5620 switch (code)
5622 case GT_EXPR:
5623 /* x > +Inf is always false, if with ignore sNANs. */
5624 if (HONOR_SNANS (mode))
5625 return NULL_TREE;
5626 return omit_one_operand (type, integer_zero_node, arg0);
5628 case LE_EXPR:
5629 /* x <= +Inf is always true, if we don't case about NaNs. */
5630 if (! HONOR_NANS (mode))
5631 return omit_one_operand (type, integer_one_node, arg0);
5633 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5634 if (lang_hooks.decls.global_bindings_p () == 0
5635 && ! CONTAINS_PLACEHOLDER_P (arg0))
5637 arg0 = save_expr (arg0);
5638 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5640 break;
5642 case EQ_EXPR:
5643 case GE_EXPR:
5644 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5645 real_maxval (&max, neg, mode);
5646 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5647 arg0, build_real (TREE_TYPE (arg0), max)));
5649 case LT_EXPR:
5650 /* x < +Inf is always equal to x <= DBL_MAX. */
5651 real_maxval (&max, neg, mode);
5652 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5653 arg0, build_real (TREE_TYPE (arg0), max)));
5655 case NE_EXPR:
5656 /* x != +Inf is always equal to !(x > DBL_MAX). */
5657 real_maxval (&max, neg, mode);
5658 if (! HONOR_NANS (mode))
5659 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5660 arg0, build_real (TREE_TYPE (arg0), max)));
5662 /* The transformation below creates non-gimple code and thus is
5663 not appropriate if we are in gimple form. */
5664 if (in_gimple_form)
5665 return NULL_TREE;
5667 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5668 arg0, build_real (TREE_TYPE (arg0), max)));
5669 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5671 default:
5672 break;
5675 return NULL_TREE;
5678 /* Subroutine of fold() that optimizes comparisons of a division by
5679 a nonzero integer constant against an integer constant, i.e.
5680 X/C1 op C2.
5682 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5683 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5684 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5686 The function returns the constant folded tree if a simplification
5687 can be made, and NULL_TREE otherwise. */
5689 static tree
5690 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5692 tree prod, tmp, hi, lo;
5693 tree arg00 = TREE_OPERAND (arg0, 0);
5694 tree arg01 = TREE_OPERAND (arg0, 1);
5695 unsigned HOST_WIDE_INT lpart;
5696 HOST_WIDE_INT hpart;
5697 int overflow;
5699 /* We have to do this the hard way to detect unsigned overflow.
5700 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5701 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5702 TREE_INT_CST_HIGH (arg01),
5703 TREE_INT_CST_LOW (arg1),
5704 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5705 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5706 prod = force_fit_type (prod, -1, overflow, false);
5708 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5710 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5711 lo = prod;
5713 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5714 overflow = add_double (TREE_INT_CST_LOW (prod),
5715 TREE_INT_CST_HIGH (prod),
5716 TREE_INT_CST_LOW (tmp),
5717 TREE_INT_CST_HIGH (tmp),
5718 &lpart, &hpart);
5719 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5720 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5721 TREE_CONSTANT_OVERFLOW (prod));
5723 else if (tree_int_cst_sgn (arg01) >= 0)
5725 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5726 switch (tree_int_cst_sgn (arg1))
5728 case -1:
5729 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5730 hi = prod;
5731 break;
5733 case 0:
5734 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5735 hi = tmp;
5736 break;
5738 case 1:
5739 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5740 lo = prod;
5741 break;
5743 default:
5744 gcc_unreachable ();
5747 else
5749 /* A negative divisor reverses the relational operators. */
5750 code = swap_tree_comparison (code);
5752 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5753 switch (tree_int_cst_sgn (arg1))
5755 case -1:
5756 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5757 lo = prod;
5758 break;
5760 case 0:
5761 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5762 lo = tmp;
5763 break;
5765 case 1:
5766 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5767 hi = prod;
5768 break;
5770 default:
5771 gcc_unreachable ();
5775 switch (code)
5777 case EQ_EXPR:
5778 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5779 return omit_one_operand (type, integer_zero_node, arg00);
5780 if (TREE_OVERFLOW (hi))
5781 return fold (build2 (GE_EXPR, type, arg00, lo));
5782 if (TREE_OVERFLOW (lo))
5783 return fold (build2 (LE_EXPR, type, arg00, hi));
5784 return build_range_check (type, arg00, 1, lo, hi);
5786 case NE_EXPR:
5787 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5788 return omit_one_operand (type, integer_one_node, arg00);
5789 if (TREE_OVERFLOW (hi))
5790 return fold (build2 (LT_EXPR, type, arg00, lo));
5791 if (TREE_OVERFLOW (lo))
5792 return fold (build2 (GT_EXPR, type, arg00, hi));
5793 return build_range_check (type, arg00, 0, lo, hi);
5795 case LT_EXPR:
5796 if (TREE_OVERFLOW (lo))
5797 return omit_one_operand (type, integer_zero_node, arg00);
5798 return fold (build2 (LT_EXPR, type, arg00, lo));
5800 case LE_EXPR:
5801 if (TREE_OVERFLOW (hi))
5802 return omit_one_operand (type, integer_one_node, arg00);
5803 return fold (build2 (LE_EXPR, type, arg00, hi));
5805 case GT_EXPR:
5806 if (TREE_OVERFLOW (hi))
5807 return omit_one_operand (type, integer_zero_node, arg00);
5808 return fold (build2 (GT_EXPR, type, arg00, hi));
5810 case GE_EXPR:
5811 if (TREE_OVERFLOW (lo))
5812 return omit_one_operand (type, integer_one_node, arg00);
5813 return fold (build2 (GE_EXPR, type, arg00, lo));
5815 default:
5816 break;
5819 return NULL_TREE;
5823 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5824 equality/inequality test, then return a simplified form of
5825 the test using shifts and logical operations. Otherwise return
5826 NULL. TYPE is the desired result type. */
5828 tree
5829 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5830 tree result_type)
5832 /* If this is testing a single bit, we can optimize the test. */
5833 if ((code == NE_EXPR || code == EQ_EXPR)
5834 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5835 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5837 tree inner = TREE_OPERAND (arg0, 0);
5838 tree type = TREE_TYPE (arg0);
5839 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5840 enum machine_mode operand_mode = TYPE_MODE (type);
5841 int ops_unsigned;
5842 tree signed_type, unsigned_type, intermediate_type;
5843 tree arg00;
5845 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5846 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5847 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5848 if (arg00 != NULL_TREE
5849 /* This is only a win if casting to a signed type is cheap,
5850 i.e. when arg00's type is not a partial mode. */
5851 && TYPE_PRECISION (TREE_TYPE (arg00))
5852 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5854 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5855 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5856 result_type, fold_convert (stype, arg00),
5857 fold_convert (stype, integer_zero_node)));
5860 /* Otherwise we have (A & C) != 0 where C is a single bit,
5861 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5862 Similarly for (A & C) == 0. */
5864 /* If INNER is a right shift of a constant and it plus BITNUM does
5865 not overflow, adjust BITNUM and INNER. */
5866 if (TREE_CODE (inner) == RSHIFT_EXPR
5867 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5868 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5869 && bitnum < TYPE_PRECISION (type)
5870 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5871 bitnum - TYPE_PRECISION (type)))
5873 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5874 inner = TREE_OPERAND (inner, 0);
5877 /* If we are going to be able to omit the AND below, we must do our
5878 operations as unsigned. If we must use the AND, we have a choice.
5879 Normally unsigned is faster, but for some machines signed is. */
5880 #ifdef LOAD_EXTEND_OP
5881 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
5882 && !flag_syntax_only) ? 0 : 1;
5883 #else
5884 ops_unsigned = 1;
5885 #endif
5887 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5888 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5889 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5890 inner = fold_convert (intermediate_type, inner);
5892 if (bitnum != 0)
5893 inner = build2 (RSHIFT_EXPR, intermediate_type,
5894 inner, size_int (bitnum));
5896 if (code == EQ_EXPR)
5897 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5898 inner, integer_one_node));
5900 /* Put the AND last so it can combine with more things. */
5901 inner = build2 (BIT_AND_EXPR, intermediate_type,
5902 inner, integer_one_node);
5904 /* Make sure to return the proper type. */
5905 inner = fold_convert (result_type, inner);
5907 return inner;
5909 return NULL_TREE;
5912 /* Check whether we are allowed to reorder operands arg0 and arg1,
5913 such that the evaluation of arg1 occurs before arg0. */
5915 static bool
5916 reorder_operands_p (tree arg0, tree arg1)
5918 if (! flag_evaluation_order)
5919 return true;
5920 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5921 return true;
5922 return ! TREE_SIDE_EFFECTS (arg0)
5923 && ! TREE_SIDE_EFFECTS (arg1);
5926 /* Test whether it is preferable two swap two operands, ARG0 and
5927 ARG1, for example because ARG0 is an integer constant and ARG1
5928 isn't. If REORDER is true, only recommend swapping if we can
5929 evaluate the operands in reverse order. */
5931 bool
5932 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5934 STRIP_SIGN_NOPS (arg0);
5935 STRIP_SIGN_NOPS (arg1);
5937 if (TREE_CODE (arg1) == INTEGER_CST)
5938 return 0;
5939 if (TREE_CODE (arg0) == INTEGER_CST)
5940 return 1;
5942 if (TREE_CODE (arg1) == REAL_CST)
5943 return 0;
5944 if (TREE_CODE (arg0) == REAL_CST)
5945 return 1;
5947 if (TREE_CODE (arg1) == COMPLEX_CST)
5948 return 0;
5949 if (TREE_CODE (arg0) == COMPLEX_CST)
5950 return 1;
5952 if (TREE_CONSTANT (arg1))
5953 return 0;
5954 if (TREE_CONSTANT (arg0))
5955 return 1;
5957 if (optimize_size)
5958 return 0;
5960 if (reorder && flag_evaluation_order
5961 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5962 return 0;
5964 if (DECL_P (arg1))
5965 return 0;
5966 if (DECL_P (arg0))
5967 return 1;
5969 /* It is preferable to swap two SSA_NAME to ensure a canonical form
5970 for commutative and comparison operators. Ensuring a canonical
5971 form allows the optimizers to find additional redundancies without
5972 having to explicitly check for both orderings. */
5973 if (TREE_CODE (arg0) == SSA_NAME
5974 && TREE_CODE (arg1) == SSA_NAME
5975 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5976 return 1;
5978 return 0;
5981 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
5982 ARG0 is extended to a wider type. */
5984 static tree
5985 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
5987 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
5988 tree arg1_unw;
5989 tree shorter_type, outer_type;
5990 tree min, max;
5991 bool above, below;
5993 if (arg0_unw == arg0)
5994 return NULL_TREE;
5995 shorter_type = TREE_TYPE (arg0_unw);
5997 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
5998 return NULL_TREE;
6000 arg1_unw = get_unwidened (arg1, shorter_type);
6001 if (!arg1_unw)
6002 return NULL_TREE;
6004 /* If possible, express the comparison in the shorter mode. */
6005 if ((code == EQ_EXPR || code == NE_EXPR
6006 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6007 && (TREE_TYPE (arg1_unw) == shorter_type
6008 || (TREE_CODE (arg1_unw) == INTEGER_CST
6009 && TREE_CODE (shorter_type) == INTEGER_TYPE
6010 && int_fits_type_p (arg1_unw, shorter_type))))
6011 return fold (build (code, type, arg0_unw,
6012 fold_convert (shorter_type, arg1_unw)));
6014 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6015 return NULL_TREE;
6017 /* If we are comparing with the integer that does not fit into the range
6018 of the shorter type, the result is known. */
6019 outer_type = TREE_TYPE (arg1_unw);
6020 min = lower_bound_in_type (outer_type, shorter_type);
6021 max = upper_bound_in_type (outer_type, shorter_type);
6023 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6024 max, arg1_unw));
6025 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6026 arg1_unw, min));
6028 switch (code)
6030 case EQ_EXPR:
6031 if (above || below)
6032 return omit_one_operand (type, integer_zero_node, arg0);
6033 break;
6035 case NE_EXPR:
6036 if (above || below)
6037 return omit_one_operand (type, integer_one_node, arg0);
6038 break;
6040 case LT_EXPR:
6041 case LE_EXPR:
6042 if (above)
6043 return omit_one_operand (type, integer_one_node, arg0);
6044 else if (below)
6045 return omit_one_operand (type, integer_zero_node, arg0);
6047 case GT_EXPR:
6048 case GE_EXPR:
6049 if (above)
6050 return omit_one_operand (type, integer_zero_node, arg0);
6051 else if (below)
6052 return omit_one_operand (type, integer_one_node, arg0);
6054 default:
6055 break;
6058 return NULL_TREE;
6061 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6062 ARG0 just the signedness is changed. */
6064 static tree
6065 fold_sign_changed_comparison (enum tree_code code, tree type,
6066 tree arg0, tree arg1)
6068 tree arg0_inner, tmp;
6069 tree inner_type, outer_type;
6071 if (TREE_CODE (arg0) != NOP_EXPR)
6072 return NULL_TREE;
6074 outer_type = TREE_TYPE (arg0);
6075 arg0_inner = TREE_OPERAND (arg0, 0);
6076 inner_type = TREE_TYPE (arg0_inner);
6078 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6079 return NULL_TREE;
6081 if (TREE_CODE (arg1) != INTEGER_CST
6082 && !(TREE_CODE (arg1) == NOP_EXPR
6083 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6084 return NULL_TREE;
6086 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6087 && code != NE_EXPR
6088 && code != EQ_EXPR)
6089 return NULL_TREE;
6091 if (TREE_CODE (arg1) == INTEGER_CST)
6093 tmp = build_int_cst_wide (inner_type,
6094 TREE_INT_CST_LOW (arg1),
6095 TREE_INT_CST_HIGH (arg1));
6096 arg1 = force_fit_type (tmp, 0,
6097 TREE_OVERFLOW (arg1),
6098 TREE_CONSTANT_OVERFLOW (arg1));
6100 else
6101 arg1 = fold_convert (inner_type, arg1);
6103 return fold (build (code, type, arg0_inner, arg1));
6106 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6107 step of the array. TYPE is the type of the expression. ADDR is the address.
6108 MULT is the multiplicative expression. If the function succeeds, the new
6109 address expression is returned. Otherwise NULL_TREE is returned. */
6111 static tree
6112 try_move_mult_to_index (tree type, enum tree_code code, tree addr, tree mult)
6114 tree s, delta, step;
6115 tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
6116 tree ref = TREE_OPERAND (addr, 0), pref;
6117 tree ret, pos;
6118 tree itype;
6120 STRIP_NOPS (arg0);
6121 STRIP_NOPS (arg1);
6123 if (TREE_CODE (arg0) == INTEGER_CST)
6125 s = arg0;
6126 delta = arg1;
6128 else if (TREE_CODE (arg1) == INTEGER_CST)
6130 s = arg1;
6131 delta = arg0;
6133 else
6134 return NULL_TREE;
6136 for (;; ref = TREE_OPERAND (ref, 0))
6138 if (TREE_CODE (ref) == ARRAY_REF)
6140 step = array_ref_element_size (ref);
6142 if (TREE_CODE (step) != INTEGER_CST)
6143 continue;
6145 itype = TREE_TYPE (step);
6147 /* If the type sizes do not match, we might run into problems
6148 when one of them would overflow. */
6149 if (TYPE_PRECISION (itype) != TYPE_PRECISION (type))
6150 continue;
6152 if (!operand_equal_p (step, fold_convert (itype, s), 0))
6153 continue;
6155 delta = fold_convert (itype, delta);
6156 break;
6159 if (!handled_component_p (ref))
6160 return NULL_TREE;
6163 /* We found the suitable array reference. So copy everything up to it,
6164 and replace the index. */
6166 pref = TREE_OPERAND (addr, 0);
6167 ret = copy_node (pref);
6168 pos = ret;
6170 while (pref != ref)
6172 pref = TREE_OPERAND (pref, 0);
6173 TREE_OPERAND (pos, 0) = copy_node (pref);
6174 pos = TREE_OPERAND (pos, 0);
6177 TREE_OPERAND (pos, 1) = fold (build2 (code, itype,
6178 TREE_OPERAND (pos, 1),
6179 delta));
6181 return build1 (ADDR_EXPR, type, ret);
6185 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6186 means A >= Y && A != MAX, but in this case we know that
6187 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6189 static tree
6190 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6192 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6194 if (TREE_CODE (bound) == LT_EXPR)
6195 a = TREE_OPERAND (bound, 0);
6196 else if (TREE_CODE (bound) == GT_EXPR)
6197 a = TREE_OPERAND (bound, 1);
6198 else
6199 return NULL_TREE;
6201 typea = TREE_TYPE (a);
6202 if (!INTEGRAL_TYPE_P (typea)
6203 && !POINTER_TYPE_P (typea))
6204 return NULL_TREE;
6206 if (TREE_CODE (ineq) == LT_EXPR)
6208 a1 = TREE_OPERAND (ineq, 1);
6209 y = TREE_OPERAND (ineq, 0);
6211 else if (TREE_CODE (ineq) == GT_EXPR)
6213 a1 = TREE_OPERAND (ineq, 0);
6214 y = TREE_OPERAND (ineq, 1);
6216 else
6217 return NULL_TREE;
6219 if (TREE_TYPE (a1) != typea)
6220 return NULL_TREE;
6222 diff = fold (build2 (MINUS_EXPR, typea, a1, a));
6223 if (!integer_onep (diff))
6224 return NULL_TREE;
6226 return fold (build2 (GE_EXPR, type, a, y));
6229 /* Perform constant folding and related simplification of EXPR.
6230 The related simplifications include x*1 => x, x*0 => 0, etc.,
6231 and application of the associative law.
6232 NOP_EXPR conversions may be removed freely (as long as we
6233 are careful not to change the type of the overall expression).
6234 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
6235 but we can constant-fold them if they have constant operands. */
6237 #ifdef ENABLE_FOLD_CHECKING
6238 # define fold(x) fold_1 (x)
6239 static tree fold_1 (tree);
6240 static
6241 #endif
6242 tree
6243 fold (tree expr)
6245 const tree t = expr;
6246 const tree type = TREE_TYPE (expr);
6247 tree t1 = NULL_TREE;
6248 tree tem;
6249 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
6250 enum tree_code code = TREE_CODE (t);
6251 enum tree_code_class kind = TREE_CODE_CLASS (code);
6253 /* WINS will be nonzero when the switch is done
6254 if all operands are constant. */
6255 int wins = 1;
6257 /* Return right away if a constant. */
6258 if (kind == tcc_constant)
6259 return t;
6261 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6263 tree subop;
6265 /* Special case for conversion ops that can have fixed point args. */
6266 arg0 = TREE_OPERAND (t, 0);
6268 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6269 if (arg0 != 0)
6270 STRIP_SIGN_NOPS (arg0);
6272 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
6273 subop = TREE_REALPART (arg0);
6274 else
6275 subop = arg0;
6277 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
6278 && TREE_CODE (subop) != REAL_CST)
6279 /* Note that TREE_CONSTANT isn't enough:
6280 static var addresses are constant but we can't
6281 do arithmetic on them. */
6282 wins = 0;
6284 else if (IS_EXPR_CODE_CLASS (kind))
6286 int len = TREE_CODE_LENGTH (code);
6287 int i;
6288 for (i = 0; i < len; i++)
6290 tree op = TREE_OPERAND (t, i);
6291 tree subop;
6293 if (op == 0)
6294 continue; /* Valid for CALL_EXPR, at least. */
6296 /* Strip any conversions that don't change the mode. This is
6297 safe for every expression, except for a comparison expression
6298 because its signedness is derived from its operands. So, in
6299 the latter case, only strip conversions that don't change the
6300 signedness.
6302 Note that this is done as an internal manipulation within the
6303 constant folder, in order to find the simplest representation
6304 of the arguments so that their form can be studied. In any
6305 cases, the appropriate type conversions should be put back in
6306 the tree that will get out of the constant folder. */
6307 if (kind == tcc_comparison)
6308 STRIP_SIGN_NOPS (op);
6309 else
6310 STRIP_NOPS (op);
6312 if (TREE_CODE (op) == COMPLEX_CST)
6313 subop = TREE_REALPART (op);
6314 else
6315 subop = op;
6317 if (TREE_CODE (subop) != INTEGER_CST
6318 && TREE_CODE (subop) != REAL_CST)
6319 /* Note that TREE_CONSTANT isn't enough:
6320 static var addresses are constant but we can't
6321 do arithmetic on them. */
6322 wins = 0;
6324 if (i == 0)
6325 arg0 = op;
6326 else if (i == 1)
6327 arg1 = op;
6331 /* If this is a commutative operation, and ARG0 is a constant, move it
6332 to ARG1 to reduce the number of tests below. */
6333 if (commutative_tree_code (code)
6334 && tree_swap_operands_p (arg0, arg1, true))
6335 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6336 TREE_OPERAND (t, 0)));
6338 /* Now WINS is set as described above,
6339 ARG0 is the first operand of EXPR,
6340 and ARG1 is the second operand (if it has more than one operand).
6342 First check for cases where an arithmetic operation is applied to a
6343 compound, conditional, or comparison operation. Push the arithmetic
6344 operation inside the compound or conditional to see if any folding
6345 can then be done. Convert comparison to conditional for this purpose.
6346 This also optimizes non-constant cases that used to be done in
6347 expand_expr.
6349 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
6350 one of the operands is a comparison and the other is a comparison, a
6351 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
6352 code below would make the expression more complex. Change it to a
6353 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6354 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6356 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6357 || code == EQ_EXPR || code == NE_EXPR)
6358 && ((truth_value_p (TREE_CODE (arg0))
6359 && (truth_value_p (TREE_CODE (arg1))
6360 || (TREE_CODE (arg1) == BIT_AND_EXPR
6361 && integer_onep (TREE_OPERAND (arg1, 1)))))
6362 || (truth_value_p (TREE_CODE (arg1))
6363 && (truth_value_p (TREE_CODE (arg0))
6364 || (TREE_CODE (arg0) == BIT_AND_EXPR
6365 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6367 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6368 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6369 : TRUTH_XOR_EXPR,
6370 type, fold_convert (boolean_type_node, arg0),
6371 fold_convert (boolean_type_node, arg1)));
6373 if (code == EQ_EXPR)
6374 tem = invert_truthvalue (tem);
6376 return tem;
6379 if (TREE_CODE_CLASS (code) == tcc_unary)
6381 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6382 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6383 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6384 else if (TREE_CODE (arg0) == COND_EXPR)
6386 tree arg01 = TREE_OPERAND (arg0, 1);
6387 tree arg02 = TREE_OPERAND (arg0, 2);
6388 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6389 arg01 = fold (build1 (code, type, arg01));
6390 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6391 arg02 = fold (build1 (code, type, arg02));
6392 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6393 arg01, arg02));
6395 /* If this was a conversion, and all we did was to move into
6396 inside the COND_EXPR, bring it back out. But leave it if
6397 it is a conversion from integer to integer and the
6398 result precision is no wider than a word since such a
6399 conversion is cheap and may be optimized away by combine,
6400 while it couldn't if it were outside the COND_EXPR. Then return
6401 so we don't get into an infinite recursion loop taking the
6402 conversion out and then back in. */
6404 if ((code == NOP_EXPR || code == CONVERT_EXPR
6405 || code == NON_LVALUE_EXPR)
6406 && TREE_CODE (tem) == COND_EXPR
6407 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6408 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6409 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6410 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6411 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6412 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6413 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6414 && (INTEGRAL_TYPE_P
6415 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6416 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6417 || flag_syntax_only))
6418 tem = build1 (code, type,
6419 build3 (COND_EXPR,
6420 TREE_TYPE (TREE_OPERAND
6421 (TREE_OPERAND (tem, 1), 0)),
6422 TREE_OPERAND (tem, 0),
6423 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6424 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6425 return tem;
6427 else if (COMPARISON_CLASS_P (arg0))
6429 if (TREE_CODE (type) == BOOLEAN_TYPE)
6431 arg0 = copy_node (arg0);
6432 TREE_TYPE (arg0) = type;
6433 return arg0;
6435 else if (TREE_CODE (type) != INTEGER_TYPE)
6436 return fold (build3 (COND_EXPR, type, arg0,
6437 fold (build1 (code, type,
6438 integer_one_node)),
6439 fold (build1 (code, type,
6440 integer_zero_node))));
6443 else if (TREE_CODE_CLASS (code) == tcc_comparison
6444 && TREE_CODE (arg0) == COMPOUND_EXPR)
6445 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6446 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6447 else if (TREE_CODE_CLASS (code) == tcc_comparison
6448 && TREE_CODE (arg1) == COMPOUND_EXPR)
6449 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6450 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6451 else if (TREE_CODE_CLASS (code) == tcc_binary
6452 || TREE_CODE_CLASS (code) == tcc_comparison)
6454 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6455 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6456 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6457 arg1)));
6458 if (TREE_CODE (arg1) == COMPOUND_EXPR
6459 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6460 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6461 fold (build2 (code, type,
6462 arg0, TREE_OPERAND (arg1, 1))));
6464 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
6466 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
6467 /*cond_first_p=*/1);
6468 if (tem != NULL_TREE)
6469 return tem;
6472 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
6474 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
6475 /*cond_first_p=*/0);
6476 if (tem != NULL_TREE)
6477 return tem;
6481 switch (code)
6483 case CONST_DECL:
6484 return fold (DECL_INITIAL (t));
6486 case NOP_EXPR:
6487 case FLOAT_EXPR:
6488 case CONVERT_EXPR:
6489 case FIX_TRUNC_EXPR:
6490 case FIX_CEIL_EXPR:
6491 case FIX_FLOOR_EXPR:
6492 case FIX_ROUND_EXPR:
6493 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6494 return TREE_OPERAND (t, 0);
6496 /* Handle cases of two conversions in a row. */
6497 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6498 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6500 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6501 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6502 int inside_int = INTEGRAL_TYPE_P (inside_type);
6503 int inside_ptr = POINTER_TYPE_P (inside_type);
6504 int inside_float = FLOAT_TYPE_P (inside_type);
6505 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6506 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6507 int inter_int = INTEGRAL_TYPE_P (inter_type);
6508 int inter_ptr = POINTER_TYPE_P (inter_type);
6509 int inter_float = FLOAT_TYPE_P (inter_type);
6510 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6511 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6512 int final_int = INTEGRAL_TYPE_P (type);
6513 int final_ptr = POINTER_TYPE_P (type);
6514 int final_float = FLOAT_TYPE_P (type);
6515 unsigned int final_prec = TYPE_PRECISION (type);
6516 int final_unsignedp = TYPE_UNSIGNED (type);
6518 /* In addition to the cases of two conversions in a row
6519 handled below, if we are converting something to its own
6520 type via an object of identical or wider precision, neither
6521 conversion is needed. */
6522 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6523 && ((inter_int && final_int) || (inter_float && final_float))
6524 && inter_prec >= final_prec)
6525 return fold (build1 (code, type,
6526 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6528 /* Likewise, if the intermediate and final types are either both
6529 float or both integer, we don't need the middle conversion if
6530 it is wider than the final type and doesn't change the signedness
6531 (for integers). Avoid this if the final type is a pointer
6532 since then we sometimes need the inner conversion. Likewise if
6533 the outer has a precision not equal to the size of its mode. */
6534 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6535 || (inter_float && inside_float))
6536 && inter_prec >= inside_prec
6537 && (inter_float || inter_unsignedp == inside_unsignedp)
6538 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6539 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6540 && ! final_ptr)
6541 return fold (build1 (code, type,
6542 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6544 /* If we have a sign-extension of a zero-extended value, we can
6545 replace that by a single zero-extension. */
6546 if (inside_int && inter_int && final_int
6547 && inside_prec < inter_prec && inter_prec < final_prec
6548 && inside_unsignedp && !inter_unsignedp)
6549 return fold (build1 (code, type,
6550 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6552 /* Two conversions in a row are not needed unless:
6553 - some conversion is floating-point (overstrict for now), or
6554 - the intermediate type is narrower than both initial and
6555 final, or
6556 - the intermediate type and innermost type differ in signedness,
6557 and the outermost type is wider than the intermediate, or
6558 - the initial type is a pointer type and the precisions of the
6559 intermediate and final types differ, or
6560 - the final type is a pointer type and the precisions of the
6561 initial and intermediate types differ. */
6562 if (! inside_float && ! inter_float && ! final_float
6563 && (inter_prec > inside_prec || inter_prec > final_prec)
6564 && ! (inside_int && inter_int
6565 && inter_unsignedp != inside_unsignedp
6566 && inter_prec < final_prec)
6567 && ((inter_unsignedp && inter_prec > inside_prec)
6568 == (final_unsignedp && final_prec > inter_prec))
6569 && ! (inside_ptr && inter_prec != final_prec)
6570 && ! (final_ptr && inside_prec != inter_prec)
6571 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6572 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6573 && ! final_ptr)
6574 return fold (build1 (code, type,
6575 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6578 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6579 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6580 /* Detect assigning a bitfield. */
6581 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6582 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6584 /* Don't leave an assignment inside a conversion
6585 unless assigning a bitfield. */
6586 tree prev = TREE_OPERAND (t, 0);
6587 tem = copy_node (t);
6588 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6589 /* First do the assignment, then return converted constant. */
6590 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6591 TREE_NO_WARNING (tem) = 1;
6592 TREE_USED (tem) = 1;
6593 return tem;
6596 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6597 constants (if x has signed type, the sign bit cannot be set
6598 in c). This folds extension into the BIT_AND_EXPR. */
6599 if (INTEGRAL_TYPE_P (type)
6600 && TREE_CODE (type) != BOOLEAN_TYPE
6601 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6602 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6604 tree and = TREE_OPERAND (t, 0);
6605 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6606 int change = 0;
6608 if (TYPE_UNSIGNED (TREE_TYPE (and))
6609 || (TYPE_PRECISION (type)
6610 <= TYPE_PRECISION (TREE_TYPE (and))))
6611 change = 1;
6612 else if (TYPE_PRECISION (TREE_TYPE (and1))
6613 <= HOST_BITS_PER_WIDE_INT
6614 && host_integerp (and1, 1))
6616 unsigned HOST_WIDE_INT cst;
6618 cst = tree_low_cst (and1, 1);
6619 cst &= (HOST_WIDE_INT) -1
6620 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6621 change = (cst == 0);
6622 #ifdef LOAD_EXTEND_OP
6623 if (change
6624 && !flag_syntax_only
6625 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6626 == ZERO_EXTEND))
6628 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6629 and0 = fold_convert (uns, and0);
6630 and1 = fold_convert (uns, and1);
6632 #endif
6634 if (change)
6635 return fold (build2 (BIT_AND_EXPR, type,
6636 fold_convert (type, and0),
6637 fold_convert (type, and1)));
6640 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6641 T2 being pointers to types of the same size. */
6642 if (POINTER_TYPE_P (TREE_TYPE (t))
6643 && BINARY_CLASS_P (arg0)
6644 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6645 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6647 tree arg00 = TREE_OPERAND (arg0, 0);
6648 tree t0 = TREE_TYPE (t);
6649 tree t1 = TREE_TYPE (arg00);
6650 tree tt0 = TREE_TYPE (t0);
6651 tree tt1 = TREE_TYPE (t1);
6652 tree s0 = TYPE_SIZE (tt0);
6653 tree s1 = TYPE_SIZE (tt1);
6655 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6656 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6657 TREE_OPERAND (arg0, 1));
6660 tem = fold_convert_const (code, type, arg0);
6661 return tem ? tem : t;
6663 case VIEW_CONVERT_EXPR:
6664 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6665 return build1 (VIEW_CONVERT_EXPR, type,
6666 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6667 return t;
6669 case COMPONENT_REF:
6670 if (TREE_CODE (arg0) == CONSTRUCTOR
6671 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6673 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6674 if (m)
6675 return TREE_VALUE (m);
6677 return t;
6679 case RANGE_EXPR:
6680 if (TREE_CONSTANT (t) != wins)
6682 tem = copy_node (t);
6683 TREE_CONSTANT (tem) = wins;
6684 TREE_INVARIANT (tem) = wins;
6685 return tem;
6687 return t;
6689 case NEGATE_EXPR:
6690 if (negate_expr_p (arg0))
6691 return fold_convert (type, negate_expr (arg0));
6692 return t;
6694 case ABS_EXPR:
6695 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6696 return fold_abs_const (arg0, type);
6697 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6698 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6699 /* Convert fabs((double)float) into (double)fabsf(float). */
6700 else if (TREE_CODE (arg0) == NOP_EXPR
6701 && TREE_CODE (type) == REAL_TYPE)
6703 tree targ0 = strip_float_extensions (arg0);
6704 if (targ0 != arg0)
6705 return fold_convert (type, fold (build1 (ABS_EXPR,
6706 TREE_TYPE (targ0),
6707 targ0)));
6709 else if (tree_expr_nonnegative_p (arg0))
6710 return arg0;
6711 return t;
6713 case CONJ_EXPR:
6714 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6715 return fold_convert (type, arg0);
6716 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6717 return build2 (COMPLEX_EXPR, type,
6718 TREE_OPERAND (arg0, 0),
6719 negate_expr (TREE_OPERAND (arg0, 1)));
6720 else if (TREE_CODE (arg0) == COMPLEX_CST)
6721 return build_complex (type, TREE_REALPART (arg0),
6722 negate_expr (TREE_IMAGPART (arg0)));
6723 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6724 return fold (build2 (TREE_CODE (arg0), type,
6725 fold (build1 (CONJ_EXPR, type,
6726 TREE_OPERAND (arg0, 0))),
6727 fold (build1 (CONJ_EXPR, type,
6728 TREE_OPERAND (arg0, 1)))));
6729 else if (TREE_CODE (arg0) == CONJ_EXPR)
6730 return TREE_OPERAND (arg0, 0);
6731 return t;
6733 case BIT_NOT_EXPR:
6734 if (TREE_CODE (arg0) == INTEGER_CST)
6735 return fold_not_const (arg0, type);
6736 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6737 return TREE_OPERAND (arg0, 0);
6738 return t;
6740 case PLUS_EXPR:
6741 /* A + (-B) -> A - B */
6742 if (TREE_CODE (arg1) == NEGATE_EXPR)
6743 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6744 /* (-A) + B -> B - A */
6745 if (TREE_CODE (arg0) == NEGATE_EXPR
6746 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6747 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6748 if (! FLOAT_TYPE_P (type))
6750 if (integer_zerop (arg1))
6751 return non_lvalue (fold_convert (type, arg0));
6753 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6754 with a constant, and the two constants have no bits in common,
6755 we should treat this as a BIT_IOR_EXPR since this may produce more
6756 simplifications. */
6757 if (TREE_CODE (arg0) == BIT_AND_EXPR
6758 && TREE_CODE (arg1) == BIT_AND_EXPR
6759 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6760 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6761 && integer_zerop (const_binop (BIT_AND_EXPR,
6762 TREE_OPERAND (arg0, 1),
6763 TREE_OPERAND (arg1, 1), 0)))
6765 code = BIT_IOR_EXPR;
6766 goto bit_ior;
6769 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6770 (plus (plus (mult) (mult)) (foo)) so that we can
6771 take advantage of the factoring cases below. */
6772 if (((TREE_CODE (arg0) == PLUS_EXPR
6773 || TREE_CODE (arg0) == MINUS_EXPR)
6774 && TREE_CODE (arg1) == MULT_EXPR)
6775 || ((TREE_CODE (arg1) == PLUS_EXPR
6776 || TREE_CODE (arg1) == MINUS_EXPR)
6777 && TREE_CODE (arg0) == MULT_EXPR))
6779 tree parg0, parg1, parg, marg;
6780 enum tree_code pcode;
6782 if (TREE_CODE (arg1) == MULT_EXPR)
6783 parg = arg0, marg = arg1;
6784 else
6785 parg = arg1, marg = arg0;
6786 pcode = TREE_CODE (parg);
6787 parg0 = TREE_OPERAND (parg, 0);
6788 parg1 = TREE_OPERAND (parg, 1);
6789 STRIP_NOPS (parg0);
6790 STRIP_NOPS (parg1);
6792 if (TREE_CODE (parg0) == MULT_EXPR
6793 && TREE_CODE (parg1) != MULT_EXPR)
6794 return fold (build2 (pcode, type,
6795 fold (build2 (PLUS_EXPR, type,
6796 fold_convert (type, parg0),
6797 fold_convert (type, marg))),
6798 fold_convert (type, parg1)));
6799 if (TREE_CODE (parg0) != MULT_EXPR
6800 && TREE_CODE (parg1) == MULT_EXPR)
6801 return fold (build2 (PLUS_EXPR, type,
6802 fold_convert (type, parg0),
6803 fold (build2 (pcode, type,
6804 fold_convert (type, marg),
6805 fold_convert (type,
6806 parg1)))));
6809 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6811 tree arg00, arg01, arg10, arg11;
6812 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6814 /* (A * C) + (B * C) -> (A+B) * C.
6815 We are most concerned about the case where C is a constant,
6816 but other combinations show up during loop reduction. Since
6817 it is not difficult, try all four possibilities. */
6819 arg00 = TREE_OPERAND (arg0, 0);
6820 arg01 = TREE_OPERAND (arg0, 1);
6821 arg10 = TREE_OPERAND (arg1, 0);
6822 arg11 = TREE_OPERAND (arg1, 1);
6823 same = NULL_TREE;
6825 if (operand_equal_p (arg01, arg11, 0))
6826 same = arg01, alt0 = arg00, alt1 = arg10;
6827 else if (operand_equal_p (arg00, arg10, 0))
6828 same = arg00, alt0 = arg01, alt1 = arg11;
6829 else if (operand_equal_p (arg00, arg11, 0))
6830 same = arg00, alt0 = arg01, alt1 = arg10;
6831 else if (operand_equal_p (arg01, arg10, 0))
6832 same = arg01, alt0 = arg00, alt1 = arg11;
6834 /* No identical multiplicands; see if we can find a common
6835 power-of-two factor in non-power-of-two multiplies. This
6836 can help in multi-dimensional array access. */
6837 else if (TREE_CODE (arg01) == INTEGER_CST
6838 && TREE_CODE (arg11) == INTEGER_CST
6839 && TREE_INT_CST_HIGH (arg01) == 0
6840 && TREE_INT_CST_HIGH (arg11) == 0)
6842 HOST_WIDE_INT int01, int11, tmp;
6843 int01 = TREE_INT_CST_LOW (arg01);
6844 int11 = TREE_INT_CST_LOW (arg11);
6846 /* Move min of absolute values to int11. */
6847 if ((int01 >= 0 ? int01 : -int01)
6848 < (int11 >= 0 ? int11 : -int11))
6850 tmp = int01, int01 = int11, int11 = tmp;
6851 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6852 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6855 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6857 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6858 build_int_cst (NULL_TREE,
6859 int01 / int11)));
6860 alt1 = arg10;
6861 same = arg11;
6865 if (same)
6866 return fold (build2 (MULT_EXPR, type,
6867 fold (build2 (PLUS_EXPR, type,
6868 fold_convert (type, alt0),
6869 fold_convert (type, alt1))),
6870 same));
6873 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
6874 of the array. Loop optimizer sometimes produce this type of
6875 expressions. */
6876 if (TREE_CODE (arg0) == ADDR_EXPR
6877 && TREE_CODE (arg1) == MULT_EXPR)
6879 tem = try_move_mult_to_index (type, PLUS_EXPR, arg0, arg1);
6880 if (tem)
6881 return fold (tem);
6883 else if (TREE_CODE (arg1) == ADDR_EXPR
6884 && TREE_CODE (arg0) == MULT_EXPR)
6886 tem = try_move_mult_to_index (type, PLUS_EXPR, arg1, arg0);
6887 if (tem)
6888 return fold (tem);
6891 else
6893 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6894 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6895 return non_lvalue (fold_convert (type, arg0));
6897 /* Likewise if the operands are reversed. */
6898 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6899 return non_lvalue (fold_convert (type, arg1));
6901 /* Convert X + -C into X - C. */
6902 if (TREE_CODE (arg1) == REAL_CST
6903 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
6905 tem = fold_negate_const (arg1, type);
6906 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
6907 return fold (build2 (MINUS_EXPR, type,
6908 fold_convert (type, arg0),
6909 fold_convert (type, tem)));
6912 /* Convert x+x into x*2.0. */
6913 if (operand_equal_p (arg0, arg1, 0)
6914 && SCALAR_FLOAT_TYPE_P (type))
6915 return fold (build2 (MULT_EXPR, type, arg0,
6916 build_real (type, dconst2)));
6918 /* Convert x*c+x into x*(c+1). */
6919 if (flag_unsafe_math_optimizations
6920 && TREE_CODE (arg0) == MULT_EXPR
6921 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6922 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6923 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6925 REAL_VALUE_TYPE c;
6927 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6928 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6929 return fold (build2 (MULT_EXPR, type, arg1,
6930 build_real (type, c)));
6933 /* Convert x+x*c into x*(c+1). */
6934 if (flag_unsafe_math_optimizations
6935 && TREE_CODE (arg1) == MULT_EXPR
6936 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6937 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6938 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6940 REAL_VALUE_TYPE c;
6942 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6943 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6944 return fold (build2 (MULT_EXPR, type, arg0,
6945 build_real (type, c)));
6948 /* Convert x*c1+x*c2 into x*(c1+c2). */
6949 if (flag_unsafe_math_optimizations
6950 && TREE_CODE (arg0) == MULT_EXPR
6951 && TREE_CODE (arg1) == MULT_EXPR
6952 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6953 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6954 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6955 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6956 && operand_equal_p (TREE_OPERAND (arg0, 0),
6957 TREE_OPERAND (arg1, 0), 0))
6959 REAL_VALUE_TYPE c1, c2;
6961 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6962 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6963 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6964 return fold (build2 (MULT_EXPR, type,
6965 TREE_OPERAND (arg0, 0),
6966 build_real (type, c1)));
6968 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
6969 if (flag_unsafe_math_optimizations
6970 && TREE_CODE (arg1) == PLUS_EXPR
6971 && TREE_CODE (arg0) != MULT_EXPR)
6973 tree tree10 = TREE_OPERAND (arg1, 0);
6974 tree tree11 = TREE_OPERAND (arg1, 1);
6975 if (TREE_CODE (tree11) == MULT_EXPR
6976 && TREE_CODE (tree10) == MULT_EXPR)
6978 tree tree0;
6979 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6980 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6983 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
6984 if (flag_unsafe_math_optimizations
6985 && TREE_CODE (arg0) == PLUS_EXPR
6986 && TREE_CODE (arg1) != MULT_EXPR)
6988 tree tree00 = TREE_OPERAND (arg0, 0);
6989 tree tree01 = TREE_OPERAND (arg0, 1);
6990 if (TREE_CODE (tree01) == MULT_EXPR
6991 && TREE_CODE (tree00) == MULT_EXPR)
6993 tree tree0;
6994 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6995 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
7000 bit_rotate:
7001 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7002 is a rotate of A by C1 bits. */
7003 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7004 is a rotate of A by B bits. */
7006 enum tree_code code0, code1;
7007 code0 = TREE_CODE (arg0);
7008 code1 = TREE_CODE (arg1);
7009 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7010 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7011 && operand_equal_p (TREE_OPERAND (arg0, 0),
7012 TREE_OPERAND (arg1, 0), 0)
7013 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7015 tree tree01, tree11;
7016 enum tree_code code01, code11;
7018 tree01 = TREE_OPERAND (arg0, 1);
7019 tree11 = TREE_OPERAND (arg1, 1);
7020 STRIP_NOPS (tree01);
7021 STRIP_NOPS (tree11);
7022 code01 = TREE_CODE (tree01);
7023 code11 = TREE_CODE (tree11);
7024 if (code01 == INTEGER_CST
7025 && code11 == INTEGER_CST
7026 && TREE_INT_CST_HIGH (tree01) == 0
7027 && TREE_INT_CST_HIGH (tree11) == 0
7028 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7029 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7030 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7031 code0 == LSHIFT_EXPR ? tree01 : tree11);
7032 else if (code11 == MINUS_EXPR)
7034 tree tree110, tree111;
7035 tree110 = TREE_OPERAND (tree11, 0);
7036 tree111 = TREE_OPERAND (tree11, 1);
7037 STRIP_NOPS (tree110);
7038 STRIP_NOPS (tree111);
7039 if (TREE_CODE (tree110) == INTEGER_CST
7040 && 0 == compare_tree_int (tree110,
7041 TYPE_PRECISION
7042 (TREE_TYPE (TREE_OPERAND
7043 (arg0, 0))))
7044 && operand_equal_p (tree01, tree111, 0))
7045 return build2 ((code0 == LSHIFT_EXPR
7046 ? LROTATE_EXPR
7047 : RROTATE_EXPR),
7048 type, TREE_OPERAND (arg0, 0), tree01);
7050 else if (code01 == MINUS_EXPR)
7052 tree tree010, tree011;
7053 tree010 = TREE_OPERAND (tree01, 0);
7054 tree011 = TREE_OPERAND (tree01, 1);
7055 STRIP_NOPS (tree010);
7056 STRIP_NOPS (tree011);
7057 if (TREE_CODE (tree010) == INTEGER_CST
7058 && 0 == compare_tree_int (tree010,
7059 TYPE_PRECISION
7060 (TREE_TYPE (TREE_OPERAND
7061 (arg0, 0))))
7062 && operand_equal_p (tree11, tree011, 0))
7063 return build2 ((code0 != LSHIFT_EXPR
7064 ? LROTATE_EXPR
7065 : RROTATE_EXPR),
7066 type, TREE_OPERAND (arg0, 0), tree11);
7071 associate:
7072 /* In most languages, can't associate operations on floats through
7073 parentheses. Rather than remember where the parentheses were, we
7074 don't associate floats at all, unless the user has specified
7075 -funsafe-math-optimizations. */
7077 if (! wins
7078 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7080 tree var0, con0, lit0, minus_lit0;
7081 tree var1, con1, lit1, minus_lit1;
7083 /* Split both trees into variables, constants, and literals. Then
7084 associate each group together, the constants with literals,
7085 then the result with variables. This increases the chances of
7086 literals being recombined later and of generating relocatable
7087 expressions for the sum of a constant and literal. */
7088 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7089 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7090 code == MINUS_EXPR);
7092 /* Only do something if we found more than two objects. Otherwise,
7093 nothing has changed and we risk infinite recursion. */
7094 if (2 < ((var0 != 0) + (var1 != 0)
7095 + (con0 != 0) + (con1 != 0)
7096 + (lit0 != 0) + (lit1 != 0)
7097 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7099 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7100 if (code == MINUS_EXPR)
7101 code = PLUS_EXPR;
7103 var0 = associate_trees (var0, var1, code, type);
7104 con0 = associate_trees (con0, con1, code, type);
7105 lit0 = associate_trees (lit0, lit1, code, type);
7106 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7108 /* Preserve the MINUS_EXPR if the negative part of the literal is
7109 greater than the positive part. Otherwise, the multiplicative
7110 folding code (i.e extract_muldiv) may be fooled in case
7111 unsigned constants are subtracted, like in the following
7112 example: ((X*2 + 4) - 8U)/2. */
7113 if (minus_lit0 && lit0)
7115 if (TREE_CODE (lit0) == INTEGER_CST
7116 && TREE_CODE (minus_lit0) == INTEGER_CST
7117 && tree_int_cst_lt (lit0, minus_lit0))
7119 minus_lit0 = associate_trees (minus_lit0, lit0,
7120 MINUS_EXPR, type);
7121 lit0 = 0;
7123 else
7125 lit0 = associate_trees (lit0, minus_lit0,
7126 MINUS_EXPR, type);
7127 minus_lit0 = 0;
7130 if (minus_lit0)
7132 if (con0 == 0)
7133 return fold_convert (type,
7134 associate_trees (var0, minus_lit0,
7135 MINUS_EXPR, type));
7136 else
7138 con0 = associate_trees (con0, minus_lit0,
7139 MINUS_EXPR, type);
7140 return fold_convert (type,
7141 associate_trees (var0, con0,
7142 PLUS_EXPR, type));
7146 con0 = associate_trees (con0, lit0, code, type);
7147 return fold_convert (type, associate_trees (var0, con0,
7148 code, type));
7152 binary:
7153 if (wins)
7154 t1 = const_binop (code, arg0, arg1, 0);
7155 if (t1 != NULL_TREE)
7157 /* The return value should always have
7158 the same type as the original expression. */
7159 if (TREE_TYPE (t1) != type)
7160 t1 = fold_convert (type, t1);
7162 return t1;
7164 return t;
7166 case MINUS_EXPR:
7167 /* A - (-B) -> A + B */
7168 if (TREE_CODE (arg1) == NEGATE_EXPR)
7169 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
7170 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7171 if (TREE_CODE (arg0) == NEGATE_EXPR
7172 && (FLOAT_TYPE_P (type)
7173 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7174 && negate_expr_p (arg1)
7175 && reorder_operands_p (arg0, arg1))
7176 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
7177 TREE_OPERAND (arg0, 0)));
7179 if (! FLOAT_TYPE_P (type))
7181 if (! wins && integer_zerop (arg0))
7182 return negate_expr (fold_convert (type, arg1));
7183 if (integer_zerop (arg1))
7184 return non_lvalue (fold_convert (type, arg0));
7186 /* Fold A - (A & B) into ~B & A. */
7187 if (!TREE_SIDE_EFFECTS (arg0)
7188 && TREE_CODE (arg1) == BIT_AND_EXPR)
7190 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7191 return fold (build2 (BIT_AND_EXPR, type,
7192 fold (build1 (BIT_NOT_EXPR, type,
7193 TREE_OPERAND (arg1, 0))),
7194 arg0));
7195 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7196 return fold (build2 (BIT_AND_EXPR, type,
7197 fold (build1 (BIT_NOT_EXPR, type,
7198 TREE_OPERAND (arg1, 1))),
7199 arg0));
7202 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7203 any power of 2 minus 1. */
7204 if (TREE_CODE (arg0) == BIT_AND_EXPR
7205 && TREE_CODE (arg1) == BIT_AND_EXPR
7206 && operand_equal_p (TREE_OPERAND (arg0, 0),
7207 TREE_OPERAND (arg1, 0), 0))
7209 tree mask0 = TREE_OPERAND (arg0, 1);
7210 tree mask1 = TREE_OPERAND (arg1, 1);
7211 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
7213 if (operand_equal_p (tem, mask1, 0))
7215 tem = fold (build2 (BIT_XOR_EXPR, type,
7216 TREE_OPERAND (arg0, 0), mask1));
7217 return fold (build2 (MINUS_EXPR, type, tem, mask1));
7222 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7223 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7224 return non_lvalue (fold_convert (type, arg0));
7226 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7227 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7228 (-ARG1 + ARG0) reduces to -ARG1. */
7229 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7230 return negate_expr (fold_convert (type, arg1));
7232 /* Fold &x - &x. This can happen from &x.foo - &x.
7233 This is unsafe for certain floats even in non-IEEE formats.
7234 In IEEE, it is unsafe because it does wrong for NaNs.
7235 Also note that operand_equal_p is always false if an operand
7236 is volatile. */
7238 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7239 && operand_equal_p (arg0, arg1, 0))
7240 return fold_convert (type, integer_zero_node);
7242 /* A - B -> A + (-B) if B is easily negatable. */
7243 if (!wins && negate_expr_p (arg1)
7244 && ((FLOAT_TYPE_P (type)
7245 /* Avoid this transformation if B is a positive REAL_CST. */
7246 && (TREE_CODE (arg1) != REAL_CST
7247 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7248 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7249 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
7251 /* Try folding difference of addresses. */
7253 HOST_WIDE_INT diff;
7255 if ((TREE_CODE (arg0) == ADDR_EXPR
7256 || TREE_CODE (arg1) == ADDR_EXPR)
7257 && ptr_difference_const (arg0, arg1, &diff))
7258 return build_int_cst_type (type, diff);
7261 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
7262 of the array. Loop optimizer sometimes produce this type of
7263 expressions. */
7264 if (TREE_CODE (arg0) == ADDR_EXPR
7265 && TREE_CODE (arg1) == MULT_EXPR)
7267 tem = try_move_mult_to_index (type, MINUS_EXPR, arg0, arg1);
7268 if (tem)
7269 return fold (tem);
7272 if (TREE_CODE (arg0) == MULT_EXPR
7273 && TREE_CODE (arg1) == MULT_EXPR
7274 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7276 /* (A * C) - (B * C) -> (A-B) * C. */
7277 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7278 TREE_OPERAND (arg1, 1), 0))
7279 return fold (build2 (MULT_EXPR, type,
7280 fold (build2 (MINUS_EXPR, type,
7281 TREE_OPERAND (arg0, 0),
7282 TREE_OPERAND (arg1, 0))),
7283 TREE_OPERAND (arg0, 1)));
7284 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7285 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7286 TREE_OPERAND (arg1, 0), 0))
7287 return fold (build2 (MULT_EXPR, type,
7288 TREE_OPERAND (arg0, 0),
7289 fold (build2 (MINUS_EXPR, type,
7290 TREE_OPERAND (arg0, 1),
7291 TREE_OPERAND (arg1, 1)))));
7294 goto associate;
7296 case MULT_EXPR:
7297 /* (-A) * (-B) -> A * B */
7298 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7299 return fold (build2 (MULT_EXPR, type,
7300 TREE_OPERAND (arg0, 0),
7301 negate_expr (arg1)));
7302 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7303 return fold (build2 (MULT_EXPR, type,
7304 negate_expr (arg0),
7305 TREE_OPERAND (arg1, 0)));
7307 if (! FLOAT_TYPE_P (type))
7309 if (integer_zerop (arg1))
7310 return omit_one_operand (type, arg1, arg0);
7311 if (integer_onep (arg1))
7312 return non_lvalue (fold_convert (type, arg0));
7314 /* (a * (1 << b)) is (a << b) */
7315 if (TREE_CODE (arg1) == LSHIFT_EXPR
7316 && integer_onep (TREE_OPERAND (arg1, 0)))
7317 return fold (build2 (LSHIFT_EXPR, type, arg0,
7318 TREE_OPERAND (arg1, 1)));
7319 if (TREE_CODE (arg0) == LSHIFT_EXPR
7320 && integer_onep (TREE_OPERAND (arg0, 0)))
7321 return fold (build2 (LSHIFT_EXPR, type, arg1,
7322 TREE_OPERAND (arg0, 1)));
7324 if (TREE_CODE (arg1) == INTEGER_CST
7325 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
7326 fold_convert (type, arg1),
7327 code, NULL_TREE)))
7328 return fold_convert (type, tem);
7331 else
7333 /* Maybe fold x * 0 to 0. The expressions aren't the same
7334 when x is NaN, since x * 0 is also NaN. Nor are they the
7335 same in modes with signed zeros, since multiplying a
7336 negative value by 0 gives -0, not +0. */
7337 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7338 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7339 && real_zerop (arg1))
7340 return omit_one_operand (type, arg1, arg0);
7341 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7342 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7343 && real_onep (arg1))
7344 return non_lvalue (fold_convert (type, arg0));
7346 /* Transform x * -1.0 into -x. */
7347 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7348 && real_minus_onep (arg1))
7349 return fold_convert (type, negate_expr (arg0));
7351 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7352 if (flag_unsafe_math_optimizations
7353 && TREE_CODE (arg0) == RDIV_EXPR
7354 && TREE_CODE (arg1) == REAL_CST
7355 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7357 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7358 arg1, 0);
7359 if (tem)
7360 return fold (build2 (RDIV_EXPR, type, tem,
7361 TREE_OPERAND (arg0, 1)));
7364 if (flag_unsafe_math_optimizations)
7366 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7367 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7369 /* Optimizations of root(...)*root(...). */
7370 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7372 tree rootfn, arg, arglist;
7373 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7374 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7376 /* Optimize sqrt(x)*sqrt(x) as x. */
7377 if (BUILTIN_SQRT_P (fcode0)
7378 && operand_equal_p (arg00, arg10, 0)
7379 && ! HONOR_SNANS (TYPE_MODE (type)))
7380 return arg00;
7382 /* Optimize root(x)*root(y) as root(x*y). */
7383 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7384 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7385 arglist = build_tree_list (NULL_TREE, arg);
7386 return build_function_call_expr (rootfn, arglist);
7389 /* Optimize expN(x)*expN(y) as expN(x+y). */
7390 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7392 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7393 tree arg = build2 (PLUS_EXPR, type,
7394 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7395 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7396 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7397 return build_function_call_expr (expfn, arglist);
7400 /* Optimizations of pow(...)*pow(...). */
7401 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7402 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7403 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7405 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7406 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7407 1)));
7408 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7409 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7410 1)));
7412 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7413 if (operand_equal_p (arg01, arg11, 0))
7415 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7416 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7417 tree arglist = tree_cons (NULL_TREE, fold (arg),
7418 build_tree_list (NULL_TREE,
7419 arg01));
7420 return build_function_call_expr (powfn, arglist);
7423 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7424 if (operand_equal_p (arg00, arg10, 0))
7426 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7427 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7428 tree arglist = tree_cons (NULL_TREE, arg00,
7429 build_tree_list (NULL_TREE,
7430 arg));
7431 return build_function_call_expr (powfn, arglist);
7435 /* Optimize tan(x)*cos(x) as sin(x). */
7436 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7437 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7438 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7439 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7440 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7441 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7442 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7443 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7445 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7447 if (sinfn != NULL_TREE)
7448 return build_function_call_expr (sinfn,
7449 TREE_OPERAND (arg0, 1));
7452 /* Optimize x*pow(x,c) as pow(x,c+1). */
7453 if (fcode1 == BUILT_IN_POW
7454 || fcode1 == BUILT_IN_POWF
7455 || fcode1 == BUILT_IN_POWL)
7457 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7458 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7459 1)));
7460 if (TREE_CODE (arg11) == REAL_CST
7461 && ! TREE_CONSTANT_OVERFLOW (arg11)
7462 && operand_equal_p (arg0, arg10, 0))
7464 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7465 REAL_VALUE_TYPE c;
7466 tree arg, arglist;
7468 c = TREE_REAL_CST (arg11);
7469 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7470 arg = build_real (type, c);
7471 arglist = build_tree_list (NULL_TREE, arg);
7472 arglist = tree_cons (NULL_TREE, arg0, arglist);
7473 return build_function_call_expr (powfn, arglist);
7477 /* Optimize pow(x,c)*x as pow(x,c+1). */
7478 if (fcode0 == BUILT_IN_POW
7479 || fcode0 == BUILT_IN_POWF
7480 || fcode0 == BUILT_IN_POWL)
7482 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7483 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7484 1)));
7485 if (TREE_CODE (arg01) == REAL_CST
7486 && ! TREE_CONSTANT_OVERFLOW (arg01)
7487 && operand_equal_p (arg1, arg00, 0))
7489 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7490 REAL_VALUE_TYPE c;
7491 tree arg, arglist;
7493 c = TREE_REAL_CST (arg01);
7494 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7495 arg = build_real (type, c);
7496 arglist = build_tree_list (NULL_TREE, arg);
7497 arglist = tree_cons (NULL_TREE, arg1, arglist);
7498 return build_function_call_expr (powfn, arglist);
7502 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7503 if (! optimize_size
7504 && operand_equal_p (arg0, arg1, 0))
7506 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7508 if (powfn)
7510 tree arg = build_real (type, dconst2);
7511 tree arglist = build_tree_list (NULL_TREE, arg);
7512 arglist = tree_cons (NULL_TREE, arg0, arglist);
7513 return build_function_call_expr (powfn, arglist);
7518 goto associate;
7520 case BIT_IOR_EXPR:
7521 bit_ior:
7522 if (integer_all_onesp (arg1))
7523 return omit_one_operand (type, arg1, arg0);
7524 if (integer_zerop (arg1))
7525 return non_lvalue (fold_convert (type, arg0));
7526 if (operand_equal_p (arg0, arg1, 0))
7527 return non_lvalue (fold_convert (type, arg0));
7529 /* ~X | X is -1. */
7530 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7531 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7533 t1 = build_int_cst (type, -1);
7534 t1 = force_fit_type (t1, 0, false, false);
7535 return omit_one_operand (type, t1, arg1);
7538 /* X | ~X is -1. */
7539 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7540 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7542 t1 = build_int_cst (type, -1);
7543 t1 = force_fit_type (t1, 0, false, false);
7544 return omit_one_operand (type, t1, arg0);
7547 t1 = distribute_bit_expr (code, type, arg0, arg1);
7548 if (t1 != NULL_TREE)
7549 return t1;
7551 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7553 This results in more efficient code for machines without a NAND
7554 instruction. Combine will canonicalize to the first form
7555 which will allow use of NAND instructions provided by the
7556 backend if they exist. */
7557 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7558 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7560 return fold (build1 (BIT_NOT_EXPR, type,
7561 build2 (BIT_AND_EXPR, type,
7562 TREE_OPERAND (arg0, 0),
7563 TREE_OPERAND (arg1, 0))));
7566 /* See if this can be simplified into a rotate first. If that
7567 is unsuccessful continue in the association code. */
7568 goto bit_rotate;
7570 case BIT_XOR_EXPR:
7571 if (integer_zerop (arg1))
7572 return non_lvalue (fold_convert (type, arg0));
7573 if (integer_all_onesp (arg1))
7574 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7575 if (operand_equal_p (arg0, arg1, 0))
7576 return omit_one_operand (type, integer_zero_node, arg0);
7578 /* ~X ^ X is -1. */
7579 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7580 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7582 t1 = build_int_cst (type, -1);
7583 t1 = force_fit_type (t1, 0, false, false);
7584 return omit_one_operand (type, t1, arg1);
7587 /* X ^ ~X is -1. */
7588 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7589 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7591 t1 = build_int_cst (type, -1);
7592 t1 = force_fit_type (t1, 0, false, false);
7593 return omit_one_operand (type, t1, arg0);
7596 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7597 with a constant, and the two constants have no bits in common,
7598 we should treat this as a BIT_IOR_EXPR since this may produce more
7599 simplifications. */
7600 if (TREE_CODE (arg0) == BIT_AND_EXPR
7601 && TREE_CODE (arg1) == BIT_AND_EXPR
7602 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7603 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7604 && integer_zerop (const_binop (BIT_AND_EXPR,
7605 TREE_OPERAND (arg0, 1),
7606 TREE_OPERAND (arg1, 1), 0)))
7608 code = BIT_IOR_EXPR;
7609 goto bit_ior;
7612 /* See if this can be simplified into a rotate first. If that
7613 is unsuccessful continue in the association code. */
7614 goto bit_rotate;
7616 case BIT_AND_EXPR:
7617 if (integer_all_onesp (arg1))
7618 return non_lvalue (fold_convert (type, arg0));
7619 if (integer_zerop (arg1))
7620 return omit_one_operand (type, arg1, arg0);
7621 if (operand_equal_p (arg0, arg1, 0))
7622 return non_lvalue (fold_convert (type, arg0));
7624 /* ~X & X is always zero. */
7625 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7626 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7627 return omit_one_operand (type, integer_zero_node, arg1);
7629 /* X & ~X is always zero. */
7630 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7631 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7632 return omit_one_operand (type, integer_zero_node, arg0);
7634 t1 = distribute_bit_expr (code, type, arg0, arg1);
7635 if (t1 != NULL_TREE)
7636 return t1;
7637 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7638 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7639 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7641 unsigned int prec
7642 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7644 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7645 && (~TREE_INT_CST_LOW (arg1)
7646 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7647 return fold_convert (type, TREE_OPERAND (arg0, 0));
7650 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7652 This results in more efficient code for machines without a NOR
7653 instruction. Combine will canonicalize to the first form
7654 which will allow use of NOR instructions provided by the
7655 backend if they exist. */
7656 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7657 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7659 return fold (build1 (BIT_NOT_EXPR, type,
7660 build2 (BIT_IOR_EXPR, type,
7661 TREE_OPERAND (arg0, 0),
7662 TREE_OPERAND (arg1, 0))));
7665 goto associate;
7667 case RDIV_EXPR:
7668 /* Don't touch a floating-point divide by zero unless the mode
7669 of the constant can represent infinity. */
7670 if (TREE_CODE (arg1) == REAL_CST
7671 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7672 && real_zerop (arg1))
7673 return t;
7675 /* (-A) / (-B) -> A / B */
7676 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7677 return fold (build2 (RDIV_EXPR, type,
7678 TREE_OPERAND (arg0, 0),
7679 negate_expr (arg1)));
7680 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7681 return fold (build2 (RDIV_EXPR, type,
7682 negate_expr (arg0),
7683 TREE_OPERAND (arg1, 0)));
7685 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7686 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7687 && real_onep (arg1))
7688 return non_lvalue (fold_convert (type, arg0));
7690 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7691 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7692 && real_minus_onep (arg1))
7693 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7695 /* If ARG1 is a constant, we can convert this to a multiply by the
7696 reciprocal. This does not have the same rounding properties,
7697 so only do this if -funsafe-math-optimizations. We can actually
7698 always safely do it if ARG1 is a power of two, but it's hard to
7699 tell if it is or not in a portable manner. */
7700 if (TREE_CODE (arg1) == REAL_CST)
7702 if (flag_unsafe_math_optimizations
7703 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7704 arg1, 0)))
7705 return fold (build2 (MULT_EXPR, type, arg0, tem));
7706 /* Find the reciprocal if optimizing and the result is exact. */
7707 if (optimize)
7709 REAL_VALUE_TYPE r;
7710 r = TREE_REAL_CST (arg1);
7711 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
7713 tem = build_real (type, r);
7714 return fold (build2 (MULT_EXPR, type, arg0, tem));
7718 /* Convert A/B/C to A/(B*C). */
7719 if (flag_unsafe_math_optimizations
7720 && TREE_CODE (arg0) == RDIV_EXPR)
7721 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7722 fold (build2 (MULT_EXPR, type,
7723 TREE_OPERAND (arg0, 1), arg1))));
7725 /* Convert A/(B/C) to (A/B)*C. */
7726 if (flag_unsafe_math_optimizations
7727 && TREE_CODE (arg1) == RDIV_EXPR)
7728 return fold (build2 (MULT_EXPR, type,
7729 fold (build2 (RDIV_EXPR, type, arg0,
7730 TREE_OPERAND (arg1, 0))),
7731 TREE_OPERAND (arg1, 1)));
7733 /* Convert C1/(X*C2) into (C1/C2)/X. */
7734 if (flag_unsafe_math_optimizations
7735 && TREE_CODE (arg1) == MULT_EXPR
7736 && TREE_CODE (arg0) == REAL_CST
7737 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7739 tree tem = const_binop (RDIV_EXPR, arg0,
7740 TREE_OPERAND (arg1, 1), 0);
7741 if (tem)
7742 return fold (build2 (RDIV_EXPR, type, tem,
7743 TREE_OPERAND (arg1, 0)));
7746 if (flag_unsafe_math_optimizations)
7748 enum built_in_function fcode = builtin_mathfn_code (arg1);
7749 /* Optimize x/expN(y) into x*expN(-y). */
7750 if (BUILTIN_EXPONENT_P (fcode))
7752 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7753 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7754 tree arglist = build_tree_list (NULL_TREE,
7755 fold_convert (type, arg));
7756 arg1 = build_function_call_expr (expfn, arglist);
7757 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7760 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7761 if (fcode == BUILT_IN_POW
7762 || fcode == BUILT_IN_POWF
7763 || fcode == BUILT_IN_POWL)
7765 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7766 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7767 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7768 tree neg11 = fold_convert (type, negate_expr (arg11));
7769 tree arglist = tree_cons(NULL_TREE, arg10,
7770 build_tree_list (NULL_TREE, neg11));
7771 arg1 = build_function_call_expr (powfn, arglist);
7772 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7776 if (flag_unsafe_math_optimizations)
7778 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7779 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7781 /* Optimize sin(x)/cos(x) as tan(x). */
7782 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7783 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7784 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7785 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7786 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7788 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7790 if (tanfn != NULL_TREE)
7791 return build_function_call_expr (tanfn,
7792 TREE_OPERAND (arg0, 1));
7795 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7796 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7797 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7798 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7799 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7800 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7802 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7804 if (tanfn != NULL_TREE)
7806 tree tmp = TREE_OPERAND (arg0, 1);
7807 tmp = build_function_call_expr (tanfn, tmp);
7808 return fold (build2 (RDIV_EXPR, type,
7809 build_real (type, dconst1), tmp));
7813 /* Optimize pow(x,c)/x as pow(x,c-1). */
7814 if (fcode0 == BUILT_IN_POW
7815 || fcode0 == BUILT_IN_POWF
7816 || fcode0 == BUILT_IN_POWL)
7818 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7819 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7820 if (TREE_CODE (arg01) == REAL_CST
7821 && ! TREE_CONSTANT_OVERFLOW (arg01)
7822 && operand_equal_p (arg1, arg00, 0))
7824 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7825 REAL_VALUE_TYPE c;
7826 tree arg, arglist;
7828 c = TREE_REAL_CST (arg01);
7829 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7830 arg = build_real (type, c);
7831 arglist = build_tree_list (NULL_TREE, arg);
7832 arglist = tree_cons (NULL_TREE, arg1, arglist);
7833 return build_function_call_expr (powfn, arglist);
7837 goto binary;
7839 case TRUNC_DIV_EXPR:
7840 case ROUND_DIV_EXPR:
7841 case FLOOR_DIV_EXPR:
7842 case CEIL_DIV_EXPR:
7843 case EXACT_DIV_EXPR:
7844 if (integer_onep (arg1))
7845 return non_lvalue (fold_convert (type, arg0));
7846 if (integer_zerop (arg1))
7847 return t;
7848 /* X / -1 is -X. */
7849 if (!TYPE_UNSIGNED (type)
7850 && TREE_CODE (arg1) == INTEGER_CST
7851 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7852 && TREE_INT_CST_HIGH (arg1) == -1)
7853 return fold_convert (type, negate_expr (arg0));
7855 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7856 operation, EXACT_DIV_EXPR.
7858 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7859 At one time others generated faster code, it's not clear if they do
7860 after the last round to changes to the DIV code in expmed.c. */
7861 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7862 && multiple_of_p (type, arg0, arg1))
7863 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7865 if (TREE_CODE (arg1) == INTEGER_CST
7866 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7867 code, NULL_TREE)))
7868 return fold_convert (type, tem);
7870 goto binary;
7872 case CEIL_MOD_EXPR:
7873 case FLOOR_MOD_EXPR:
7874 case ROUND_MOD_EXPR:
7875 case TRUNC_MOD_EXPR:
7876 if (integer_onep (arg1))
7877 return omit_one_operand (type, integer_zero_node, arg0);
7878 if (integer_zerop (arg1))
7879 return t;
7881 /* X % -1 is zero. */
7882 if (!TYPE_UNSIGNED (type)
7883 && TREE_CODE (arg1) == INTEGER_CST
7884 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7885 && TREE_INT_CST_HIGH (arg1) == -1)
7886 return omit_one_operand (type, integer_zero_node, arg0);
7888 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
7889 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
7890 if (code == TRUNC_MOD_EXPR
7891 && TYPE_UNSIGNED (type)
7892 && integer_pow2p (arg1))
7894 unsigned HOST_WIDE_INT high, low;
7895 tree mask;
7896 int l;
7898 l = tree_log2 (arg1);
7899 if (l >= HOST_BITS_PER_WIDE_INT)
7901 high = ((unsigned HOST_WIDE_INT) 1
7902 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
7903 low = -1;
7905 else
7907 high = 0;
7908 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
7911 mask = build_int_cst_wide (type, low, high);
7912 return fold (build2 (BIT_AND_EXPR, type,
7913 fold_convert (type, arg0), mask));
7916 /* X % -C is the same as X % C. */
7917 if (code == TRUNC_MOD_EXPR
7918 && !TYPE_UNSIGNED (type)
7919 && TREE_CODE (arg1) == INTEGER_CST
7920 && TREE_INT_CST_HIGH (arg1) < 0
7921 && !flag_trapv
7922 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
7923 && !sign_bit_p (arg1, arg1))
7924 return fold (build2 (code, type, fold_convert (type, arg0),
7925 fold_convert (type, negate_expr (arg1))));
7927 /* X % -Y is the same as X % Y. */
7928 if (code == TRUNC_MOD_EXPR
7929 && !TYPE_UNSIGNED (type)
7930 && TREE_CODE (arg1) == NEGATE_EXPR
7931 && !flag_trapv)
7932 return fold (build2 (code, type, fold_convert (type, arg0),
7933 fold_convert (type, TREE_OPERAND (arg1, 0))));
7935 if (TREE_CODE (arg1) == INTEGER_CST
7936 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7937 code, NULL_TREE)))
7938 return fold_convert (type, tem);
7940 goto binary;
7942 case LROTATE_EXPR:
7943 case RROTATE_EXPR:
7944 if (integer_all_onesp (arg0))
7945 return omit_one_operand (type, arg0, arg1);
7946 goto shift;
7948 case RSHIFT_EXPR:
7949 /* Optimize -1 >> x for arithmetic right shifts. */
7950 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7951 return omit_one_operand (type, arg0, arg1);
7952 /* ... fall through ... */
7954 case LSHIFT_EXPR:
7955 shift:
7956 if (integer_zerop (arg1))
7957 return non_lvalue (fold_convert (type, arg0));
7958 if (integer_zerop (arg0))
7959 return omit_one_operand (type, arg0, arg1);
7961 /* Since negative shift count is not well-defined,
7962 don't try to compute it in the compiler. */
7963 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7964 return t;
7965 /* Rewrite an LROTATE_EXPR by a constant into an
7966 RROTATE_EXPR by a new constant. */
7967 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7969 tree tem = build_int_cst (NULL_TREE,
7970 GET_MODE_BITSIZE (TYPE_MODE (type)));
7971 tem = fold_convert (TREE_TYPE (arg1), tem);
7972 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7973 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
7976 /* If we have a rotate of a bit operation with the rotate count and
7977 the second operand of the bit operation both constant,
7978 permute the two operations. */
7979 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7980 && (TREE_CODE (arg0) == BIT_AND_EXPR
7981 || TREE_CODE (arg0) == BIT_IOR_EXPR
7982 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7983 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7984 return fold (build2 (TREE_CODE (arg0), type,
7985 fold (build2 (code, type,
7986 TREE_OPERAND (arg0, 0), arg1)),
7987 fold (build2 (code, type,
7988 TREE_OPERAND (arg0, 1), arg1))));
7990 /* Two consecutive rotates adding up to the width of the mode can
7991 be ignored. */
7992 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7993 && TREE_CODE (arg0) == RROTATE_EXPR
7994 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7995 && TREE_INT_CST_HIGH (arg1) == 0
7996 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7997 && ((TREE_INT_CST_LOW (arg1)
7998 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7999 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8000 return TREE_OPERAND (arg0, 0);
8002 goto binary;
8004 case MIN_EXPR:
8005 if (operand_equal_p (arg0, arg1, 0))
8006 return omit_one_operand (type, arg0, arg1);
8007 if (INTEGRAL_TYPE_P (type)
8008 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8009 return omit_one_operand (type, arg1, arg0);
8010 goto associate;
8012 case MAX_EXPR:
8013 if (operand_equal_p (arg0, arg1, 0))
8014 return omit_one_operand (type, arg0, arg1);
8015 if (INTEGRAL_TYPE_P (type)
8016 && TYPE_MAX_VALUE (type)
8017 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8018 return omit_one_operand (type, arg1, arg0);
8019 goto associate;
8021 case TRUTH_NOT_EXPR:
8022 /* The argument to invert_truthvalue must have Boolean type. */
8023 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8024 arg0 = fold_convert (boolean_type_node, arg0);
8026 /* Note that the operand of this must be an int
8027 and its values must be 0 or 1.
8028 ("true" is a fixed value perhaps depending on the language,
8029 but we don't handle values other than 1 correctly yet.) */
8030 tem = invert_truthvalue (arg0);
8031 /* Avoid infinite recursion. */
8032 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
8033 return t;
8034 return fold_convert (type, tem);
8036 case TRUTH_ANDIF_EXPR:
8037 /* Note that the operands of this must be ints
8038 and their values must be 0 or 1.
8039 ("true" is a fixed value perhaps depending on the language.) */
8040 /* If first arg is constant zero, return it. */
8041 if (integer_zerop (arg0))
8042 return fold_convert (type, arg0);
8043 case TRUTH_AND_EXPR:
8044 /* If either arg is constant true, drop it. */
8045 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8046 return non_lvalue (fold_convert (type, arg1));
8047 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8048 /* Preserve sequence points. */
8049 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8050 return non_lvalue (fold_convert (type, arg0));
8051 /* If second arg is constant zero, result is zero, but first arg
8052 must be evaluated. */
8053 if (integer_zerop (arg1))
8054 return omit_one_operand (type, arg1, arg0);
8055 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8056 case will be handled here. */
8057 if (integer_zerop (arg0))
8058 return omit_one_operand (type, arg0, arg1);
8060 /* !X && X is always false. */
8061 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8062 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8063 return omit_one_operand (type, integer_zero_node, arg1);
8064 /* X && !X is always false. */
8065 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8066 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8067 return omit_one_operand (type, integer_zero_node, arg0);
8069 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8070 means A >= Y && A != MAX, but in this case we know that
8071 A < X <= MAX. */
8073 if (!TREE_SIDE_EFFECTS (arg0)
8074 && !TREE_SIDE_EFFECTS (arg1))
8076 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8077 if (tem)
8078 return fold (build2 (code, type, tem, arg1));
8080 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8081 if (tem)
8082 return fold (build2 (code, type, arg0, tem));
8085 truth_andor:
8086 /* We only do these simplifications if we are optimizing. */
8087 if (!optimize)
8088 return t;
8090 /* Check for things like (A || B) && (A || C). We can convert this
8091 to A || (B && C). Note that either operator can be any of the four
8092 truth and/or operations and the transformation will still be
8093 valid. Also note that we only care about order for the
8094 ANDIF and ORIF operators. If B contains side effects, this
8095 might change the truth-value of A. */
8096 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8097 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8098 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8099 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8100 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8101 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8103 tree a00 = TREE_OPERAND (arg0, 0);
8104 tree a01 = TREE_OPERAND (arg0, 1);
8105 tree a10 = TREE_OPERAND (arg1, 0);
8106 tree a11 = TREE_OPERAND (arg1, 1);
8107 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8108 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8109 && (code == TRUTH_AND_EXPR
8110 || code == TRUTH_OR_EXPR));
8112 if (operand_equal_p (a00, a10, 0))
8113 return fold (build2 (TREE_CODE (arg0), type, a00,
8114 fold (build2 (code, type, a01, a11))));
8115 else if (commutative && operand_equal_p (a00, a11, 0))
8116 return fold (build2 (TREE_CODE (arg0), type, a00,
8117 fold (build2 (code, type, a01, a10))));
8118 else if (commutative && operand_equal_p (a01, a10, 0))
8119 return fold (build2 (TREE_CODE (arg0), type, a01,
8120 fold (build2 (code, type, a00, a11))));
8122 /* This case if tricky because we must either have commutative
8123 operators or else A10 must not have side-effects. */
8125 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8126 && operand_equal_p (a01, a11, 0))
8127 return fold (build2 (TREE_CODE (arg0), type,
8128 fold (build2 (code, type, a00, a10)),
8129 a01));
8132 /* See if we can build a range comparison. */
8133 if (0 != (tem = fold_range_test (t)))
8134 return tem;
8136 /* Check for the possibility of merging component references. If our
8137 lhs is another similar operation, try to merge its rhs with our
8138 rhs. Then try to merge our lhs and rhs. */
8139 if (TREE_CODE (arg0) == code
8140 && 0 != (tem = fold_truthop (code, type,
8141 TREE_OPERAND (arg0, 1), arg1)))
8142 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8144 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8145 return tem;
8147 return t;
8149 case TRUTH_ORIF_EXPR:
8150 /* Note that the operands of this must be ints
8151 and their values must be 0 or true.
8152 ("true" is a fixed value perhaps depending on the language.) */
8153 /* If first arg is constant true, return it. */
8154 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8155 return fold_convert (type, arg0);
8156 case TRUTH_OR_EXPR:
8157 /* If either arg is constant zero, drop it. */
8158 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8159 return non_lvalue (fold_convert (type, arg1));
8160 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8161 /* Preserve sequence points. */
8162 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8163 return non_lvalue (fold_convert (type, arg0));
8164 /* If second arg is constant true, result is true, but we must
8165 evaluate first arg. */
8166 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8167 return omit_one_operand (type, arg1, arg0);
8168 /* Likewise for first arg, but note this only occurs here for
8169 TRUTH_OR_EXPR. */
8170 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8171 return omit_one_operand (type, arg0, arg1);
8173 /* !X || X is always true. */
8174 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8175 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8176 return omit_one_operand (type, integer_one_node, arg1);
8177 /* X || !X is always true. */
8178 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8179 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8180 return omit_one_operand (type, integer_one_node, arg0);
8182 goto truth_andor;
8184 case TRUTH_XOR_EXPR:
8185 /* If the second arg is constant zero, drop it. */
8186 if (integer_zerop (arg1))
8187 return non_lvalue (fold_convert (type, arg0));
8188 /* If the second arg is constant true, this is a logical inversion. */
8189 if (integer_onep (arg1))
8190 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
8191 /* Identical arguments cancel to zero. */
8192 if (operand_equal_p (arg0, arg1, 0))
8193 return omit_one_operand (type, integer_zero_node, arg0);
8195 /* !X ^ X is always true. */
8196 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8197 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8198 return omit_one_operand (type, integer_one_node, arg1);
8200 /* X ^ !X is always true. */
8201 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8202 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8203 return omit_one_operand (type, integer_one_node, arg0);
8205 return t;
8207 case EQ_EXPR:
8208 case NE_EXPR:
8209 case LT_EXPR:
8210 case GT_EXPR:
8211 case LE_EXPR:
8212 case GE_EXPR:
8213 /* If one arg is a real or integer constant, put it last. */
8214 if (tree_swap_operands_p (arg0, arg1, true))
8215 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
8217 /* If this is an equality comparison of the address of a non-weak
8218 object against zero, then we know the result. */
8219 if ((code == EQ_EXPR || code == NE_EXPR)
8220 && TREE_CODE (arg0) == ADDR_EXPR
8221 && DECL_P (TREE_OPERAND (arg0, 0))
8222 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8223 && integer_zerop (arg1))
8224 return constant_boolean_node (code != EQ_EXPR, type);
8226 /* If this is an equality comparison of the address of two non-weak,
8227 unaliased symbols neither of which are extern (since we do not
8228 have access to attributes for externs), then we know the result. */
8229 if ((code == EQ_EXPR || code == NE_EXPR)
8230 && TREE_CODE (arg0) == ADDR_EXPR
8231 && DECL_P (TREE_OPERAND (arg0, 0))
8232 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8233 && ! lookup_attribute ("alias",
8234 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8235 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8236 && TREE_CODE (arg1) == ADDR_EXPR
8237 && DECL_P (TREE_OPERAND (arg1, 0))
8238 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8239 && ! lookup_attribute ("alias",
8240 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8241 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8242 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
8243 ? code == EQ_EXPR : code != EQ_EXPR,
8244 type);
8246 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8248 tree targ0 = strip_float_extensions (arg0);
8249 tree targ1 = strip_float_extensions (arg1);
8250 tree newtype = TREE_TYPE (targ0);
8252 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8253 newtype = TREE_TYPE (targ1);
8255 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8256 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8257 return fold (build2 (code, type, fold_convert (newtype, targ0),
8258 fold_convert (newtype, targ1)));
8260 /* (-a) CMP (-b) -> b CMP a */
8261 if (TREE_CODE (arg0) == NEGATE_EXPR
8262 && TREE_CODE (arg1) == NEGATE_EXPR)
8263 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
8264 TREE_OPERAND (arg0, 0)));
8266 if (TREE_CODE (arg1) == REAL_CST)
8268 REAL_VALUE_TYPE cst;
8269 cst = TREE_REAL_CST (arg1);
8271 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8272 if (TREE_CODE (arg0) == NEGATE_EXPR)
8273 return
8274 fold (build2 (swap_tree_comparison (code), type,
8275 TREE_OPERAND (arg0, 0),
8276 build_real (TREE_TYPE (arg1),
8277 REAL_VALUE_NEGATE (cst))));
8279 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8280 /* a CMP (-0) -> a CMP 0 */
8281 if (REAL_VALUE_MINUS_ZERO (cst))
8282 return fold (build2 (code, type, arg0,
8283 build_real (TREE_TYPE (arg1), dconst0)));
8285 /* x != NaN is always true, other ops are always false. */
8286 if (REAL_VALUE_ISNAN (cst)
8287 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8289 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8290 return omit_one_operand (type, tem, arg0);
8293 /* Fold comparisons against infinity. */
8294 if (REAL_VALUE_ISINF (cst))
8296 tem = fold_inf_compare (code, type, arg0, arg1);
8297 if (tem != NULL_TREE)
8298 return tem;
8302 /* If this is a comparison of a real constant with a PLUS_EXPR
8303 or a MINUS_EXPR of a real constant, we can convert it into a
8304 comparison with a revised real constant as long as no overflow
8305 occurs when unsafe_math_optimizations are enabled. */
8306 if (flag_unsafe_math_optimizations
8307 && TREE_CODE (arg1) == REAL_CST
8308 && (TREE_CODE (arg0) == PLUS_EXPR
8309 || TREE_CODE (arg0) == MINUS_EXPR)
8310 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8311 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8312 ? MINUS_EXPR : PLUS_EXPR,
8313 arg1, TREE_OPERAND (arg0, 1), 0))
8314 && ! TREE_CONSTANT_OVERFLOW (tem))
8315 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8317 /* Likewise, we can simplify a comparison of a real constant with
8318 a MINUS_EXPR whose first operand is also a real constant, i.e.
8319 (c1 - x) < c2 becomes x > c1-c2. */
8320 if (flag_unsafe_math_optimizations
8321 && TREE_CODE (arg1) == REAL_CST
8322 && TREE_CODE (arg0) == MINUS_EXPR
8323 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8324 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8325 arg1, 0))
8326 && ! TREE_CONSTANT_OVERFLOW (tem))
8327 return fold (build2 (swap_tree_comparison (code), type,
8328 TREE_OPERAND (arg0, 1), tem));
8330 /* Fold comparisons against built-in math functions. */
8331 if (TREE_CODE (arg1) == REAL_CST
8332 && flag_unsafe_math_optimizations
8333 && ! flag_errno_math)
8335 enum built_in_function fcode = builtin_mathfn_code (arg0);
8337 if (fcode != END_BUILTINS)
8339 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8340 if (tem != NULL_TREE)
8341 return tem;
8346 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8347 if (TREE_CONSTANT (arg1)
8348 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8349 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8350 /* This optimization is invalid for ordered comparisons
8351 if CONST+INCR overflows or if foo+incr might overflow.
8352 This optimization is invalid for floating point due to rounding.
8353 For pointer types we assume overflow doesn't happen. */
8354 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8355 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8356 && (code == EQ_EXPR || code == NE_EXPR))))
8358 tree varop, newconst;
8360 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8362 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
8363 arg1, TREE_OPERAND (arg0, 1)));
8364 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8365 TREE_OPERAND (arg0, 0),
8366 TREE_OPERAND (arg0, 1));
8368 else
8370 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
8371 arg1, TREE_OPERAND (arg0, 1)));
8372 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8373 TREE_OPERAND (arg0, 0),
8374 TREE_OPERAND (arg0, 1));
8378 /* If VAROP is a reference to a bitfield, we must mask
8379 the constant by the width of the field. */
8380 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8381 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8382 && host_integerp (DECL_SIZE (TREE_OPERAND
8383 (TREE_OPERAND (varop, 0), 1)), 1))
8385 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8386 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8387 tree folded_compare, shift;
8389 /* First check whether the comparison would come out
8390 always the same. If we don't do that we would
8391 change the meaning with the masking. */
8392 folded_compare = fold (build2 (code, type,
8393 TREE_OPERAND (varop, 0), arg1));
8394 if (integer_zerop (folded_compare)
8395 || integer_onep (folded_compare))
8396 return omit_one_operand (type, folded_compare, varop);
8398 shift = build_int_cst (NULL_TREE,
8399 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8400 shift = fold_convert (TREE_TYPE (varop), shift);
8401 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8402 newconst, shift));
8403 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8404 newconst, shift));
8407 return fold (build2 (code, type, varop, newconst));
8410 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8411 This transformation affects the cases which are handled in later
8412 optimizations involving comparisons with non-negative constants. */
8413 if (TREE_CODE (arg1) == INTEGER_CST
8414 && TREE_CODE (arg0) != INTEGER_CST
8415 && tree_int_cst_sgn (arg1) > 0)
8417 switch (code)
8419 case GE_EXPR:
8420 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8421 return fold (build2 (GT_EXPR, type, arg0, arg1));
8423 case LT_EXPR:
8424 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8425 return fold (build2 (LE_EXPR, type, arg0, arg1));
8427 default:
8428 break;
8432 /* Comparisons with the highest or lowest possible integer of
8433 the specified size will have known values.
8435 This is quite similar to fold_relational_hi_lo, however,
8436 attempts to share the code have been nothing but trouble. */
8438 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8440 if (TREE_CODE (arg1) == INTEGER_CST
8441 && ! TREE_CONSTANT_OVERFLOW (arg1)
8442 && width <= HOST_BITS_PER_WIDE_INT
8443 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8444 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8446 unsigned HOST_WIDE_INT signed_max;
8447 unsigned HOST_WIDE_INT max, min;
8449 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
8451 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8453 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8454 min = 0;
8456 else
8458 max = signed_max;
8459 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8462 if (TREE_INT_CST_HIGH (arg1) == 0
8463 && TREE_INT_CST_LOW (arg1) == max)
8464 switch (code)
8466 case GT_EXPR:
8467 return omit_one_operand (type, integer_zero_node, arg0);
8469 case GE_EXPR:
8470 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8472 case LE_EXPR:
8473 return omit_one_operand (type, integer_one_node, arg0);
8475 case LT_EXPR:
8476 return fold (build2 (NE_EXPR, type, arg0, arg1));
8478 /* The GE_EXPR and LT_EXPR cases above are not normally
8479 reached because of previous transformations. */
8481 default:
8482 break;
8484 else if (TREE_INT_CST_HIGH (arg1) == 0
8485 && TREE_INT_CST_LOW (arg1) == max - 1)
8486 switch (code)
8488 case GT_EXPR:
8489 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8490 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8491 case LE_EXPR:
8492 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8493 return fold (build2 (NE_EXPR, type, arg0, arg1));
8494 default:
8495 break;
8497 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8498 && TREE_INT_CST_LOW (arg1) == min)
8499 switch (code)
8501 case LT_EXPR:
8502 return omit_one_operand (type, integer_zero_node, arg0);
8504 case LE_EXPR:
8505 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8507 case GE_EXPR:
8508 return omit_one_operand (type, integer_one_node, arg0);
8510 case GT_EXPR:
8511 return fold (build2 (NE_EXPR, type, arg0, arg1));
8513 default:
8514 break;
8516 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8517 && TREE_INT_CST_LOW (arg1) == min + 1)
8518 switch (code)
8520 case GE_EXPR:
8521 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8522 return fold (build2 (NE_EXPR, type, arg0, arg1));
8523 case LT_EXPR:
8524 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8525 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8526 default:
8527 break;
8530 else if (!in_gimple_form
8531 && TREE_INT_CST_HIGH (arg1) == 0
8532 && TREE_INT_CST_LOW (arg1) == signed_max
8533 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8534 /* signed_type does not work on pointer types. */
8535 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8537 /* The following case also applies to X < signed_max+1
8538 and X >= signed_max+1 because previous transformations. */
8539 if (code == LE_EXPR || code == GT_EXPR)
8541 tree st0, st1;
8542 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8543 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8544 return fold
8545 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
8546 type, fold_convert (st0, arg0),
8547 fold_convert (st1, integer_zero_node)));
8553 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8554 a MINUS_EXPR of a constant, we can convert it into a comparison with
8555 a revised constant as long as no overflow occurs. */
8556 if ((code == EQ_EXPR || code == NE_EXPR)
8557 && TREE_CODE (arg1) == INTEGER_CST
8558 && (TREE_CODE (arg0) == PLUS_EXPR
8559 || TREE_CODE (arg0) == MINUS_EXPR)
8560 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8561 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8562 ? MINUS_EXPR : PLUS_EXPR,
8563 arg1, TREE_OPERAND (arg0, 1), 0))
8564 && ! TREE_CONSTANT_OVERFLOW (tem))
8565 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8567 /* Similarly for a NEGATE_EXPR. */
8568 else if ((code == EQ_EXPR || code == NE_EXPR)
8569 && TREE_CODE (arg0) == NEGATE_EXPR
8570 && TREE_CODE (arg1) == INTEGER_CST
8571 && 0 != (tem = negate_expr (arg1))
8572 && TREE_CODE (tem) == INTEGER_CST
8573 && ! TREE_CONSTANT_OVERFLOW (tem))
8574 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8576 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8577 for !=. Don't do this for ordered comparisons due to overflow. */
8578 else if ((code == NE_EXPR || code == EQ_EXPR)
8579 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8580 return fold (build2 (code, type,
8581 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8583 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8584 && TREE_CODE (arg0) == NOP_EXPR)
8586 /* If we are widening one operand of an integer comparison,
8587 see if the other operand is similarly being widened. Perhaps we
8588 can do the comparison in the narrower type. */
8589 tem = fold_widened_comparison (code, type, arg0, arg1);
8590 if (tem)
8591 return tem;
8593 /* Or if we are changing signedness. */
8594 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8595 if (tem)
8596 return tem;
8599 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8600 constant, we can simplify it. */
8601 else if (TREE_CODE (arg1) == INTEGER_CST
8602 && (TREE_CODE (arg0) == MIN_EXPR
8603 || TREE_CODE (arg0) == MAX_EXPR)
8604 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8605 return optimize_minmax_comparison (t);
8607 /* If we are comparing an ABS_EXPR with a constant, we can
8608 convert all the cases into explicit comparisons, but they may
8609 well not be faster than doing the ABS and one comparison.
8610 But ABS (X) <= C is a range comparison, which becomes a subtraction
8611 and a comparison, and is probably faster. */
8612 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8613 && TREE_CODE (arg0) == ABS_EXPR
8614 && ! TREE_SIDE_EFFECTS (arg0)
8615 && (0 != (tem = negate_expr (arg1)))
8616 && TREE_CODE (tem) == INTEGER_CST
8617 && ! TREE_CONSTANT_OVERFLOW (tem))
8618 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8619 build2 (GE_EXPR, type,
8620 TREE_OPERAND (arg0, 0), tem),
8621 build2 (LE_EXPR, type,
8622 TREE_OPERAND (arg0, 0), arg1)));
8624 /* If this is an EQ or NE comparison with zero and ARG0 is
8625 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8626 two operations, but the latter can be done in one less insn
8627 on machines that have only two-operand insns or on which a
8628 constant cannot be the first operand. */
8629 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8630 && TREE_CODE (arg0) == BIT_AND_EXPR)
8632 tree arg00 = TREE_OPERAND (arg0, 0);
8633 tree arg01 = TREE_OPERAND (arg0, 1);
8634 if (TREE_CODE (arg00) == LSHIFT_EXPR
8635 && integer_onep (TREE_OPERAND (arg00, 0)))
8636 return
8637 fold (build2 (code, type,
8638 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8639 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8640 arg01, TREE_OPERAND (arg00, 1)),
8641 fold_convert (TREE_TYPE (arg0),
8642 integer_one_node)),
8643 arg1));
8644 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8645 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8646 return
8647 fold (build2 (code, type,
8648 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8649 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8650 arg00, TREE_OPERAND (arg01, 1)),
8651 fold_convert (TREE_TYPE (arg0),
8652 integer_one_node)),
8653 arg1));
8656 /* If this is an NE or EQ comparison of zero against the result of a
8657 signed MOD operation whose second operand is a power of 2, make
8658 the MOD operation unsigned since it is simpler and equivalent. */
8659 if ((code == NE_EXPR || code == EQ_EXPR)
8660 && integer_zerop (arg1)
8661 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8662 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8663 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8664 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8665 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8666 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8668 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8669 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
8670 fold_convert (newtype,
8671 TREE_OPERAND (arg0, 0)),
8672 fold_convert (newtype,
8673 TREE_OPERAND (arg0, 1))));
8675 return fold (build2 (code, type, newmod,
8676 fold_convert (newtype, arg1)));
8679 /* If this is an NE comparison of zero with an AND of one, remove the
8680 comparison since the AND will give the correct value. */
8681 if (code == NE_EXPR && integer_zerop (arg1)
8682 && TREE_CODE (arg0) == BIT_AND_EXPR
8683 && integer_onep (TREE_OPERAND (arg0, 1)))
8684 return fold_convert (type, arg0);
8686 /* If we have (A & C) == C where C is a power of 2, convert this into
8687 (A & C) != 0. Similarly for NE_EXPR. */
8688 if ((code == EQ_EXPR || code == NE_EXPR)
8689 && TREE_CODE (arg0) == BIT_AND_EXPR
8690 && integer_pow2p (TREE_OPERAND (arg0, 1))
8691 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8692 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8693 arg0, fold_convert (TREE_TYPE (arg0),
8694 integer_zero_node)));
8696 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8697 2, then fold the expression into shifts and logical operations. */
8698 tem = fold_single_bit_test (code, arg0, arg1, type);
8699 if (tem)
8700 return tem;
8702 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8703 Similarly for NE_EXPR. */
8704 if ((code == EQ_EXPR || code == NE_EXPR)
8705 && TREE_CODE (arg0) == BIT_AND_EXPR
8706 && TREE_CODE (arg1) == INTEGER_CST
8707 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8709 tree notc = fold (build1 (BIT_NOT_EXPR,
8710 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8711 TREE_OPERAND (arg0, 1)));
8712 tree dandnotc = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8713 arg1, notc));
8714 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8715 if (integer_nonzerop (dandnotc))
8716 return omit_one_operand (type, rslt, arg0);
8719 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8720 Similarly for NE_EXPR. */
8721 if ((code == EQ_EXPR || code == NE_EXPR)
8722 && TREE_CODE (arg0) == BIT_IOR_EXPR
8723 && TREE_CODE (arg1) == INTEGER_CST
8724 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8726 tree notd = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8727 tree candnotd = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8728 TREE_OPERAND (arg0, 1), notd));
8729 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8730 if (integer_nonzerop (candnotd))
8731 return omit_one_operand (type, rslt, arg0);
8734 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8735 and similarly for >= into !=. */
8736 if ((code == LT_EXPR || code == GE_EXPR)
8737 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8738 && TREE_CODE (arg1) == LSHIFT_EXPR
8739 && integer_onep (TREE_OPERAND (arg1, 0)))
8740 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8741 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8742 TREE_OPERAND (arg1, 1)),
8743 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8745 else if ((code == LT_EXPR || code == GE_EXPR)
8746 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8747 && (TREE_CODE (arg1) == NOP_EXPR
8748 || TREE_CODE (arg1) == CONVERT_EXPR)
8749 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8750 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8751 return
8752 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8753 fold_convert (TREE_TYPE (arg0),
8754 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8755 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8756 1))),
8757 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8759 /* Simplify comparison of something with itself. (For IEEE
8760 floating-point, we can only do some of these simplifications.) */
8761 if (operand_equal_p (arg0, arg1, 0))
8763 switch (code)
8765 case EQ_EXPR:
8766 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8767 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8768 return constant_boolean_node (1, type);
8769 break;
8771 case GE_EXPR:
8772 case LE_EXPR:
8773 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8774 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8775 return constant_boolean_node (1, type);
8776 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8778 case NE_EXPR:
8779 /* For NE, we can only do this simplification if integer
8780 or we don't honor IEEE floating point NaNs. */
8781 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8782 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8783 break;
8784 /* ... fall through ... */
8785 case GT_EXPR:
8786 case LT_EXPR:
8787 return constant_boolean_node (0, type);
8788 default:
8789 gcc_unreachable ();
8793 /* If we are comparing an expression that just has comparisons
8794 of two integer values, arithmetic expressions of those comparisons,
8795 and constants, we can simplify it. There are only three cases
8796 to check: the two values can either be equal, the first can be
8797 greater, or the second can be greater. Fold the expression for
8798 those three values. Since each value must be 0 or 1, we have
8799 eight possibilities, each of which corresponds to the constant 0
8800 or 1 or one of the six possible comparisons.
8802 This handles common cases like (a > b) == 0 but also handles
8803 expressions like ((x > y) - (y > x)) > 0, which supposedly
8804 occur in macroized code. */
8806 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8808 tree cval1 = 0, cval2 = 0;
8809 int save_p = 0;
8811 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8812 /* Don't handle degenerate cases here; they should already
8813 have been handled anyway. */
8814 && cval1 != 0 && cval2 != 0
8815 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8816 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8817 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8818 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8819 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8820 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8821 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8823 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8824 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8826 /* We can't just pass T to eval_subst in case cval1 or cval2
8827 was the same as ARG1. */
8829 tree high_result
8830 = fold (build2 (code, type,
8831 eval_subst (arg0, cval1, maxval,
8832 cval2, minval),
8833 arg1));
8834 tree equal_result
8835 = fold (build2 (code, type,
8836 eval_subst (arg0, cval1, maxval,
8837 cval2, maxval),
8838 arg1));
8839 tree low_result
8840 = fold (build2 (code, type,
8841 eval_subst (arg0, cval1, minval,
8842 cval2, maxval),
8843 arg1));
8845 /* All three of these results should be 0 or 1. Confirm they
8846 are. Then use those values to select the proper code
8847 to use. */
8849 if ((integer_zerop (high_result)
8850 || integer_onep (high_result))
8851 && (integer_zerop (equal_result)
8852 || integer_onep (equal_result))
8853 && (integer_zerop (low_result)
8854 || integer_onep (low_result)))
8856 /* Make a 3-bit mask with the high-order bit being the
8857 value for `>', the next for '=', and the low for '<'. */
8858 switch ((integer_onep (high_result) * 4)
8859 + (integer_onep (equal_result) * 2)
8860 + integer_onep (low_result))
8862 case 0:
8863 /* Always false. */
8864 return omit_one_operand (type, integer_zero_node, arg0);
8865 case 1:
8866 code = LT_EXPR;
8867 break;
8868 case 2:
8869 code = EQ_EXPR;
8870 break;
8871 case 3:
8872 code = LE_EXPR;
8873 break;
8874 case 4:
8875 code = GT_EXPR;
8876 break;
8877 case 5:
8878 code = NE_EXPR;
8879 break;
8880 case 6:
8881 code = GE_EXPR;
8882 break;
8883 case 7:
8884 /* Always true. */
8885 return omit_one_operand (type, integer_one_node, arg0);
8888 tem = build2 (code, type, cval1, cval2);
8889 if (save_p)
8890 return save_expr (tem);
8891 else
8892 return fold (tem);
8897 /* If this is a comparison of a field, we may be able to simplify it. */
8898 if (((TREE_CODE (arg0) == COMPONENT_REF
8899 && lang_hooks.can_use_bit_fields_p ())
8900 || TREE_CODE (arg0) == BIT_FIELD_REF)
8901 && (code == EQ_EXPR || code == NE_EXPR)
8902 /* Handle the constant case even without -O
8903 to make sure the warnings are given. */
8904 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8906 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8907 if (t1)
8908 return t1;
8911 /* If this is a comparison of complex values and either or both sides
8912 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8913 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8914 This may prevent needless evaluations. */
8915 if ((code == EQ_EXPR || code == NE_EXPR)
8916 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8917 && (TREE_CODE (arg0) == COMPLEX_EXPR
8918 || TREE_CODE (arg1) == COMPLEX_EXPR
8919 || TREE_CODE (arg0) == COMPLEX_CST
8920 || TREE_CODE (arg1) == COMPLEX_CST))
8922 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8923 tree real0, imag0, real1, imag1;
8925 arg0 = save_expr (arg0);
8926 arg1 = save_expr (arg1);
8927 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8928 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8929 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8930 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8932 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8933 : TRUTH_ORIF_EXPR),
8934 type,
8935 fold (build2 (code, type, real0, real1)),
8936 fold (build2 (code, type, imag0, imag1))));
8939 /* Optimize comparisons of strlen vs zero to a compare of the
8940 first character of the string vs zero. To wit,
8941 strlen(ptr) == 0 => *ptr == 0
8942 strlen(ptr) != 0 => *ptr != 0
8943 Other cases should reduce to one of these two (or a constant)
8944 due to the return value of strlen being unsigned. */
8945 if ((code == EQ_EXPR || code == NE_EXPR)
8946 && integer_zerop (arg1)
8947 && TREE_CODE (arg0) == CALL_EXPR)
8949 tree fndecl = get_callee_fndecl (arg0);
8950 tree arglist;
8952 if (fndecl
8953 && DECL_BUILT_IN (fndecl)
8954 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8955 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8956 && (arglist = TREE_OPERAND (arg0, 1))
8957 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8958 && ! TREE_CHAIN (arglist))
8959 return fold (build2 (code, type,
8960 build1 (INDIRECT_REF, char_type_node,
8961 TREE_VALUE (arglist)),
8962 fold_convert (char_type_node,
8963 integer_zero_node)));
8966 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8967 into a single range test. */
8968 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8969 && TREE_CODE (arg1) == INTEGER_CST
8970 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8971 && !integer_zerop (TREE_OPERAND (arg0, 1))
8972 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8973 && !TREE_OVERFLOW (arg1))
8975 t1 = fold_div_compare (code, type, arg0, arg1);
8976 if (t1 != NULL_TREE)
8977 return t1;
8980 if ((code == EQ_EXPR || code == NE_EXPR)
8981 && !TREE_SIDE_EFFECTS (arg0)
8982 && integer_zerop (arg1)
8983 && tree_expr_nonzero_p (arg0))
8984 return constant_boolean_node (code==NE_EXPR, type);
8986 t1 = fold_relational_const (code, type, arg0, arg1);
8987 return t1 == NULL_TREE ? t : t1;
8989 case UNORDERED_EXPR:
8990 case ORDERED_EXPR:
8991 case UNLT_EXPR:
8992 case UNLE_EXPR:
8993 case UNGT_EXPR:
8994 case UNGE_EXPR:
8995 case UNEQ_EXPR:
8996 case LTGT_EXPR:
8997 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8999 t1 = fold_relational_const (code, type, arg0, arg1);
9000 if (t1 != NULL_TREE)
9001 return t1;
9004 /* If the first operand is NaN, the result is constant. */
9005 if (TREE_CODE (arg0) == REAL_CST
9006 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9007 && (code != LTGT_EXPR || ! flag_trapping_math))
9009 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9010 ? integer_zero_node
9011 : integer_one_node;
9012 return omit_one_operand (type, t1, arg1);
9015 /* If the second operand is NaN, the result is constant. */
9016 if (TREE_CODE (arg1) == REAL_CST
9017 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9018 && (code != LTGT_EXPR || ! flag_trapping_math))
9020 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9021 ? integer_zero_node
9022 : integer_one_node;
9023 return omit_one_operand (type, t1, arg0);
9026 /* Simplify unordered comparison of something with itself. */
9027 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9028 && operand_equal_p (arg0, arg1, 0))
9029 return constant_boolean_node (1, type);
9031 if (code == LTGT_EXPR
9032 && !flag_trapping_math
9033 && operand_equal_p (arg0, arg1, 0))
9034 return constant_boolean_node (0, type);
9036 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9038 tree targ0 = strip_float_extensions (arg0);
9039 tree targ1 = strip_float_extensions (arg1);
9040 tree newtype = TREE_TYPE (targ0);
9042 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9043 newtype = TREE_TYPE (targ1);
9045 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9046 return fold (build2 (code, type, fold_convert (newtype, targ0),
9047 fold_convert (newtype, targ1)));
9050 return t;
9052 case COND_EXPR:
9053 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9054 so all simple results must be passed through pedantic_non_lvalue. */
9055 if (TREE_CODE (arg0) == INTEGER_CST)
9057 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
9058 /* Only optimize constant conditions when the selected branch
9059 has the same type as the COND_EXPR. This avoids optimizing
9060 away "c ? x : throw", where the throw has a void type. */
9061 if (! VOID_TYPE_P (TREE_TYPE (tem))
9062 || VOID_TYPE_P (type))
9063 return pedantic_non_lvalue (tem);
9064 return t;
9066 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
9067 return pedantic_omit_one_operand (type, arg1, arg0);
9069 /* If we have A op B ? A : C, we may be able to convert this to a
9070 simpler expression, depending on the operation and the values
9071 of B and C. Signed zeros prevent all of these transformations,
9072 for reasons given above each one.
9074 Also try swapping the arguments and inverting the conditional. */
9075 if (COMPARISON_CLASS_P (arg0)
9076 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9077 arg1, TREE_OPERAND (arg0, 1))
9078 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9080 tem = fold_cond_expr_with_comparison (type, arg0,
9081 TREE_OPERAND (t, 1),
9082 TREE_OPERAND (t, 2));
9083 if (tem)
9084 return tem;
9087 if (COMPARISON_CLASS_P (arg0)
9088 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9089 TREE_OPERAND (t, 2),
9090 TREE_OPERAND (arg0, 1))
9091 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
9093 tem = invert_truthvalue (arg0);
9094 if (COMPARISON_CLASS_P (tem))
9096 tem = fold_cond_expr_with_comparison (type, tem,
9097 TREE_OPERAND (t, 2),
9098 TREE_OPERAND (t, 1));
9099 if (tem)
9100 return tem;
9104 /* If the second operand is simpler than the third, swap them
9105 since that produces better jump optimization results. */
9106 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
9107 TREE_OPERAND (t, 2), false))
9109 /* See if this can be inverted. If it can't, possibly because
9110 it was a floating-point inequality comparison, don't do
9111 anything. */
9112 tem = invert_truthvalue (arg0);
9114 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9115 return fold (build3 (code, type, tem,
9116 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
9119 /* Convert A ? 1 : 0 to simply A. */
9120 if (integer_onep (TREE_OPERAND (t, 1))
9121 && integer_zerop (TREE_OPERAND (t, 2))
9122 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
9123 call to fold will try to move the conversion inside
9124 a COND, which will recurse. In that case, the COND_EXPR
9125 is probably the best choice, so leave it alone. */
9126 && type == TREE_TYPE (arg0))
9127 return pedantic_non_lvalue (arg0);
9129 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
9130 over COND_EXPR in cases such as floating point comparisons. */
9131 if (integer_zerop (TREE_OPERAND (t, 1))
9132 && integer_onep (TREE_OPERAND (t, 2))
9133 && truth_value_p (TREE_CODE (arg0)))
9134 return pedantic_non_lvalue (fold_convert (type,
9135 invert_truthvalue (arg0)));
9137 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
9138 if (TREE_CODE (arg0) == LT_EXPR
9139 && integer_zerop (TREE_OPERAND (arg0, 1))
9140 && integer_zerop (TREE_OPERAND (t, 2))
9141 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
9142 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
9143 TREE_TYPE (tem), tem, arg1)));
9145 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
9146 already handled above. */
9147 if (TREE_CODE (arg0) == BIT_AND_EXPR
9148 && integer_onep (TREE_OPERAND (arg0, 1))
9149 && integer_zerop (TREE_OPERAND (t, 2))
9150 && integer_pow2p (arg1))
9152 tree tem = TREE_OPERAND (arg0, 0);
9153 STRIP_NOPS (tem);
9154 if (TREE_CODE (tem) == RSHIFT_EXPR
9155 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9156 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
9157 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
9158 return fold (build2 (BIT_AND_EXPR, type,
9159 TREE_OPERAND (tem, 0), arg1));
9162 /* A & N ? N : 0 is simply A & N if N is a power of two. This
9163 is probably obsolete because the first operand should be a
9164 truth value (that's why we have the two cases above), but let's
9165 leave it in until we can confirm this for all front-ends. */
9166 if (integer_zerop (TREE_OPERAND (t, 2))
9167 && TREE_CODE (arg0) == NE_EXPR
9168 && integer_zerop (TREE_OPERAND (arg0, 1))
9169 && integer_pow2p (arg1)
9170 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
9171 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
9172 arg1, OEP_ONLY_CONST))
9173 return pedantic_non_lvalue (fold_convert (type,
9174 TREE_OPERAND (arg0, 0)));
9176 /* Convert A ? B : 0 into A && B if A and B are truth values. */
9177 if (integer_zerop (TREE_OPERAND (t, 2))
9178 && truth_value_p (TREE_CODE (arg0))
9179 && truth_value_p (TREE_CODE (arg1)))
9180 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
9182 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
9183 if (integer_onep (TREE_OPERAND (t, 2))
9184 && truth_value_p (TREE_CODE (arg0))
9185 && truth_value_p (TREE_CODE (arg1)))
9187 /* Only perform transformation if ARG0 is easily inverted. */
9188 tem = invert_truthvalue (arg0);
9189 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9190 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
9193 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
9194 if (integer_zerop (arg1)
9195 && truth_value_p (TREE_CODE (arg0))
9196 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9198 /* Only perform transformation if ARG0 is easily inverted. */
9199 tem = invert_truthvalue (arg0);
9200 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9201 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
9202 TREE_OPERAND (t, 2)));
9205 /* Convert A ? 1 : B into A || B if A and B are truth values. */
9206 if (integer_onep (arg1)
9207 && truth_value_p (TREE_CODE (arg0))
9208 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9209 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
9210 TREE_OPERAND (t, 2)));
9212 return t;
9214 case COMPOUND_EXPR:
9215 /* When pedantic, a compound expression can be neither an lvalue
9216 nor an integer constant expression. */
9217 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9218 return t;
9219 /* Don't let (0, 0) be null pointer constant. */
9220 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9221 : fold_convert (type, arg1);
9222 return pedantic_non_lvalue (tem);
9224 case COMPLEX_EXPR:
9225 if (wins)
9226 return build_complex (type, arg0, arg1);
9227 return t;
9229 case REALPART_EXPR:
9230 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9231 return t;
9232 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9233 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
9234 TREE_OPERAND (arg0, 1));
9235 else if (TREE_CODE (arg0) == COMPLEX_CST)
9236 return TREE_REALPART (arg0);
9237 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9238 return fold (build2 (TREE_CODE (arg0), type,
9239 fold (build1 (REALPART_EXPR, type,
9240 TREE_OPERAND (arg0, 0))),
9241 fold (build1 (REALPART_EXPR, type,
9242 TREE_OPERAND (arg0, 1)))));
9243 return t;
9245 case IMAGPART_EXPR:
9246 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9247 return fold_convert (type, integer_zero_node);
9248 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9249 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
9250 TREE_OPERAND (arg0, 0));
9251 else if (TREE_CODE (arg0) == COMPLEX_CST)
9252 return TREE_IMAGPART (arg0);
9253 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9254 return fold (build2 (TREE_CODE (arg0), type,
9255 fold (build1 (IMAGPART_EXPR, type,
9256 TREE_OPERAND (arg0, 0))),
9257 fold (build1 (IMAGPART_EXPR, type,
9258 TREE_OPERAND (arg0, 1)))));
9259 return t;
9261 case CALL_EXPR:
9262 /* Check for a built-in function. */
9263 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
9264 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
9265 == FUNCTION_DECL)
9266 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
9268 tree tmp = fold_builtin (t, false);
9269 if (tmp)
9270 return tmp;
9272 return t;
9274 default:
9275 return t;
9276 } /* switch (code) */
9279 #ifdef ENABLE_FOLD_CHECKING
9280 #undef fold
9282 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
9283 static void fold_check_failed (tree, tree);
9284 void print_fold_checksum (tree);
9286 /* When --enable-checking=fold, compute a digest of expr before
9287 and after actual fold call to see if fold did not accidentally
9288 change original expr. */
9290 tree
9291 fold (tree expr)
9293 tree ret;
9294 struct md5_ctx ctx;
9295 unsigned char checksum_before[16], checksum_after[16];
9296 htab_t ht;
9298 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9299 md5_init_ctx (&ctx);
9300 fold_checksum_tree (expr, &ctx, ht);
9301 md5_finish_ctx (&ctx, checksum_before);
9302 htab_empty (ht);
9304 ret = fold_1 (expr);
9306 md5_init_ctx (&ctx);
9307 fold_checksum_tree (expr, &ctx, ht);
9308 md5_finish_ctx (&ctx, checksum_after);
9309 htab_delete (ht);
9311 if (memcmp (checksum_before, checksum_after, 16))
9312 fold_check_failed (expr, ret);
9314 return ret;
9317 void
9318 print_fold_checksum (tree expr)
9320 struct md5_ctx ctx;
9321 unsigned char checksum[16], cnt;
9322 htab_t ht;
9324 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9325 md5_init_ctx (&ctx);
9326 fold_checksum_tree (expr, &ctx, ht);
9327 md5_finish_ctx (&ctx, checksum);
9328 htab_delete (ht);
9329 for (cnt = 0; cnt < 16; ++cnt)
9330 fprintf (stderr, "%02x", checksum[cnt]);
9331 putc ('\n', stderr);
/* Called when the checksum of EXPR differs before and after folding,
   i.e. fold modified its input tree in place.  RET is the (discarded)
   folded result; both arguments exist only for inspection under a
   debugger.  Aborts compilation with an internal error.  */
static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
/* Fold the bytes of EXPR, and of every tree reachable from it, into the
   running md5 context CTX.  HT records already-visited nodes so shared
   subtrees (and cycles) are hashed exactly once.  Fields that fold is
   legitimately allowed to modify (DECL_ASSEMBLER_NAME, a type's
   pointer-to/reference-to/cached-values caches) are scrubbed from a
   local copy before hashing so they do not perturb the digest.  */
static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  /* Scratch copy for scrubbing mutable fields; the assert below checks
     that tree_decl is large enough for every variant we may copy.  */
  char buf[sizeof (struct tree_decl)];
  int i, len;

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_decl));
  if (expr == NULL)
    return;
  /* Hash each node at most once.  */
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)))
    {
      /* Allow these fields to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
      TYPE_CACHED_VALUES_P (expr) = 0;
      TYPE_CACHED_VALUES (expr) = NULL;
    }
  /* Hash the node's own bytes, then recurse into everything it points
     at; the traversal order must stay fixed or digests won't compare.  */
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
      /* Ordinary expression nodes: recurse into each operand.  */
      len = TREE_CODE_LENGTH (code);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
      fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
      fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
9466 #endif
9468 /* Perform constant folding and related simplification of initializer
9469 expression EXPR. This behaves identically to "fold" but ignores
9470 potential run-time traps and exceptions that fold must preserve. */
9472 tree
9473 fold_initializer (tree expr)
9475 int saved_signaling_nans = flag_signaling_nans;
9476 int saved_trapping_math = flag_trapping_math;
9477 int saved_trapv = flag_trapv;
9478 tree result;
9480 flag_signaling_nans = 0;
9481 flag_trapping_math = 0;
9482 flag_trapv = 0;
9484 result = fold (expr);
9486 flag_signaling_nans = saved_signaling_nans;
9487 flag_trapping_math = saved_trapping_math;
9488 flag_trapv = saved_trapv;
9490 return result;
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determined it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, tree top, tree bottom)
{
  /* Anything is trivially a multiple of itself.  */
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      /* A product is a multiple of BOTTOM if either factor is.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* A sum or difference is a multiple only if both terms are.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* Rewrite X << C as X * (1 << C) and recurse on the constant
	     factor.  const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && ! TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      /* Look through the wrapper at how the value was computed.  */
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      /* Two constants: decide by computing TOP % BOTTOM, but punt on
	 negative values in unsigned types where the modulus would be
	 computed on the (huge) wrapped values.  */
      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
					 top, bottom, 0));

    default:
      return 0;
    }
}
/* Return true (nonzero) if `t' is known to be non-negative.  A result
   of 0 means "unknown", not "negative": the analysis is conservative
   and recurses structurally through the expression.  */

int
tree_expr_nonnegative_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      return 1;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      /* For floats, nonneg + nonneg is nonneg (no wraparound).  */
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      /* +1 so the sum cannot reach the sign bit.  */
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
	    return 1;
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
		 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (TREE_TYPE (t));
	}
      return 0;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      /* Quotient of two nonnegatives is nonnegative.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      /* The sign of a remainder follows the dividend.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_AND_EXPR:
      /* AND with any nonnegative value clears the sign bit.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* IOR/XOR keep the sign bit clear only if both inputs do.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return 1;
		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      /* Widening an unsigned value cannot set the sign bit.  */
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    case COND_EXPR:
      /* Nonnegative if both selectable arms are.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      /* MAX is nonnegative if either operand is.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MODIFY_EXPR:
      /* The value of an assignment is the assigned value.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case FLOAT_EXPR:
      /* int-to-float conversion preserves sign.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.
	   NOTE(review): VOID_TYPE_P is applied to the expression T
	   itself rather than TREE_TYPE (T) — verify this is intended.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_p (t);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

	return 0;
      }

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	tree arglist = TREE_OPERAND (t, 1);
	if (fndecl
	    && DECL_BUILT_IN (fndecl)
	    && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	      /* Expand one case label per float (f/ /l) or integer
		 ( /l/ll) variant of the builtin.  */
#define CASE_BUILTIN_F(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
#define CASE_BUILTIN_I(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:

	      CASE_BUILTIN_F (BUILT_IN_ACOS)
	      CASE_BUILTIN_F (BUILT_IN_ACOSH)
	      CASE_BUILTIN_F (BUILT_IN_CABS)
	      CASE_BUILTIN_F (BUILT_IN_COSH)
	      CASE_BUILTIN_F (BUILT_IN_ERFC)
	      CASE_BUILTIN_F (BUILT_IN_EXP)
	      CASE_BUILTIN_F (BUILT_IN_EXP10)
	      CASE_BUILTIN_F (BUILT_IN_EXP2)
	      CASE_BUILTIN_F (BUILT_IN_FABS)
	      CASE_BUILTIN_F (BUILT_IN_FDIM)
	      CASE_BUILTIN_F (BUILT_IN_FREXP)
	      CASE_BUILTIN_F (BUILT_IN_HYPOT)
	      CASE_BUILTIN_F (BUILT_IN_POW10)
	      CASE_BUILTIN_I (BUILT_IN_FFS)
	      CASE_BUILTIN_I (BUILT_IN_PARITY)
	      CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
	      /* Always true.  */
	      return 1;

	      CASE_BUILTIN_F (BUILT_IN_SQRT)
	      /* sqrt(-0.0) is -0.0.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
		return 1;
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	      CASE_BUILTIN_F (BUILT_IN_ASINH)
	      CASE_BUILTIN_F (BUILT_IN_ATAN)
	      CASE_BUILTIN_F (BUILT_IN_ATANH)
	      CASE_BUILTIN_F (BUILT_IN_CBRT)
	      CASE_BUILTIN_F (BUILT_IN_CEIL)
	      CASE_BUILTIN_F (BUILT_IN_ERF)
	      CASE_BUILTIN_F (BUILT_IN_EXPM1)
	      CASE_BUILTIN_F (BUILT_IN_FLOOR)
	      CASE_BUILTIN_F (BUILT_IN_FMOD)
	      CASE_BUILTIN_F (BUILT_IN_LDEXP)
	      CASE_BUILTIN_F (BUILT_IN_LLRINT)
	      CASE_BUILTIN_F (BUILT_IN_LLROUND)
	      CASE_BUILTIN_F (BUILT_IN_LRINT)
	      CASE_BUILTIN_F (BUILT_IN_LROUND)
	      CASE_BUILTIN_F (BUILT_IN_MODF)
	      CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
	      CASE_BUILTIN_F (BUILT_IN_POW)
	      CASE_BUILTIN_F (BUILT_IN_RINT)
	      CASE_BUILTIN_F (BUILT_IN_ROUND)
	      CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
	      CASE_BUILTIN_F (BUILT_IN_SINH)
	      CASE_BUILTIN_F (BUILT_IN_TANH)
	      CASE_BUILTIN_F (BUILT_IN_TRUNC)
	      /* True if the 1st argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	      CASE_BUILTIN_F (BUILT_IN_FMAX)
	      /* True if the 1st OR 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		|| tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	      CASE_BUILTIN_F (BUILT_IN_FMIN)
	      /* True if the 1st AND 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		&& tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	      CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
	      /* True if the 2nd argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    default:
	      break;
#undef CASE_BUILTIN_F
#undef CASE_BUILTIN_I
	    }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return 1;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
9873 /* Return true when T is an address and is known to be nonzero.
9874 For floating point we further ensure that T is not denormal.
9875 Similar logic is present in nonzero_address in rtlanal.h. */
9877 static bool
9878 tree_expr_nonzero_p (tree t)
9880 tree type = TREE_TYPE (t);
9882 /* Doing something useful for floating point would need more work. */
9883 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9884 return false;
9886 switch (TREE_CODE (t))
9888 case ABS_EXPR:
9889 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9890 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9892 case INTEGER_CST:
9893 /* We used to test for !integer_zerop here. This does not work correctly
9894 if TREE_CONSTANT_OVERFLOW (t). */
9895 return (TREE_INT_CST_LOW (t) != 0
9896 || TREE_INT_CST_HIGH (t) != 0);
9898 case PLUS_EXPR:
9899 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9901 /* With the presence of negative values it is hard
9902 to say something. */
9903 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9904 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9905 return false;
9906 /* One of operands must be positive and the other non-negative. */
9907 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9908 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9910 break;
9912 case MULT_EXPR:
9913 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9915 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9916 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9918 break;
9920 case NOP_EXPR:
9922 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9923 tree outer_type = TREE_TYPE (t);
9925 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9926 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
9928 break;
9930 case ADDR_EXPR:
9932 tree base = get_base_address (TREE_OPERAND (t, 0));
9934 if (!base)
9935 return false;
9937 /* Weak declarations may link to NULL. */
9938 if (DECL_P (base))
9939 return !DECL_WEAK (base);
9941 /* Constants are never weak. */
9942 if (CONSTANT_CLASS_P (base))
9943 return true;
9945 return false;
9948 case COND_EXPR:
9949 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9950 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9952 case MIN_EXPR:
9953 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9954 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9956 case MAX_EXPR:
9957 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9959 /* When both operands are nonzero, then MAX must be too. */
9960 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9961 return true;
9963 /* MAX where operand 0 is positive is positive. */
9964 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9966 /* MAX where operand 1 is positive is positive. */
9967 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9968 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9969 return true;
9970 break;
9972 case COMPOUND_EXPR:
9973 case MODIFY_EXPR:
9974 case BIND_EXPR:
9975 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9977 case SAVE_EXPR:
9978 case NON_LVALUE_EXPR:
9979 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9981 case BIT_IOR_EXPR:
9982 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9983 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9985 default:
9986 break;
9988 return false;
/* See if we are applying CODE, a relational to the highest or lowest
   possible integer of TYPE.  If so, then the result is a compile
   time constant.

   The comparison may be canonicalized in place: *CODE_P and *OP1_P
   can be rewritten (e.g. "X >= MAX" becomes "X == MAX") with
   NULL_TREE returned; a non-NULL return is the fully folded
   result.  */

static tree
fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
		       tree *op1_p)
{
  tree op0 = *op0_p;
  tree op1 = *op1_p;
  enum tree_code code = *code_p;
  /* Bit width of OP1's mode; the extreme values below only fit in a
     single word when this is <= HOST_BITS_PER_WIDE_INT.  */
  int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));

  if (TREE_CODE (op1) == INTEGER_CST
      && ! TREE_CONSTANT_OVERFLOW (op1)
      && width <= HOST_BITS_PER_WIDE_INT
      && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
	  || POINTER_TYPE_P (TREE_TYPE (op1))))
    {
      unsigned HOST_WIDE_INT signed_max;
      unsigned HOST_WIDE_INT max, min;

      /* Largest representable signed value for this width.  */
      signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;

      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
	{
	  max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
	  min = 0;
	}
      else
	{
	  max = signed_max;
	  min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
	}

      /* Comparison against the maximum value of the type.  */
      if (TREE_INT_CST_HIGH (op1) == 0
	  && TREE_INT_CST_LOW (op1) == max)
	switch (code)
	  {
	  case GT_EXPR:
	    /* X > MAX is always false.  */
	    return omit_one_operand (type, integer_zero_node, op0);

	  case GE_EXPR:
	    *code_p = EQ_EXPR;
	    break;
	  case LE_EXPR:
	    /* X <= MAX is always true.  */
	    return omit_one_operand (type, integer_one_node, op0);

	  case LT_EXPR:
	    *code_p = NE_EXPR;
	    break;

	  /* The GE_EXPR and LT_EXPR cases above are not normally
	     reached because of previous transformations.  */

	  default:
	    break;
	  }
      /* Comparison against MAX-1: bump the constant up to MAX.  */
      else if (TREE_INT_CST_HIGH (op1) == 0
	       && TREE_INT_CST_LOW (op1) == max - 1)
	switch (code)
	  {
	  case GT_EXPR:
	    *code_p = EQ_EXPR;
	    *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
	    break;
	  case LE_EXPR:
	    *code_p = NE_EXPR;
	    *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
	    break;
	  default:
	    break;
	  }
      /* Comparison against the minimum value of the type.  */
      else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
	       && TREE_INT_CST_LOW (op1) == min)
	switch (code)
	  {
	  case LT_EXPR:
	    /* X < MIN is always false.  */
	    return omit_one_operand (type, integer_zero_node, op0);

	  case LE_EXPR:
	    *code_p = EQ_EXPR;
	    break;

	  case GE_EXPR:
	    /* X >= MIN is always true.  */
	    return omit_one_operand (type, integer_one_node, op0);

	  case GT_EXPR:
	    *code_p = NE_EXPR;
	    break;

	  default:
	    break;
	  }
      /* Comparison against MIN+1: drop the constant down to MIN.  */
      else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
	       && TREE_INT_CST_LOW (op1) == min + 1)
	switch (code)
	  {
	  case GE_EXPR:
	    *code_p = NE_EXPR;
	    *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
	    break;
	  case LT_EXPR:
	    *code_p = EQ_EXPR;
	    *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
	    break;
	  default:
	    break;
	  }

      /* Unsigned comparison against the largest signed value: this is
	 equivalent to a sign test in the corresponding signed type.  */
      else if (TREE_INT_CST_HIGH (op1) == 0
	       && TREE_INT_CST_LOW (op1) == signed_max
	       && TYPE_UNSIGNED (TREE_TYPE (op1))
	       /* signed_type does not work on pointer types.  */
	       && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	{
	  /* The following case also applies to X < signed_max+1
	     and X >= signed_max+1 because previous transformations.  */
	  if (code == LE_EXPR || code == GT_EXPR)
	    {
	      tree st0, st1, exp, retval;
	      st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
	      st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));

	      /* Unsigned X <= signed_max is signed X >= 0, and
		 unsigned X > signed_max is signed X < 0.  */
	      exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
			    type,
			    fold_convert (st0, op0),
			    fold_convert (st1, integer_zero_node));

	      retval = fold_binary_to_constant (TREE_CODE (exp),
						TREE_TYPE (exp),
						TREE_OPERAND (exp, 0),
						TREE_OPERAND (exp, 1));

	      /* If we are in gimple form, then returning EXP would create
		 non-gimple expressions.  Clearing it is safe and insures
		 we do not allow a non-gimple expression to escape.  */
	      if (in_gimple_form)
		exp = NULL;

	      return (retval ? retval : exp);
	    }
	}
    }

  return NULL_TREE;
}
10140 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
10141 attempt to fold the expression to a constant without modifying TYPE,
10142 OP0 or OP1.
10144 If the expression could be simplified to a constant, then return
10145 the constant. If the expression would not be simplified to a
10146 constant, then return NULL_TREE.
10148 Note this is primarily designed to be called after gimplification
10149 of the tree structures and when at least one operand is a constant.
10150 As a result of those simplifying assumptions this routine is far
10151 simpler than the generic fold routine. */
10153 tree
10154 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
10156 int wins = 1;
10157 tree subop0;
10158 tree subop1;
10159 tree tem;
10161 /* If this is a commutative operation, and ARG0 is a constant, move it
10162 to ARG1 to reduce the number of tests below. */
10163 if (commutative_tree_code (code)
10164 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
10166 tem = op0;
10167 op0 = op1;
10168 op1 = tem;
10171 /* If either operand is a complex type, extract its real component. */
10172 if (TREE_CODE (op0) == COMPLEX_CST)
10173 subop0 = TREE_REALPART (op0);
10174 else
10175 subop0 = op0;
10177 if (TREE_CODE (op1) == COMPLEX_CST)
10178 subop1 = TREE_REALPART (op1);
10179 else
10180 subop1 = op1;
10182 /* Note if either argument is not a real or integer constant.
10183 With a few exceptions, simplification is limited to cases
10184 where both arguments are constants. */
10185 if ((TREE_CODE (subop0) != INTEGER_CST
10186 && TREE_CODE (subop0) != REAL_CST)
10187 || (TREE_CODE (subop1) != INTEGER_CST
10188 && TREE_CODE (subop1) != REAL_CST))
10189 wins = 0;
10191 switch (code)
10193 case PLUS_EXPR:
10194 /* (plus (address) (const_int)) is a constant. */
10195 if (TREE_CODE (op0) == PLUS_EXPR
10196 && TREE_CODE (op1) == INTEGER_CST
10197 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
10198 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
10199 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
10200 == ADDR_EXPR)))
10201 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
10203 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
10204 const_binop (PLUS_EXPR, op1,
10205 TREE_OPERAND (op0, 1), 0));
10207 case BIT_XOR_EXPR:
10209 binary:
10210 if (!wins)
10211 return NULL_TREE;
10213 /* Both arguments are constants. Simplify. */
10214 tem = const_binop (code, op0, op1, 0);
10215 if (tem != NULL_TREE)
10217 /* The return value should always have the same type as
10218 the original expression. */
10219 if (TREE_TYPE (tem) != type)
10220 tem = fold_convert (type, tem);
10222 return tem;
10224 return NULL_TREE;
10226 case MINUS_EXPR:
10227 /* Fold &x - &x. This can happen from &x.foo - &x.
10228 This is unsafe for certain floats even in non-IEEE formats.
10229 In IEEE, it is unsafe because it does wrong for NaNs.
10230 Also note that operand_equal_p is always false if an
10231 operand is volatile. */
10232 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
10233 return fold_convert (type, integer_zero_node);
10235 goto binary;
10237 case MULT_EXPR:
10238 case BIT_AND_EXPR:
10239 /* Special case multiplication or bitwise AND where one argument
10240 is zero. */
10241 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
10242 return omit_one_operand (type, op1, op0);
10243 else
10244 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
10245 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
10246 && real_zerop (op1))
10247 return omit_one_operand (type, op1, op0);
10249 goto binary;
10251 case BIT_IOR_EXPR:
10252 /* Special case when we know the result will be all ones. */
10253 if (integer_all_onesp (op1))
10254 return omit_one_operand (type, op1, op0);
10256 goto binary;
10258 case TRUNC_DIV_EXPR:
10259 case ROUND_DIV_EXPR:
10260 case FLOOR_DIV_EXPR:
10261 case CEIL_DIV_EXPR:
10262 case EXACT_DIV_EXPR:
10263 case TRUNC_MOD_EXPR:
10264 case ROUND_MOD_EXPR:
10265 case FLOOR_MOD_EXPR:
10266 case CEIL_MOD_EXPR:
10267 case RDIV_EXPR:
10268 /* Division by zero is undefined. */
10269 if (integer_zerop (op1))
10270 return NULL_TREE;
10272 if (TREE_CODE (op1) == REAL_CST
10273 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
10274 && real_zerop (op1))
10275 return NULL_TREE;
10277 goto binary;
10279 case MIN_EXPR:
10280 if (INTEGRAL_TYPE_P (type)
10281 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10282 return omit_one_operand (type, op1, op0);
10284 goto binary;
10286 case MAX_EXPR:
10287 if (INTEGRAL_TYPE_P (type)
10288 && TYPE_MAX_VALUE (type)
10289 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10290 return omit_one_operand (type, op1, op0);
10292 goto binary;
10294 case RSHIFT_EXPR:
10295 /* Optimize -1 >> x for arithmetic right shifts. */
10296 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
10297 return omit_one_operand (type, op0, op1);
10298 /* ... fall through ... */
10300 case LSHIFT_EXPR:
10301 if (integer_zerop (op0))
10302 return omit_one_operand (type, op0, op1);
10304 /* Since negative shift count is not well-defined, don't
10305 try to compute it in the compiler. */
10306 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
10307 return NULL_TREE;
10309 goto binary;
10311 case LROTATE_EXPR:
10312 case RROTATE_EXPR:
10313 /* -1 rotated either direction by any amount is still -1. */
10314 if (integer_all_onesp (op0))
10315 return omit_one_operand (type, op0, op1);
10317 /* 0 rotated either direction by any amount is still zero. */
10318 if (integer_zerop (op0))
10319 return omit_one_operand (type, op0, op1);
10321 goto binary;
10323 case COMPLEX_EXPR:
10324 if (wins)
10325 return build_complex (type, op0, op1);
10326 return NULL_TREE;
10328 case LT_EXPR:
10329 case LE_EXPR:
10330 case GT_EXPR:
10331 case GE_EXPR:
10332 case EQ_EXPR:
10333 case NE_EXPR:
10334 /* If one arg is a real or integer constant, put it last. */
10335 if ((TREE_CODE (op0) == INTEGER_CST
10336 && TREE_CODE (op1) != INTEGER_CST)
10337 || (TREE_CODE (op0) == REAL_CST
10338 && TREE_CODE (op0) != REAL_CST))
10340 tree temp;
10342 temp = op0;
10343 op0 = op1;
10344 op1 = temp;
10345 code = swap_tree_comparison (code);
10348 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10349 This transformation affects the cases which are handled in later
10350 optimizations involving comparisons with non-negative constants. */
10351 if (TREE_CODE (op1) == INTEGER_CST
10352 && TREE_CODE (op0) != INTEGER_CST
10353 && tree_int_cst_sgn (op1) > 0)
10355 switch (code)
10357 case GE_EXPR:
10358 code = GT_EXPR;
10359 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10360 break;
10362 case LT_EXPR:
10363 code = LE_EXPR;
10364 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10365 break;
10367 default:
10368 break;
10372 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10373 if (tem)
10374 return tem;
10376 /* Fall through. */
10378 case ORDERED_EXPR:
10379 case UNORDERED_EXPR:
10380 case UNLT_EXPR:
10381 case UNLE_EXPR:
10382 case UNGT_EXPR:
10383 case UNGE_EXPR:
10384 case UNEQ_EXPR:
10385 case LTGT_EXPR:
10386 if (!wins)
10387 return NULL_TREE;
10389 return fold_relational_const (code, type, op0, op1);
10391 case RANGE_EXPR:
10392 /* This could probably be handled. */
10393 return NULL_TREE;
10395 case TRUTH_AND_EXPR:
10396 /* If second arg is constant zero, result is zero, but first arg
10397 must be evaluated. */
10398 if (integer_zerop (op1))
10399 return omit_one_operand (type, op1, op0);
10400 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10401 case will be handled here. */
10402 if (integer_zerop (op0))
10403 return omit_one_operand (type, op0, op1);
10404 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10405 return constant_boolean_node (true, type);
10406 return NULL_TREE;
10408 case TRUTH_OR_EXPR:
10409 /* If second arg is constant true, result is true, but we must
10410 evaluate first arg. */
10411 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10412 return omit_one_operand (type, op1, op0);
10413 /* Likewise for first arg, but note this only occurs here for
10414 TRUTH_OR_EXPR. */
10415 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10416 return omit_one_operand (type, op0, op1);
10417 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10418 return constant_boolean_node (false, type);
10419 return NULL_TREE;
10421 case TRUTH_XOR_EXPR:
10422 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10424 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10425 return constant_boolean_node (x, type);
10427 return NULL_TREE;
10429 default:
10430 return NULL_TREE;
10434 /* Given the components of a unary expression CODE, TYPE and OP0,
10435 attempt to fold the expression to a constant without modifying
10436 TYPE or OP0.
10438 If the expression could be simplified to a constant, then return
10439 the constant. If the expression would not be simplified to a
10440 constant, then return NULL_TREE.
10442 Note this is primarily designed to be called after gimplification
10443 of the tree structures and when op0 is a constant. As a result
10444 of those simplifying assumptions this routine is far simpler than
10445 the generic fold routine. */
10447 tree
10448 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
10450 /* Make sure we have a suitable constant argument. */
10451 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
10453 tree subop;
10455 if (TREE_CODE (op0) == COMPLEX_CST)
10456 subop = TREE_REALPART (op0);
10457 else
10458 subop = op0;
10460 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
10461 return NULL_TREE;
10464 switch (code)
10466 case NOP_EXPR:
10467 case FLOAT_EXPR:
10468 case CONVERT_EXPR:
10469 case FIX_TRUNC_EXPR:
10470 case FIX_FLOOR_EXPR:
10471 case FIX_CEIL_EXPR:
10472 return fold_convert_const (code, type, op0);
10474 case NEGATE_EXPR:
10475 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10476 return fold_negate_const (op0, type);
10477 else
10478 return NULL_TREE;
10480 case ABS_EXPR:
10481 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10482 return fold_abs_const (op0, type);
10483 else
10484 return NULL_TREE;
10486 case BIT_NOT_EXPR:
10487 if (TREE_CODE (op0) == INTEGER_CST)
10488 return fold_not_const (op0, type);
10489 else
10490 return NULL_TREE;
10492 case REALPART_EXPR:
10493 if (TREE_CODE (op0) == COMPLEX_CST)
10494 return TREE_REALPART (op0);
10495 else
10496 return NULL_TREE;
10498 case IMAGPART_EXPR:
10499 if (TREE_CODE (op0) == COMPLEX_CST)
10500 return TREE_IMAGPART (op0);
10501 else
10502 return NULL_TREE;
10504 case CONJ_EXPR:
10505 if (TREE_CODE (op0) == COMPLEX_CST
10506 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10507 return build_complex (type, TREE_REALPART (op0),
10508 negate_expr (TREE_IMAGPART (op0)));
10509 return NULL_TREE;
10511 default:
10512 return NULL_TREE;
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      /* For *(str + i), string_constant extracts both the string and
	 the index; for str[i], operand 0 is the string and operand 1
	 the index.  */
      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, fold_convert (sizetype, low_bound));

	  string = exp1;
	}

      /* Only fold an in-bounds, constant-index read of a single-byte
	 integer element from an actual STRING_CST whose element type
	 matches the access type.  */
      if (string
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return fold_convert (TREE_TYPE (exp),
			     build_int_cst (NULL_TREE,
					    (TREE_STRING_POINTER (string)
					     [TREE_INT_CST_LOW (index)])));
    }
  return NULL;
}
10565 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10566 an integer constant or real constant.
10568 TYPE is the type of the result. */
10570 static tree
10571 fold_negate_const (tree arg0, tree type)
10573 tree t = NULL_TREE;
10575 switch (TREE_CODE (arg0))
10577 case INTEGER_CST:
10579 unsigned HOST_WIDE_INT low;
10580 HOST_WIDE_INT high;
10581 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10582 TREE_INT_CST_HIGH (arg0),
10583 &low, &high);
10584 t = build_int_cst_wide (type, low, high);
10585 t = force_fit_type (t, 1,
10586 (overflow | TREE_OVERFLOW (arg0))
10587 && !TYPE_UNSIGNED (type),
10588 TREE_CONSTANT_OVERFLOW (arg0));
10589 break;
10592 case REAL_CST:
10593 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10594 break;
10596 default:
10597 gcc_unreachable ();
10600 return t;
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
	 the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
	t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
	t = arg0;
      /* If the value is negative, then the absolute value is
	 its negation.  */
      else
	{
	  unsigned HOST_WIDE_INT low;
	  HOST_WIDE_INT high;
	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				     TREE_INT_CST_HIGH (arg0),
				     &low, &high);
	  /* Refit into TYPE; negating the most negative value can
	     overflow, which is folded into the overflow flags.  */
	  t = build_int_cst_wide (type, low, high);
	  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
			      TREE_CONSTANT_OVERFLOW (arg0));
	}
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      /* Callers guarantee an INTEGER_CST or REAL_CST argument.  */
      gcc_unreachable ();
    }

  return t;
}
10652 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10653 constant. TYPE is the type of the result. */
10655 static tree
10656 fold_not_const (tree arg0, tree type)
10658 tree t = NULL_TREE;
10660 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
10662 t = build_int_cst_wide (type,
10663 ~ TREE_INT_CST_LOW (arg0),
10664 ~ TREE_INT_CST_HIGH (arg0));
10665 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
10666 TREE_CONSTANT_OVERFLOW (arg0));
10668 return t;
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      /* With -ftrapping-math these comparisons may trap on a
		 NaN, so they must not be folded away.  */
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
10775 /* Build an expression for the a clean point containing EXPR with type TYPE.
10776 Don't build a cleanup point expression for EXPR which don't have side
10777 effects. */
10779 tree
10780 fold_build_cleanup_point_expr (tree type, tree expr)
10782 /* If the expression does not have side effects then we don't have to wrap
10783 it with a cleanup point expression. */
10784 if (!TREE_SIDE_EFFECTS (expr))
10785 return expr;
10787 /* If the expression is a return, check to see if the expression inside the
10788 return has no side effects or the right hand side of the modify expression
10789 inside the return. If either don't have side effects set we don't need to
10790 wrap the expression in a cleanup point expression. Note we don't check the
10791 left hand side of the modify because it should always be a return decl. */
10792 if (TREE_CODE (expr) == RETURN_EXPR)
10794 tree op = TREE_OPERAND (expr, 0);
10795 if (!op || !TREE_SIDE_EFFECTS (op))
10796 return expr;
10797 op = TREE_OPERAND (op, 1);
10798 if (!TREE_SIDE_EFFECTS (op))
10799 return expr;
10802 return build1 (CLEANUP_POINT_EXPR, type, expr);
/* Build an expression for the address of T with pointer type PTRTYPE.
   Folds away INDIRECT_REF to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      /* &*p simplifies to p, converted to PTRTYPE if necessary.  */
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
	t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      /* Taking the address makes the innermost referenced decl
	 addressable.  */
      while (handled_component_p (base))
	base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
	TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}
10838 tree
10839 build_fold_addr_expr (tree t)
10841 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (lang_hooks.types_compatible_p (type, optype))
	return op;
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
	return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  subtype = TREE_TYPE (sub);
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      sub = build_fold_indirect_ref (sub);
      return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
    }

  /* No simplification applies: emit a plain INDIRECT_REF of the
     original (un-stripped) expression.  */
  return build1 (INDIRECT_REF, type, t);
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  /* A side-effect-free expression can be dropped entirely.  */
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  /* Repeatedly peel off the outermost node as long as the side
     effects are confined to a single operand.  */
  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	/* Keep only whichever operand carries the side effects; if
	   both do, the node must be preserved as-is.  */
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    /* A conditional can only be stripped down to its
	       condition when neither arm has side effects.  */
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
10933 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
10934 This can only be applied to objects of a sizetype. */
10936 tree
10937 round_up (tree value, int divisor)
10939 tree div = NULL_TREE;
10941 gcc_assert (divisor > 0);
10942 if (divisor == 1)
10943 return value;
10945 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
10946 have to do anything. Only do this when we are not given a const,
10947 because in that case, this check is more expensive than just
10948 doing it. */
10949 if (TREE_CODE (value) != INTEGER_CST)
10951 div = build_int_cst (TREE_TYPE (value), divisor);
10953 if (multiple_of_p (TREE_TYPE (value), value, div))
10954 return value;
10957 /* If divisor is a power of two, simplify this to bit manipulation. */
10958 if (divisor == (divisor & -divisor))
10960 tree t;
10962 t = build_int_cst (TREE_TYPE (value), divisor - 1);
10963 value = size_binop (PLUS_EXPR, value, t);
10964 t = build_int_cst (TREE_TYPE (value), -divisor);
10965 value = size_binop (BIT_AND_EXPR, value, t);
10967 else
10969 if (!div)
10970 div = build_int_cst (TREE_TYPE (value), divisor);
10971 value = size_binop (CEIL_DIV_EXPR, value, div);
10972 value = size_binop (MULT_EXPR, value, div);
10975 return value;
10978 /* Likewise, but round down. */
10980 tree
10981 round_down (tree value, int divisor)
10983 tree div = NULL_TREE;
10985 gcc_assert (divisor > 0);
10986 if (divisor == 1)
10987 return value;
10989 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
10990 have to do anything. Only do this when we are not given a const,
10991 because in that case, this check is more expensive than just
10992 doing it. */
10993 if (TREE_CODE (value) != INTEGER_CST)
10995 div = build_int_cst (TREE_TYPE (value), divisor);
10997 if (multiple_of_p (TREE_TYPE (value), value, div))
10998 return value;
11001 /* If divisor is a power of two, simplify this to bit manipulation. */
11002 if (divisor == (divisor & -divisor))
11004 tree t;
11006 t = build_int_cst (TREE_TYPE (value), -divisor);
11007 value = size_binop (BIT_AND_EXPR, value, t);
11009 else
11011 if (!div)
11012 div = build_int_cst (TREE_TYPE (value), divisor);
11013 value = size_binop (FLOOR_DIV_EXPR, value, div);
11014 value = size_binop (MULT_EXPR, value, div);
11017 return value;
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);

      /* For &*p the core object is whatever P points to.  */
      if (TREE_CODE (core) == INDIRECT_REF)
	core = TREE_OPERAND (core, 0);
    }
  else
    {
      /* Not an address: the expression itself is the core, with no
	 offset.  */
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  /* Both accesses must be byte-aligned and based on the same core
     object.  */
  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      /* Fold the difference of the variable offsets; it must come out
	 to a host-representable integer constant.  */
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
      if (!host_integerp (tdiff, 0))
	return false;

      *diff = tree_low_cst (tdiff, 0);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  /* Account for the constant bit offsets as well.  */
  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}