[official-gcc.git] / gcc / fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (tree, enum tree_code,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_relational_hi_lo (enum tree_code *, const tree,
                                   tree *, tree *);
static bool tree_expr_nonzero_p (tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

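/* A small usage sketch (illustrative only, hence kept under #if 0; it
   assumes the usual two's complement HOST_WIDE_INT).  The macro looks
   only at sign bits: same-sign operands whose sum has the opposite
   sign mean overflow; mixed-sign operands can never overflow.  */
#if 0
static void
overflow_sum_sign_sketch (void)
{
  HOST_WIDE_INT max = (HOST_WIDE_INT) (~(unsigned HOST_WIDE_INT) 0 >> 1);
  /* Compute max + 1 via unsigned arithmetic so the sketch itself
     avoids undefined signed overflow.  */
  HOST_WIDE_INT sum = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) max + 1);

  gcc_assert (OVERFLOW_SUM_SIGN (max, 1, sum));       /* wrapped negative */
  gcc_assert (!OVERFLOW_SUM_SIGN (max, -1, max - 1)); /* signs differ */
}
#endif
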
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}

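/* Round-trip sketch (illustrative only): encode splits a doubleword
   value into four half-word digits, least significant first, and
   decode reassembles them.  The digit values shown assume
   HOST_BITS_PER_WIDE_INT == 64, i.e. BASE == 1 << 32.  */
#if 0
static void
encode_decode_sketch (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;

  encode (words, 0x123456789abcdef0, 0x0fedcba987654321);
  /* words[0] == 0x9abcdef0, words[1] == 0x12345678,
     words[2] == 0x87654321, words[3] == 0x0fedcba9.  */

  decode (words, &lo, &hi);
  /* lo == 0x123456789abcdef0 and hi == 0x0fedcba987654321 again.  */
}
#endif
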
/* T is an INTEGER_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value: when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
        CONST_OVERFLOWED is nonzero,
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}

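/* Behavior sketch (illustrative only; the constants and the exact node
   identities are hypothetical).  Fitting 300 into an 8-bit unsigned
   type clears the bits above the precision; fitting 128 into an 8-bit
   signed type sign extends, so the node reads as -128.  */
#if 0
static void
force_fit_type_sketch (void)
{
  tree u, s;

  u = build_int_cst_wide (unsigned_char_type_node, 300, 0);
  u = force_fit_type (u, 0, false, false);
  /* TREE_INT_CST_LOW (u) == 44, i.e. 300 mod 256.  */

  s = build_int_cst_wide (signed_char_type_node, 128, 0);
  s = force_fit_type (s, 0, false, false);
  /* Bit 7 was set, so the low word is sign extended: value -128.  */
}
#endif
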
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}

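/* Carry sketch (illustrative only): adding 1 to a doubleword whose low
   word is all ones carries into the high word; the return value flags
   signed overflow via OVERFLOW_SUM_SIGN on the high words.  */
#if 0
static void
add_double_sketch (void)
{
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;
  int ovf;

  ovf = add_double (~(unsigned HOST_WIDE_INT) 0, 0, 1, 0, &lo, &hi);
  /* lo == 0, hi == 1, ovf == 0: the carry out of the low word is not
     a signed overflow of the doubleword.  */
}
#endif
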
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);        /* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}

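/* Digit sketch (illustrative only): mul_double is schoolbook
   multiplication in base 2**(HOST_BITS_PER_WIDE_INT / 2); the full
   eight-digit product is kept so the discarded top half can be
   checked against the low half's sign.  */
#if 0
static void
mul_double_sketch (void)
{
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;
  int ovf;

  ovf = mul_double (10, 0, 20, 0, &lo, &hi);
  /* lo == 200, hi == 0, ovf == 0.  */

  ovf = mul_double (0, 1, 0, 1, &lo, &hi);
  /* Squaring 2**HOST_BITS_PER_WIDE_INT: the low doubleword is zero
     but the discarded top half is not, so ovf != 0.  */
}
#endif
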
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

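/* Precision sketch (illustrative only): both shift helpers re-extend
   their result to PREC bits, so shifting a narrow value behaves like
   shifting in the narrow type.  With prec == 8:  */
#if 0
static void
shift_sketch (void)
{
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;

  lshift_double (0x40, 0, 1, 8, &lo, &hi, 1);
  /* 0x40 << 1 == 0x80 sets the 8-bit sign, so the result is sign
     extended: low byte 0x80, all higher bits (and hi) all ones.  */

  rshift_double (0x80, 0, 1, 8, &lo, &hi, 0);
  /* Logical shift: lo == 0x40, hi == 0.  */
}
#endif
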
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

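/* Identity sketch (illustrative only): a rotate is the OR of a shift
   and the complementary logical shift, so rotating by COUNT and then
   back by COUNT restores the value within the precision.  */
#if 0
static void
rotate_sketch (void)
{
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;

  lrotate_double (0x96, 0, 3, 8, &lo, &hi);
  /* Low byte of lo is 0xb4 (0x96 rotated left by 3 in 8 bits); the
     bits above the precision come back sign extended.  */

  rrotate_double (lo, hi, 3, 8, &lo, &hi);
  /* Low byte of lo is 0x96 again.  */
}
#endif
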
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.  It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}

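/* Rounding sketch (illustrative only): the same division under several
   rounding codes; afterward the remainder always satisfies
   num == quo * den + rem.  */
#if 0
static void
div_and_round_double_sketch (void)
{
  unsigned HOST_WIDE_INT lq, lr;
  HOST_WIDE_INT hq, hr;

  div_and_round_double (TRUNC_DIV_EXPR, 1, 7, 0, 2, 0, &lq, &hq, &lr, &hr);
  /* 7 / 2: quo == 3, rem == 1.  */

  div_and_round_double (CEIL_DIV_EXPR, 1, 7, 0, 2, 0, &lq, &hq, &lr, &hr);
  /* 7 / 2 rounded up: quo == 4, rem == -1.  */

  div_and_round_double (FLOOR_DIV_EXPR, 0, -7, -1, 2, 0, &lq, &hq, &lr, &hr);
  /* Signed -7 / 2 rounded down: quo == -4, rem == 1.  */

  div_and_round_double (ROUND_DIV_EXPR, 1, 8, 0, 3, 0, &lq, &hq, &lr, &hr);
  /* 8 / 3 rounded to nearest: quo == 3, rem == -1.  */
}
#endif
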
/* Return true if built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

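/* Boundary sketch (illustrative only): in two's complement the most
   negative value of a signed type has no positive counterpart, so it
   is the one constant this predicate rejects.  */
#if 0
static void
may_negate_sketch (void)
{
  gcc_assert (may_negate_without_overflow_p
              (build_int_cst (integer_type_node, -5)));
  gcc_assert (!may_negate_without_overflow_p
              (TYPE_MIN_VALUE (integer_type_node)));
}
#endif
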
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 0)));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 1)));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                           TREE_OPERAND (t, 1),
                                           TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               TREE_OPERAND (t, 0),
                                               negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               negate_expr (tem),
                                               TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}

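/* Rewrite summary (illustrative; A, B, C and x are hypothetical
   operands, C a cheaply negatable constant):
     -(A + B)          =>  (-B) - A   or  (-A) - B
     -(A - B)          =>  B - A
     -(A * C)          =>  A * (-C)
     -((int) x >> 31)  =>  (int) ((unsigned) x >> 31)
     -sin (x)          =>  sin (-x)   for sign-preserving builtins
   negate_expr_p above predicts exactly the cases negate_expr handles.  */
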
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

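/* Decomposition sketch (illustrative; A is a hypothetical variable):
     split_tree (A + 3, PLUS_EXPR, &con, &lit, &mlit, 0)
       returns A with *litp == 3;
     split_tree (A - 3, PLUS_EXPR, &con, &lit, &mlit, 0)
       returns A with *minus_litp == 3.
   With NEGATE_P nonzero the literal swaps between *LITP and
   *MINUS_LITP while the constant and variable parts are negated.  */
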
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold (build2 (code, type, fold_convert (type, t1),
                       fold_convert (type, t2)));
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build2 (code, type, arg0, arg1));
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}

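/* Saturation sketch (illustrative values, 32-bit signed target type):
     1.9e9  ->  1900000000
     3.0e9  ->  2147483647  (TYPE_MAX_VALUE, overflow flagged)
    -3.0e9  -> -2147483648  (TYPE_MIN_VALUE, overflow flagged)
     NaN    ->  0           (overflow flagged)
   The FIX_*_EXPR code only chooses the rounding step applied before
   this range check.  */
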
/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

1880 /* Convert expression ARG to type TYPE. Used by the middle-end for
1881 simple conversions in preference to calling the front-end's convert. */
1883 tree
1884 fold_convert (tree type, tree arg)
1886 tree orig = TREE_TYPE (arg);
1887 tree tem;
1889 if (type == orig)
1890 return arg;
1892 if (TREE_CODE (arg) == ERROR_MARK
1893 || TREE_CODE (type) == ERROR_MARK
1894 || TREE_CODE (orig) == ERROR_MARK)
1895 return error_mark_node;
1897 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1898 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1899 TYPE_MAIN_VARIANT (orig)))
1900 return fold (build1 (NOP_EXPR, type, arg));
1902 switch (TREE_CODE (type))
1904 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1905 case POINTER_TYPE: case REFERENCE_TYPE:
1906 case OFFSET_TYPE:
1907 if (TREE_CODE (arg) == INTEGER_CST)
1909 tem = fold_convert_const (NOP_EXPR, type, arg);
1910 if (tem != NULL_TREE)
1911 return tem;
1913 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1914 || TREE_CODE (orig) == OFFSET_TYPE)
1915 return fold (build1 (NOP_EXPR, type, arg));
1916 if (TREE_CODE (orig) == COMPLEX_TYPE)
1918 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1919 return fold_convert (type, tem);
1921 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1922 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1923 return fold (build1 (NOP_EXPR, type, arg));
1925 case REAL_TYPE:
1926 if (TREE_CODE (arg) == INTEGER_CST)
1928 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1929 if (tem != NULL_TREE)
1930 return tem;
1932 else if (TREE_CODE (arg) == REAL_CST)
1934 tem = fold_convert_const (NOP_EXPR, type, arg);
1935 if (tem != NULL_TREE)
1936 return tem;
1939 switch (TREE_CODE (orig))
1941 case INTEGER_TYPE: case CHAR_TYPE:
1942 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1943 case POINTER_TYPE: case REFERENCE_TYPE:
1944 return fold (build1 (FLOAT_EXPR, type, arg));
1946 case REAL_TYPE:
1947 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1948 type, arg));
1950 case COMPLEX_TYPE:
1951 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1952 return fold_convert (type, tem);
1954 default:
1955 gcc_unreachable ();
1958 case COMPLEX_TYPE:
1959 switch (TREE_CODE (orig))
1961 case INTEGER_TYPE: case CHAR_TYPE:
1962 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1963 case POINTER_TYPE: case REFERENCE_TYPE:
1964 case REAL_TYPE:
1965 return build2 (COMPLEX_EXPR, type,
1966 fold_convert (TREE_TYPE (type), arg),
1967 fold_convert (TREE_TYPE (type), integer_zero_node));
1968 case COMPLEX_TYPE:
1970 tree rpart, ipart;
1972 if (TREE_CODE (arg) == COMPLEX_EXPR)
1974 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1975 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1976 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1979 arg = save_expr (arg);
1980 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1981 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1982 rpart = fold_convert (TREE_TYPE (type), rpart);
1983 ipart = fold_convert (TREE_TYPE (type), ipart);
1984 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1987 default:
1988 gcc_unreachable ();
1991 case VECTOR_TYPE:
1992 if (integer_zerop (arg))
1993 return build_zero_vector (type);
1994 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1995 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1996 || TREE_CODE (orig) == VECTOR_TYPE);
1997 return fold (build1 (NOP_EXPR, type, arg));
1999 case VOID_TYPE:
2000 return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
2002 default:
2003 gcc_unreachable ();
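/* Usage sketch (illustrative): fold_convert is the middle-end's way
   to change an expression's type without calling into the front-end,
   e.g.

     tree wide = fold_convert (long_integer_type_node, expr);

   where EXPR may have any integral, pointer, real, complex or vector
   type permitted by the checks above; unsupported pairings trip the
   gcc_unreachable () asserts.  */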
2007 /* Return an expr equal to X but certainly not valid as an lvalue. */
2009 tree
2010 non_lvalue (tree x)
2012 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2013 us. */
2014 if (in_gimple_form)
2015 return x;
2017 /* We only need to wrap lvalue tree codes. */
2018 switch (TREE_CODE (x))
2020 case VAR_DECL:
2021 case PARM_DECL:
2022 case RESULT_DECL:
2023 case LABEL_DECL:
2024 case FUNCTION_DECL:
2025 case SSA_NAME:
2027 case COMPONENT_REF:
2028 case INDIRECT_REF:
2029 case ALIGN_INDIRECT_REF:
2030 case MISALIGNED_INDIRECT_REF:
2031 case ARRAY_REF:
2032 case ARRAY_RANGE_REF:
2033 case BIT_FIELD_REF:
2034 case OBJ_TYPE_REF:
2036 case REALPART_EXPR:
2037 case IMAGPART_EXPR:
2038 case PREINCREMENT_EXPR:
2039 case PREDECREMENT_EXPR:
2040 case SAVE_EXPR:
2041 case TRY_CATCH_EXPR:
2042 case WITH_CLEANUP_EXPR:
2043 case COMPOUND_EXPR:
2044 case MODIFY_EXPR:
2045 case TARGET_EXPR:
2046 case COND_EXPR:
2047 case BIND_EXPR:
2048 case MIN_EXPR:
2049 case MAX_EXPR:
2050 break;
2052 default:
2053 /* Assume the worst for front-end tree codes. */
2054 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2055 break;
2056 return x;
2058 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
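/* Example of intent (illustrative, outside GIMPLE): for a VAR_DECL V,
   non_lvalue (v) yields NON_LVALUE_EXPR <v>, so the folded tree can no
   longer be assigned through; constants and other codes that are not
   lvalues to begin with are returned unchanged.  */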
2061 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2062 Zero means allow extended lvalues. */
2064 int pedantic_lvalues;
2066 /* When pedantic, return an expr equal to X but certainly not valid as a
2067 pedantic lvalue. Otherwise, return X. */
2069 static tree
2070 pedantic_non_lvalue (tree x)
2072 if (pedantic_lvalues)
2073 return non_lvalue (x);
2074 else
2075 return x;
2078 /* Given a tree comparison code, return the code that is the logical inverse
2079 of the given code. It is not safe to do this for floating-point
2080 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a HONOR_NANS
2081 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2083 static enum tree_code
2084 invert_tree_comparison (enum tree_code code, bool honor_nans)
2086 if (honor_nans && flag_trapping_math)
2087 return ERROR_MARK;
2089 switch (code)
2091 case EQ_EXPR:
2092 return NE_EXPR;
2093 case NE_EXPR:
2094 return EQ_EXPR;
2095 case GT_EXPR:
2096 return honor_nans ? UNLE_EXPR : LE_EXPR;
2097 case GE_EXPR:
2098 return honor_nans ? UNLT_EXPR : LT_EXPR;
2099 case LT_EXPR:
2100 return honor_nans ? UNGE_EXPR : GE_EXPR;
2101 case LE_EXPR:
2102 return honor_nans ? UNGT_EXPR : GT_EXPR;
2103 case LTGT_EXPR:
2104 return UNEQ_EXPR;
2105 case UNEQ_EXPR:
2106 return LTGT_EXPR;
2107 case UNGT_EXPR:
2108 return LE_EXPR;
2109 case UNGE_EXPR:
2110 return LT_EXPR;
2111 case UNLT_EXPR:
2112 return GE_EXPR;
2113 case UNLE_EXPR:
2114 return GT_EXPR;
2115 case ORDERED_EXPR:
2116 return UNORDERED_EXPR;
2117 case UNORDERED_EXPR:
2118 return ORDERED_EXPR;
2119 default:
2120 gcc_unreachable ();
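/* A few concrete values (sketch): without NaNs,
     invert_tree_comparison (GT_EXPR, false) == LE_EXPR
   while with NaNs honored (and trapping math off) the ordered inverse
   must become unordered:
     invert_tree_comparison (GT_EXPR, true) == UNLE_EXPR
   and with both NaNs honored and flag_trapping_math set the function
   refuses with ERROR_MARK, as coded above.  */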
2124 /* Similar, but return the comparison that results if the operands are
2125 swapped. This is safe for floating-point. */
2127 enum tree_code
2128 swap_tree_comparison (enum tree_code code)
2130 switch (code)
2132 case EQ_EXPR:
2133 case NE_EXPR:
2134 return code;
2135 case GT_EXPR:
2136 return LT_EXPR;
2137 case GE_EXPR:
2138 return LE_EXPR;
2139 case LT_EXPR:
2140 return GT_EXPR;
2141 case LE_EXPR:
2142 return GE_EXPR;
2143 default:
2144 gcc_unreachable ();
2149 /* Convert a comparison tree code from an enum tree_code representation
2150 into a compcode bit-based encoding. This function is the inverse of
2151 compcode_to_comparison. */
2153 static enum comparison_code
2154 comparison_to_compcode (enum tree_code code)
2156 switch (code)
2158 case LT_EXPR:
2159 return COMPCODE_LT;
2160 case EQ_EXPR:
2161 return COMPCODE_EQ;
2162 case LE_EXPR:
2163 return COMPCODE_LE;
2164 case GT_EXPR:
2165 return COMPCODE_GT;
2166 case NE_EXPR:
2167 return COMPCODE_NE;
2168 case GE_EXPR:
2169 return COMPCODE_GE;
2170 case ORDERED_EXPR:
2171 return COMPCODE_ORD;
2172 case UNORDERED_EXPR:
2173 return COMPCODE_UNORD;
2174 case UNLT_EXPR:
2175 return COMPCODE_UNLT;
2176 case UNEQ_EXPR:
2177 return COMPCODE_UNEQ;
2178 case UNLE_EXPR:
2179 return COMPCODE_UNLE;
2180 case UNGT_EXPR:
2181 return COMPCODE_UNGT;
2182 case LTGT_EXPR:
2183 return COMPCODE_LTGT;
2184 case UNGE_EXPR:
2185 return COMPCODE_UNGE;
2186 default:
2187 gcc_unreachable ();
2191 /* Convert a compcode bit-based encoding of a comparison operator back
2192 to GCC's enum tree_code representation. This function is the
2193 inverse of comparison_to_compcode. */
2195 static enum tree_code
2196 compcode_to_comparison (enum comparison_code code)
2198 switch (code)
2200 case COMPCODE_LT:
2201 return LT_EXPR;
2202 case COMPCODE_EQ:
2203 return EQ_EXPR;
2204 case COMPCODE_LE:
2205 return LE_EXPR;
2206 case COMPCODE_GT:
2207 return GT_EXPR;
2208 case COMPCODE_NE:
2209 return NE_EXPR;
2210 case COMPCODE_GE:
2211 return GE_EXPR;
2212 case COMPCODE_ORD:
2213 return ORDERED_EXPR;
2214 case COMPCODE_UNORD:
2215 return UNORDERED_EXPR;
2216 case COMPCODE_UNLT:
2217 return UNLT_EXPR;
2218 case COMPCODE_UNEQ:
2219 return UNEQ_EXPR;
2220 case COMPCODE_UNLE:
2221 return UNLE_EXPR;
2222 case COMPCODE_UNGT:
2223 return UNGT_EXPR;
2224 case COMPCODE_LTGT:
2225 return LTGT_EXPR;
2226 case COMPCODE_UNGE:
2227 return UNGE_EXPR;
2228 default:
2229 gcc_unreachable ();
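/* The encoding makes combining comparisons trivial; for instance
   (illustrative):
     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)            3 == 1 | 2
     COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)
                                                           13 == 1 | 4 | 8
   so ANDing or ORing two compcodes yields the compcode of the
   conjunction or disjunction of the comparisons.  */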
2233 /* Return a tree for the comparison which is the combination of
2234 doing the AND or OR (depending on CODE) of the two operations LCODE
2235 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2236 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2237 if this makes the transformation invalid. */
2239 tree
2240 combine_comparisons (enum tree_code code, enum tree_code lcode,
2241 enum tree_code rcode, tree truth_type,
2242 tree ll_arg, tree lr_arg)
2244 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2245 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2246 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2247 enum comparison_code compcode;
2249 switch (code)
2251 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2252 compcode = lcompcode & rcompcode;
2253 break;
2255 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2256 compcode = lcompcode | rcompcode;
2257 break;
2259 default:
2260 return NULL_TREE;
2263 if (!honor_nans)
2265 /* Eliminate unordered comparisons, as well as LTGT and ORD
2266 which are not used unless the mode has NaNs. */
2267 compcode &= ~COMPCODE_UNORD;
2268 if (compcode == COMPCODE_LTGT)
2269 compcode = COMPCODE_NE;
2270 else if (compcode == COMPCODE_ORD)
2271 compcode = COMPCODE_TRUE;
2273 else if (flag_trapping_math)
2275 /* Check that the original operation and the optimized ones will trap
2276 under the same condition. */
2277 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2278 && (lcompcode != COMPCODE_EQ)
2279 && (lcompcode != COMPCODE_ORD);
2280 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2281 && (rcompcode != COMPCODE_EQ)
2282 && (rcompcode != COMPCODE_ORD);
2283 bool trap = (compcode & COMPCODE_UNORD) == 0
2284 && (compcode != COMPCODE_EQ)
2285 && (compcode != COMPCODE_ORD);
2287 /* In a short-circuited boolean expression the LHS might be
2288 such that the RHS, if evaluated, will never trap. For
2289 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2290 if neither x nor y is NaN. (This is a mixed blessing: for
2291 example, the expression above will never trap, hence
2292 optimizing it to x < y would be invalid). */
2293 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2294 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2295 rtrap = false;
2297 /* If the comparison was short-circuited, and only the RHS
2298 trapped, we may now generate a spurious trap. */
2299 if (rtrap && !ltrap
2300 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2301 return NULL_TREE;
2303 /* If we changed the conditions that cause a trap, we lose. */
2304 if ((ltrap || rtrap) != trap)
2305 return NULL_TREE;
2308 if (compcode == COMPCODE_TRUE)
2309 return constant_boolean_node (true, truth_type);
2310 else if (compcode == COMPCODE_FALSE)
2311 return constant_boolean_node (false, truth_type);
2312 else
2313 return fold (build2 (compcode_to_comparison (compcode),
2314 truth_type, ll_arg, lr_arg));
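/* Worked example (a sketch, not from the sources): for integral
   operands X and Y,

     combine_comparisons (TRUTH_AND_EXPR, GE_EXPR, LE_EXPR,
                          boolean_type_node, x, y)

   computes COMPCODE_GE & COMPCODE_LE == 6 & 3 == 2 == COMPCODE_EQ and
   therefore folds (x >= y && x <= y) into (x == y).  */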
2317 /* Return nonzero if CODE is a tree code that represents a truth value. */
2319 static int
2320 truth_value_p (enum tree_code code)
2322 return (TREE_CODE_CLASS (code) == tcc_comparison
2323 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2324 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2325 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2328 /* Return nonzero if two operands (typically of the same tree node)
2329 are necessarily equal. If either argument has side-effects this
2330 function returns zero. FLAGS modifies behavior as follows:
2332 If OEP_ONLY_CONST is set, only return nonzero for constants.
2333 This function tests whether the operands are indistinguishable;
2334 it does not test whether they are equal using C's == operation.
2335 The distinction is important for IEEE floating point, because
2336 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2337 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2339 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2340 even though it may hold multiple values during a function.
2341 This is because a GCC tree node guarantees that nothing else is
2342 executed between the evaluation of its "operands" (which may often
2343 be evaluated in arbitrary order). Hence if the operands themselves
2344 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2345 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2346 unset means assuming isochronic (or instantaneous) tree equivalence.
2347 Unless comparing arbitrary expression trees, such as from different
2348 statements, this flag can usually be left unset.
2350 If OEP_PURE_SAME is set, then pure functions with identical arguments
2351 are considered the same. It is used when the caller has other ways
2352 to ensure that global memory is unchanged in between. */
2354 int
2355 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2357 /* If either is ERROR_MARK, they aren't equal. */
2358 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2359 return 0;
2361 /* If both types don't have the same signedness, then we can't consider
2362 them equal. We must check this before the STRIP_NOPS calls
2363 because they may change the signedness of the arguments. */
2364 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2365 return 0;
2367 STRIP_NOPS (arg0);
2368 STRIP_NOPS (arg1);
2370 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2371 /* This is needed for conversions and for COMPONENT_REF.
2372 Might as well play it safe and always test this. */
2373 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2374 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2375 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2376 return 0;
2378 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2379 We don't care about side effects in that case because the SAVE_EXPR
2380 takes care of that for us. In all other cases, two expressions are
2381 equal if they have no side effects. If we have two identical
2382 expressions with side effects that should be treated the same due
2383 to the only side effects being identical SAVE_EXPR's, that will
2384 be detected in the recursive calls below. */
2385 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2386 && (TREE_CODE (arg0) == SAVE_EXPR
2387 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2388 return 1;
2390 /* Next handle constant cases, those for which we can return 1 even
2391 if ONLY_CONST is set. */
2392 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2393 switch (TREE_CODE (arg0))
2395 case INTEGER_CST:
2396 return (! TREE_CONSTANT_OVERFLOW (arg0)
2397 && ! TREE_CONSTANT_OVERFLOW (arg1)
2398 && tree_int_cst_equal (arg0, arg1));
2400 case REAL_CST:
2401 return (! TREE_CONSTANT_OVERFLOW (arg0)
2402 && ! TREE_CONSTANT_OVERFLOW (arg1)
2403 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2404 TREE_REAL_CST (arg1)));
2406 case VECTOR_CST:
2408 tree v1, v2;
2410 if (TREE_CONSTANT_OVERFLOW (arg0)
2411 || TREE_CONSTANT_OVERFLOW (arg1))
2412 return 0;
2414 v1 = TREE_VECTOR_CST_ELTS (arg0);
2415 v2 = TREE_VECTOR_CST_ELTS (arg1);
2416 while (v1 && v2)
2418 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2419 flags))
2420 return 0;
2421 v1 = TREE_CHAIN (v1);
2422 v2 = TREE_CHAIN (v2);
2425 return 1;
2428 case COMPLEX_CST:
2429 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2430 flags)
2431 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2432 flags));
2434 case STRING_CST:
2435 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2436 && ! memcmp (TREE_STRING_POINTER (arg0),
2437 TREE_STRING_POINTER (arg1),
2438 TREE_STRING_LENGTH (arg0)));
2440 case ADDR_EXPR:
2441 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2442 0);
2443 default:
2444 break;
2447 if (flags & OEP_ONLY_CONST)
2448 return 0;
2450 /* Define macros to test an operand from arg0 and arg1 for equality and a
2451 variant that allows null and views null as being different from any
2452 non-null value. In the latter case, if either is null, they both
2453 must be; otherwise, do the normal comparison. */
2454 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2455 TREE_OPERAND (arg1, N), flags)
2457 #define OP_SAME_WITH_NULL(N) \
2458 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2459 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2461 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2463 case tcc_unary:
2464 /* Two conversions are equal only if signedness and modes match. */
2465 switch (TREE_CODE (arg0))
2467 case NOP_EXPR:
2468 case CONVERT_EXPR:
2469 case FIX_CEIL_EXPR:
2470 case FIX_TRUNC_EXPR:
2471 case FIX_FLOOR_EXPR:
2472 case FIX_ROUND_EXPR:
2473 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2474 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2475 return 0;
2476 break;
2477 default:
2478 break;
2481 return OP_SAME (0);
2484 case tcc_comparison:
2485 case tcc_binary:
2486 if (OP_SAME (0) && OP_SAME (1))
2487 return 1;
2489 /* For commutative ops, allow the other order. */
2490 return (commutative_tree_code (TREE_CODE (arg0))
2491 && operand_equal_p (TREE_OPERAND (arg0, 0),
2492 TREE_OPERAND (arg1, 1), flags)
2493 && operand_equal_p (TREE_OPERAND (arg0, 1),
2494 TREE_OPERAND (arg1, 0), flags));
2496 case tcc_reference:
2497 /* If either of the pointer (or reference) expressions we are
2498 dereferencing contain a side effect, these cannot be equal. */
2499 if (TREE_SIDE_EFFECTS (arg0)
2500 || TREE_SIDE_EFFECTS (arg1))
2501 return 0;
2503 switch (TREE_CODE (arg0))
2505 case INDIRECT_REF:
2506 case ALIGN_INDIRECT_REF:
2507 case MISALIGNED_INDIRECT_REF:
2508 case REALPART_EXPR:
2509 case IMAGPART_EXPR:
2510 return OP_SAME (0);
2512 case ARRAY_REF:
2513 case ARRAY_RANGE_REF:
2514 /* Operands 2 and 3 may be null. */
2515 return (OP_SAME (0)
2516 && OP_SAME (1)
2517 && OP_SAME_WITH_NULL (2)
2518 && OP_SAME_WITH_NULL (3));
2520 case COMPONENT_REF:
2521 /* Handle operand 2 the same as for ARRAY_REF. */
2522 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2524 case BIT_FIELD_REF:
2525 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2527 default:
2528 return 0;
2531 case tcc_expression:
2532 switch (TREE_CODE (arg0))
2534 case ADDR_EXPR:
2535 case TRUTH_NOT_EXPR:
2536 return OP_SAME (0);
2538 case TRUTH_ANDIF_EXPR:
2539 case TRUTH_ORIF_EXPR:
2540 return OP_SAME (0) && OP_SAME (1);
2542 case TRUTH_AND_EXPR:
2543 case TRUTH_OR_EXPR:
2544 case TRUTH_XOR_EXPR:
2545 if (OP_SAME (0) && OP_SAME (1))
2546 return 1;
2548 /* Otherwise take into account this is a commutative operation. */
2549 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2550 TREE_OPERAND (arg1, 1), flags)
2551 && operand_equal_p (TREE_OPERAND (arg0, 1),
2552 TREE_OPERAND (arg1, 0), flags));
2554 case CALL_EXPR:
2555 /* If the CALL_EXPRs call different functions, then they
2556 clearly cannot be equal. */
2557 if (!OP_SAME (0))
2558 return 0;
2561 unsigned int cef = call_expr_flags (arg0);
2562 if (flags & OEP_PURE_SAME)
2563 cef &= ECF_CONST | ECF_PURE;
2564 else
2565 cef &= ECF_CONST;
2566 if (!cef)
2567 return 0;
2570 /* Now see if all the arguments are the same. operand_equal_p
2571 does not handle TREE_LIST, so we walk the operands here
2572 feeding them to operand_equal_p. */
2573 arg0 = TREE_OPERAND (arg0, 1);
2574 arg1 = TREE_OPERAND (arg1, 1);
2575 while (arg0 && arg1)
2577 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2578 flags))
2579 return 0;
2581 arg0 = TREE_CHAIN (arg0);
2582 arg1 = TREE_CHAIN (arg1);
2585 /* If we get here and both argument lists are exhausted
2586 then the CALL_EXPRs are equal. */
2587 return ! (arg0 || arg1);
2589 default:
2590 return 0;
2593 case tcc_declaration:
2594 /* Consider __builtin_sqrt equal to sqrt. */
2595 return (TREE_CODE (arg0) == FUNCTION_DECL
2596 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2597 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2598 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2600 default:
2601 return 0;
2604 #undef OP_SAME
2605 #undef OP_SAME_WITH_NULL
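/* Behavioral sketch (illustrative): for side-effect-free operands A
   and B of the same type,

     operand_equal_p (build2 (PLUS_EXPR, type, a, b),
                      build2 (PLUS_EXPR, type, b, a), 0)

   returns 1 via the commutativity check in the tcc_binary case,
   whereas any operand with TREE_SIDE_EFFECTS set makes the result 0
   unless the two trees are one shared SAVE_EXPR.  */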
2608 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2609 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2611 When in doubt, return 0. */
2613 static int
2614 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2616 int unsignedp1, unsignedpo;
2617 tree primarg0, primarg1, primother;
2618 unsigned int correct_width;
2620 if (operand_equal_p (arg0, arg1, 0))
2621 return 1;
2623 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2624 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2625 return 0;
2627 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2628 and see if the inner values are the same. This removes any
2629 signedness comparison, which doesn't matter here. */
2630 primarg0 = arg0, primarg1 = arg1;
2631 STRIP_NOPS (primarg0);
2632 STRIP_NOPS (primarg1);
2633 if (operand_equal_p (primarg0, primarg1, 0))
2634 return 1;
2636 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2637 actual comparison operand, ARG0.
2639 First throw away any conversions to wider types
2640 already present in the operands. */
2642 primarg1 = get_narrower (arg1, &unsignedp1);
2643 primother = get_narrower (other, &unsignedpo);
2645 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2646 if (unsignedp1 == unsignedpo
2647 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2648 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2650 tree type = TREE_TYPE (arg0);
2652 /* Make sure shorter operand is extended the right way
2653 to match the longer operand. */
2654 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2655 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2657 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2658 return 1;
2661 return 0;
2664 /* See if ARG is an expression that is either a comparison or is performing
2665 arithmetic on comparisons. The comparisons must only be comparing
2666 two different values, which will be stored in *CVAL1 and *CVAL2; if
2667 they are nonzero it means that some operands have already been found.
2668 No variables may be used anywhere else in the expression except in the
2669 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2670 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2672 If this is true, return 1. Otherwise, return zero. */
2674 static int
2675 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2677 enum tree_code code = TREE_CODE (arg);
2678 enum tree_code_class class = TREE_CODE_CLASS (code);
2680 /* We can handle some of the tcc_expression cases here. */
2681 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2682 class = tcc_unary;
2683 else if (class == tcc_expression
2684 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2685 || code == COMPOUND_EXPR))
2686 class = tcc_binary;
2688 else if (class == tcc_expression && code == SAVE_EXPR
2689 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2691 /* If we've already found a CVAL1 or CVAL2, this expression is
2692 too complex to handle. */
2693 if (*cval1 || *cval2)
2694 return 0;
2696 class = tcc_unary;
2697 *save_p = 1;
2700 switch (class)
2702 case tcc_unary:
2703 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2705 case tcc_binary:
2706 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2707 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2708 cval1, cval2, save_p));
2710 case tcc_constant:
2711 return 1;
2713 case tcc_expression:
2714 if (code == COND_EXPR)
2715 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2716 cval1, cval2, save_p)
2717 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2718 cval1, cval2, save_p)
2719 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2720 cval1, cval2, save_p));
2721 return 0;
2723 case tcc_comparison:
2724 /* First see if we can handle the first operand, then the second. For
2725 the second operand, we know *CVAL1 can't be zero. It must be that
2726 one side of the comparison is each of the values; test for the
2727 case where this isn't true by failing if the two operands
2728 are the same. */
2730 if (operand_equal_p (TREE_OPERAND (arg, 0),
2731 TREE_OPERAND (arg, 1), 0))
2732 return 0;
2734 if (*cval1 == 0)
2735 *cval1 = TREE_OPERAND (arg, 0);
2736 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2737 ;
2738 else if (*cval2 == 0)
2739 *cval2 = TREE_OPERAND (arg, 0);
2740 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2741 ;
2742 else
2743 return 0;
2745 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2746 ;
2747 else if (*cval2 == 0)
2748 *cval2 = TREE_OPERAND (arg, 1);
2749 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2750 ;
2751 else
2752 return 0;
2754 return 1;
2756 default:
2757 return 0;
2761 /* ARG is a tree that is known to contain just arithmetic operations and
2762 comparisons. Evaluate the operations in the tree substituting NEW0 for
2763 any occurrence of OLD0 as an operand of a comparison and likewise for
2764 NEW1 and OLD1. */
2766 static tree
2767 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2769 tree type = TREE_TYPE (arg);
2770 enum tree_code code = TREE_CODE (arg);
2771 enum tree_code_class class = TREE_CODE_CLASS (code);
2773 /* We can handle some of the tcc_expression cases here. */
2774 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2775 class = tcc_unary;
2776 else if (class == tcc_expression
2777 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2778 class = tcc_binary;
2780 switch (class)
2782 case tcc_unary:
2783 return fold (build1 (code, type,
2784 eval_subst (TREE_OPERAND (arg, 0),
2785 old0, new0, old1, new1)));
2787 case tcc_binary:
2788 return fold (build2 (code, type,
2789 eval_subst (TREE_OPERAND (arg, 0),
2790 old0, new0, old1, new1),
2791 eval_subst (TREE_OPERAND (arg, 1),
2792 old0, new0, old1, new1)));
2794 case tcc_expression:
2795 switch (code)
2797 case SAVE_EXPR:
2798 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2800 case COMPOUND_EXPR:
2801 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2803 case COND_EXPR:
2804 return fold (build3 (code, type,
2805 eval_subst (TREE_OPERAND (arg, 0),
2806 old0, new0, old1, new1),
2807 eval_subst (TREE_OPERAND (arg, 1),
2808 old0, new0, old1, new1),
2809 eval_subst (TREE_OPERAND (arg, 2),
2810 old0, new0, old1, new1)));
2811 default:
2812 break;
2814 /* Fall through - ??? */
2816 case tcc_comparison:
2818 tree arg0 = TREE_OPERAND (arg, 0);
2819 tree arg1 = TREE_OPERAND (arg, 1);
2821 /* We need to check both for exact equality and tree equality. The
2822 former will be true if the operand has a side-effect. In that
2823 case, we know the operand occurred exactly once. */
2825 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2826 arg0 = new0;
2827 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2828 arg0 = new1;
2830 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2831 arg1 = new0;
2832 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2833 arg1 = new1;
2835 return fold (build2 (code, type, arg0, arg1));
2838 default:
2839 return arg;
2843 /* Return a tree for the case when the result of an expression is RESULT
2844 converted to TYPE and OMITTED was previously an operand of the expression
2845 but is now not needed (e.g., we folded OMITTED * 0).
2847 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2848 the conversion of RESULT to TYPE. */
2850 tree
2851 omit_one_operand (tree type, tree result, tree omitted)
2853 tree t = fold_convert (type, result);
2855 if (TREE_SIDE_EFFECTS (omitted))
2856 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2858 return non_lvalue (t);
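/* Example of intent (sketch): when folding f () * 0 the call cannot
   simply be dropped, so

     omit_one_operand (type, integer_zero_node, call)

   produces COMPOUND_EXPR <f (), 0>, evaluating the call for its side
   effects while yielding the constant result.  */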
2861 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2863 static tree
2864 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2866 tree t = fold_convert (type, result);
2868 if (TREE_SIDE_EFFECTS (omitted))
2869 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2871 return pedantic_non_lvalue (t);
2874 /* Return a tree for the case when the result of an expression is RESULT
2875 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2876 of the expression but are now not needed.
2878 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2879 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2880 evaluated before OMITTED2. Otherwise, if neither has side effects,
2881 just do the conversion of RESULT to TYPE. */
2883 tree
2884 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2886 tree t = fold_convert (type, result);
2888 if (TREE_SIDE_EFFECTS (omitted2))
2889 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2890 if (TREE_SIDE_EFFECTS (omitted1))
2891 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2893 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2897 /* Return a simplified tree node for the truth-negation of ARG. This
2898 never alters ARG itself. We assume that ARG is an operation that
2899 returns a truth value (0 or 1).
2901 FIXME: one would think we would fold the result, but it causes
2902 problems with the dominator optimizer. */
2903 tree
2904 invert_truthvalue (tree arg)
2906 tree type = TREE_TYPE (arg);
2907 enum tree_code code = TREE_CODE (arg);
2909 if (code == ERROR_MARK)
2910 return arg;
2912 /* If this is a comparison, we can simply invert it, except for
2913 floating-point non-equality comparisons, in which case we just
2914 enclose a TRUTH_NOT_EXPR around what we have. */
2916 if (TREE_CODE_CLASS (code) == tcc_comparison)
2918 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2919 if (FLOAT_TYPE_P (op_type)
2920 && flag_trapping_math
2921 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2922 && code != NE_EXPR && code != EQ_EXPR)
2923 return build1 (TRUTH_NOT_EXPR, type, arg);
2924 else
2926 code = invert_tree_comparison (code,
2927 HONOR_NANS (TYPE_MODE (op_type)));
2928 if (code == ERROR_MARK)
2929 return build1 (TRUTH_NOT_EXPR, type, arg);
2930 else
2931 return build2 (code, type,
2932 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2936 switch (code)
2938 case INTEGER_CST:
2939 return constant_boolean_node (integer_zerop (arg), type);
2941 case TRUTH_AND_EXPR:
2942 return build2 (TRUTH_OR_EXPR, type,
2943 invert_truthvalue (TREE_OPERAND (arg, 0)),
2944 invert_truthvalue (TREE_OPERAND (arg, 1)));
2946 case TRUTH_OR_EXPR:
2947 return build2 (TRUTH_AND_EXPR, type,
2948 invert_truthvalue (TREE_OPERAND (arg, 0)),
2949 invert_truthvalue (TREE_OPERAND (arg, 1)));
2951 case TRUTH_XOR_EXPR:
2952 /* Here we can invert either operand. We invert the first operand
2953 unless the second operand is a TRUTH_NOT_EXPR in which case our
2954 result is the XOR of the first operand with the inside of the
2955 negation of the second operand. */
2957 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2958 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2959 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2960 else
2961 return build2 (TRUTH_XOR_EXPR, type,
2962 invert_truthvalue (TREE_OPERAND (arg, 0)),
2963 TREE_OPERAND (arg, 1));
2965 case TRUTH_ANDIF_EXPR:
2966 return build2 (TRUTH_ORIF_EXPR, type,
2967 invert_truthvalue (TREE_OPERAND (arg, 0)),
2968 invert_truthvalue (TREE_OPERAND (arg, 1)));
2970 case TRUTH_ORIF_EXPR:
2971 return build2 (TRUTH_ANDIF_EXPR, type,
2972 invert_truthvalue (TREE_OPERAND (arg, 0)),
2973 invert_truthvalue (TREE_OPERAND (arg, 1)));
2975 case TRUTH_NOT_EXPR:
2976 return TREE_OPERAND (arg, 0);
2978 case COND_EXPR:
2979 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2980 invert_truthvalue (TREE_OPERAND (arg, 1)),
2981 invert_truthvalue (TREE_OPERAND (arg, 2)));
2983 case COMPOUND_EXPR:
2984 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2985 invert_truthvalue (TREE_OPERAND (arg, 1)));
2987 case NON_LVALUE_EXPR:
2988 return invert_truthvalue (TREE_OPERAND (arg, 0));
2990 case NOP_EXPR:
2991 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2992 break;
2994 case CONVERT_EXPR:
2995 case FLOAT_EXPR:
2996 return build1 (TREE_CODE (arg), type,
2997 invert_truthvalue (TREE_OPERAND (arg, 0)));
2999 case BIT_AND_EXPR:
3000 if (!integer_onep (TREE_OPERAND (arg, 1)))
3001 break;
3002 return build2 (EQ_EXPR, type, arg,
3003 fold_convert (type, integer_zero_node));
3005 case SAVE_EXPR:
3006 return build1 (TRUTH_NOT_EXPR, type, arg);
3008 case CLEANUP_POINT_EXPR:
3009 return build1 (CLEANUP_POINT_EXPR, type,
3010 invert_truthvalue (TREE_OPERAND (arg, 0)));
3012 default:
3013 break;
3015 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3016 return build1 (TRUTH_NOT_EXPR, type, arg);
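/* Some concrete inversions (illustrative): for integer operands,
   invert_truthvalue (a < b) gives (a >= b); by De Morgan,
   invert_truthvalue (a && b) gives (!a || !b); and for a
   floating-point (a < b) with NaNs honored and trapping math enabled,
   the result is the conservative TRUTH_NOT_EXPR wrapper built
   above.  */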
3019 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3020 operands are another bit-wise operation with a common input. If so,
3021 distribute the bit operations to save an operation and possibly two if
3022 constants are involved. For example, convert
3023 (A | B) & (A | C) into A | (B & C)
3024 Further simplification will occur if B and C are constants.
3026 If this optimization cannot be done, 0 will be returned. */
3028 static tree
3029 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3031 tree common;
3032 tree left, right;
3034 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3035 || TREE_CODE (arg0) == code
3036 || (TREE_CODE (arg0) != BIT_AND_EXPR
3037 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3038 return 0;
3040 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3042 common = TREE_OPERAND (arg0, 0);
3043 left = TREE_OPERAND (arg0, 1);
3044 right = TREE_OPERAND (arg1, 1);
3046 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3048 common = TREE_OPERAND (arg0, 0);
3049 left = TREE_OPERAND (arg0, 1);
3050 right = TREE_OPERAND (arg1, 0);
3052 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3054 common = TREE_OPERAND (arg0, 1);
3055 left = TREE_OPERAND (arg0, 0);
3056 right = TREE_OPERAND (arg1, 1);
3058 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3060 common = TREE_OPERAND (arg0, 1);
3061 left = TREE_OPERAND (arg0, 0);
3062 right = TREE_OPERAND (arg1, 0);
3064 else
3065 return 0;
3067 return fold (build2 (TREE_CODE (arg0), type, common,
3068 fold (build2 (code, type, left, right))));
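/* Concrete instance (sketch): given (x | 3) & (x | 5), the common
   operand is X, so the result is x | (3 & 5), which further folds to
   x | 1, saving one bitwise operation.  */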
3071 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3072 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3074 static tree
3075 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3076 int unsignedp)
3078 tree result;
3080 if (bitpos == 0)
3082 tree size = TYPE_SIZE (TREE_TYPE (inner));
3083 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3084 || POINTER_TYPE_P (TREE_TYPE (inner)))
3085 && host_integerp (size, 0)
3086 && tree_low_cst (size, 0) == bitsize)
3087 return fold_convert (type, inner);
3090 result = build3 (BIT_FIELD_REF, type, inner,
3091 size_int (bitsize), bitsize_int (bitpos));
3093 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3095 return result;
3098 /* Optimize a bit-field compare.
3100 There are two cases: First is a compare against a constant and the
3101 second is a comparison of two items where the fields are at the same
3102 bit position relative to the start of a chunk (byte, halfword, word)
3103 large enough to contain it. In these cases we can avoid the shift
3104 implicit in bitfield extractions.
3106 For constants, we emit a compare of the shifted constant with the
3107 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3108 compared. For two fields at the same position, we do the ANDs with the
3109 similar mask and compare the result of the ANDs.
3111 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3112 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3113 are the left and right operands of the comparison, respectively.
3115 If the optimization described above can be done, we return the resulting
3116 tree. Otherwise we return zero. */
3118 static tree
3119 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3120 tree lhs, tree rhs)
3122 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3123 tree type = TREE_TYPE (lhs);
3124 tree signed_type, unsigned_type;
3125 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3126 enum machine_mode lmode, rmode, nmode;
3127 int lunsignedp, runsignedp;
3128 int lvolatilep = 0, rvolatilep = 0;
3129 tree linner, rinner = NULL_TREE;
3130 tree mask;
3131 tree offset;
3133 /* Get all the information about the extractions being done. If the bit size
3134 is the same as the size of the underlying object, we aren't doing an
3135 extraction at all and so can do nothing. We also don't want to
3136 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3137 then will no longer be able to replace it. */
3138 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3139 &lunsignedp, &lvolatilep, false);
3140 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3141 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3142 return 0;
3144 if (!const_p)
3146 /* If this is not a constant, we can only do something if bit positions,
3147 sizes, and signedness are the same. */
3148 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3149 &runsignedp, &rvolatilep, false);
3151 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3152 || lunsignedp != runsignedp || offset != 0
3153 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3154 return 0;
3157 /* See if we can find a mode to refer to this field. We should be able to,
3158 but fail if we can't. */
3159 nmode = get_best_mode (lbitsize, lbitpos,
3160 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3161 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3162 TYPE_ALIGN (TREE_TYPE (rinner))),
3163 word_mode, lvolatilep || rvolatilep);
3164 if (nmode == VOIDmode)
3165 return 0;
3167 /* Set signed and unsigned types of the precision of this mode for the
3168 shifts below. */
3169 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3170 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3172 /* Compute the bit position and size for the new reference and our offset
3173 within it. If the new reference is the same size as the original, we
3174 won't optimize anything, so return zero. */
3175 nbitsize = GET_MODE_BITSIZE (nmode);
3176 nbitpos = lbitpos & ~ (nbitsize - 1);
3177 lbitpos -= nbitpos;
3178 if (nbitsize == lbitsize)
3179 return 0;
3181 if (BYTES_BIG_ENDIAN)
3182 lbitpos = nbitsize - lbitsize - lbitpos;
3184 /* Make the mask to be used against the extracted field. */
3185 mask = build_int_cst (unsigned_type, -1);
3186 mask = force_fit_type (mask, 0, false, false);
3187 mask = fold_convert (unsigned_type, mask);
3188 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3189 mask = const_binop (RSHIFT_EXPR, mask,
3190 size_int (nbitsize - lbitsize - lbitpos), 0);
3192 if (! const_p)
3193 /* If not comparing with constant, just rework the comparison
3194 and return. */
3195 return build2 (code, compare_type,
3196 build2 (BIT_AND_EXPR, unsigned_type,
3197 make_bit_field_ref (linner, unsigned_type,
3198 nbitsize, nbitpos, 1),
3199 mask),
3200 build2 (BIT_AND_EXPR, unsigned_type,
3201 make_bit_field_ref (rinner, unsigned_type,
3202 nbitsize, nbitpos, 1),
3203 mask));
3205 /* Otherwise, we are handling the constant case. See if the constant is too
3206 big for the field. Warn and return a tree for 0 (false) if so. We do
3207 this not only for its own sake, but to avoid having to test for this
3208 error case below. If we didn't, we might generate wrong code.
3210 For unsigned fields, the constant shifted right by the field length should
3211 be all zero. For signed fields, the high-order bits should agree with
3212 the sign bit. */
3214 if (lunsignedp)
3216 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3217 fold_convert (unsigned_type, rhs),
3218 size_int (lbitsize), 0)))
3220 warning ("comparison is always %d due to width of bit-field",
3221 code == NE_EXPR);
3222 return constant_boolean_node (code == NE_EXPR, compare_type);
3225 else
3227 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3228 size_int (lbitsize - 1), 0);
3229 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3231 warning ("comparison is always %d due to width of bit-field",
3232 code == NE_EXPR);
3233 return constant_boolean_node (code == NE_EXPR, compare_type);
3237 /* Single-bit compares should always be against zero. */
3238 if (lbitsize == 1 && ! integer_zerop (rhs))
3240 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3241 rhs = fold_convert (type, integer_zero_node);
3244 /* Make a new bitfield reference, shift the constant over the
3245 appropriate number of bits and mask it with the computed mask
3246 (in case this was a signed field). If we changed it, make a new one. */
3247 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3248 if (lvolatilep)
3250 TREE_SIDE_EFFECTS (lhs) = 1;
3251 TREE_THIS_VOLATILE (lhs) = 1;
3254 rhs = fold (const_binop (BIT_AND_EXPR,
3255 const_binop (LSHIFT_EXPR,
3256 fold_convert (unsigned_type, rhs),
3257 size_int (lbitpos), 0),
3258 mask, 0));
3260 return build2 (code, compare_type,
3261 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3262 rhs);
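/* Illustrative effect (not from the sources): for
     struct s { unsigned f : 3; } *p;
   the test p->f == 5 is rewritten as roughly
     (WORD & MASK) == (5 << SHIFT)
   where WORD is a mode-sized load containing the field, MASK covers
   the field's bits and SHIFT is its position, avoiding the extract
   and shift a plain bitfield read would need.  */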
3265 /* Subroutine for fold_truthop: decode a field reference.
3267 If EXP is a comparison reference, we return the innermost reference.
3269 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3270 set to the starting bit number.
3272 If the innermost field can be completely contained in a mode-sized
3273 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3275 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3276 otherwise it is not changed.
3278 *PUNSIGNEDP is set to the signedness of the field.
3280 *PMASK is set to the mask used. This is either contained in a
3281 BIT_AND_EXPR or derived from the width of the field.
3283 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3285 Return 0 if this is not a component reference or is one that we can't
3286 do anything with. */
3288 static tree
3289 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3290 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3291 int *punsignedp, int *pvolatilep,
3292 tree *pmask, tree *pand_mask)
3294 tree outer_type = 0;
3295 tree and_mask = 0;
3296 tree mask, inner, offset;
3297 tree unsigned_type;
3298 unsigned int precision;
3300 /* All the optimizations using this function assume integer fields.
3301 There are problems with FP fields since the type_for_size call
3302 below can fail for, e.g., XFmode. */
3303 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3304 return 0;
3306 /* We are interested in the bare arrangement of bits, so strip everything
3307 that doesn't affect the machine mode. However, record the type of the
3308 outermost expression if it may matter below. */
3309 if (TREE_CODE (exp) == NOP_EXPR
3310 || TREE_CODE (exp) == CONVERT_EXPR
3311 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3312 outer_type = TREE_TYPE (exp);
3313 STRIP_NOPS (exp);
3315 if (TREE_CODE (exp) == BIT_AND_EXPR)
3317 and_mask = TREE_OPERAND (exp, 1);
3318 exp = TREE_OPERAND (exp, 0);
3319 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3320 if (TREE_CODE (and_mask) != INTEGER_CST)
3321 return 0;
3324 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3325 punsignedp, pvolatilep, false);
3326 if ((inner == exp && and_mask == 0)
3327 || *pbitsize < 0 || offset != 0
3328 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3329 return 0;
3331 /* If the number of bits in the reference is the same as the bitsize of
3332 the outer type, then the outer type gives the signedness. Otherwise
3333 (in case of a small bitfield) the signedness is unchanged. */
3334 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3335 *punsignedp = TYPE_UNSIGNED (outer_type);
3337 /* Compute the mask to access the bitfield. */
3338 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3339 precision = TYPE_PRECISION (unsigned_type);
3341 mask = build_int_cst (unsigned_type, -1);
3342 mask = force_fit_type (mask, 0, false, false);
3344 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3345 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3347 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3348 if (and_mask != 0)
3349 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3350 fold_convert (unsigned_type, and_mask), mask));
3352 *pmask = mask;
3353 *pand_mask = and_mask;
3354 return inner;
3357 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3358 bit positions. */
3360 static int
3361 all_ones_mask_p (tree mask, int size)
3363 tree type = TREE_TYPE (mask);
3364 unsigned int precision = TYPE_PRECISION (type);
3365 tree tmask;
3367 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3368 tmask = force_fit_type (tmask, 0, false, false);
3370 return
3371 tree_int_cst_equal (mask,
3372 const_binop (RSHIFT_EXPR,
3373 const_binop (LSHIFT_EXPR, tmask,
3374 size_int (precision - size),
3375 0),
3376 size_int (precision - size), 0));
3379 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3380 represents the sign bit of EXP's type. If EXP represents a sign
3381 or zero extension, also test VAL against the unextended type.
3382 The return value is the (sub)expression whose sign bit is VAL,
3383 or NULL_TREE otherwise. */
3385 static tree
3386 sign_bit_p (tree exp, tree val)
3388 unsigned HOST_WIDE_INT mask_lo, lo;
3389 HOST_WIDE_INT mask_hi, hi;
3390 int width;
3391 tree t;
3393 /* Tree EXP must have an integral type. */
3394 t = TREE_TYPE (exp);
3395 if (! INTEGRAL_TYPE_P (t))
3396 return NULL_TREE;
3398 /* Tree VAL must be an integer constant. */
3399 if (TREE_CODE (val) != INTEGER_CST
3400 || TREE_CONSTANT_OVERFLOW (val))
3401 return NULL_TREE;
3403 width = TYPE_PRECISION (t);
3404 if (width > HOST_BITS_PER_WIDE_INT)
3406 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3407 lo = 0;
3409 mask_hi = ((unsigned HOST_WIDE_INT) -1
3410 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3411 mask_lo = -1;
3413 else
3415 hi = 0;
3416 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3418 mask_hi = 0;
3419 mask_lo = ((unsigned HOST_WIDE_INT) -1
3420 >> (HOST_BITS_PER_WIDE_INT - width));
3423 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3424 treat VAL as if it were unsigned. */
3425 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3426 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3427 return exp;
3429 /* Handle extension from a narrower type. */
3430 if (TREE_CODE (exp) == NOP_EXPR
3431 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3432 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3434 return NULL_TREE;
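/* Example (sketch): for a 32-bit signed X, sign_bit_p (x, val)
   returns X when VAL is the INTEGER_CST with value 0x80000000
   (i.e. INT_MIN); if X is itself a NOP_EXPR extension of a narrower
   operand, the narrower operand's sign bit is tried as well.  */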
3437 /* Subroutine for fold_truthop: determine if an operand is simple enough
3438 to be evaluated unconditionally. */
3440 static int
3441 simple_operand_p (tree exp)
3443 /* Strip any conversions that don't change the machine mode. */
3444 STRIP_NOPS (exp);
3446 return (CONSTANT_CLASS_P (exp)
3447 || TREE_CODE (exp) == SSA_NAME
3448 || (DECL_P (exp)
3449 && ! TREE_ADDRESSABLE (exp)
3450 && ! TREE_THIS_VOLATILE (exp)
3451 && ! DECL_NONLOCAL (exp)
3452 /* Don't regard global variables as simple. They may be
3453 allocated in ways unknown to the compiler (shared memory,
3454 #pragma weak, etc). */
3455 && ! TREE_PUBLIC (exp)
3456 && ! DECL_EXTERNAL (exp)
3457 /* Loading a static variable is unduly expensive, but global
3458 registers aren't expensive. */
3459 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3462 /* The following functions are subroutines to fold_range_test and allow it to
3463 try to change a logical combination of comparisons into a range test.
3465 For example, both
3466 X == 2 || X == 3 || X == 4 || X == 5
3467 and
3468 X >= 2 && X <= 5
3469 are converted to
3470 (unsigned) (X - 2) <= 3
3472 We describe each set of comparisons as being either inside or outside
3473 a range, using a variable named like IN_P, and then describe the
3474 range with a lower and upper bound. If one of the bounds is omitted,
3475 it represents either the highest or lowest value of the type.
3477 In the comments below, we represent a range by two numbers in brackets
3478 preceded by a "+" to designate being inside that range, or a "-" to
3479 designate being outside that range, so the condition can be inverted by
3480 flipping the prefix. An omitted bound is represented by a "-". For
3481 example, "- [-, 10]" means being outside the range starting at the lowest
3482 possible value and ending at 10, in other words, being greater than 10.
3483 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3484 always false.
3486 We set up things so that the missing bounds are handled in a consistent
3487 manner so neither a missing bound nor "true" and "false" need to be
3488 handled using a special case. */
3490 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3491 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3492 and UPPER1_P are nonzero if the respective argument is an upper bound
3493 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3494 must be specified for a comparison. ARG1 will be converted to ARG0's
3495 type if both are specified. */
3497 static tree
3498 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3499 tree arg1, int upper1_p)
3501 tree tem;
3502 int result;
3503 int sgn0, sgn1;
3505 /* If neither arg represents infinity, do the normal operation.
3506 Else, if not a comparison, return infinity. Else handle the special
3507 comparison rules. Note that most of the cases below won't occur, but
3508 are handled for consistency. */
3510 if (arg0 != 0 && arg1 != 0)
3512 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3513 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3514 STRIP_NOPS (tem);
3515 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3518 if (TREE_CODE_CLASS (code) != tcc_comparison)
3519 return 0;
3521 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3522 for neither. In real maths, we cannot assume open-ended ranges are
3523 the same. But, this is computer arithmetic, where numbers are finite.
3524 We can therefore make the transformation of any unbounded range with
3525 the value Z, Z being greater than any representable number. This permits
3526 us to treat unbounded ranges as equal. */
3527 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3528 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3529 switch (code)
3531 case EQ_EXPR:
3532 result = sgn0 == sgn1;
3533 break;
3534 case NE_EXPR:
3535 result = sgn0 != sgn1;
3536 break;
3537 case LT_EXPR:
3538 result = sgn0 < sgn1;
3539 break;
3540 case LE_EXPR:
3541 result = sgn0 <= sgn1;
3542 break;
3543 case GT_EXPR:
3544 result = sgn0 > sgn1;
3545 break;
3546 case GE_EXPR:
3547 result = sgn0 >= sgn1;
3548 break;
3549 default:
3550 gcc_unreachable ();
3553 return constant_boolean_node (result, type);
3556 /* Given EXP, a logical expression, set the range it is testing into
3557 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3558 actually being tested. *PLOW and *PHIGH will be made of the same type
3559 as the returned expression. If EXP is not a comparison, we will most
3560 likely not be returning a useful value and range. */
3562 static tree
3563 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3565 enum tree_code code;
3566 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3567 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3568 int in_p, n_in_p;
3569 tree low, high, n_low, n_high;
3571 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3572 and see if we can refine the range. Some of the cases below may not
3573 happen, but it doesn't seem worth worrying about this. We "continue"
3574 the outer loop when we've changed something; otherwise we "break"
3575 the switch, which will "break" the while. */
3577 in_p = 0;
3578 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3580 while (1)
3582 code = TREE_CODE (exp);
3583 exp_type = TREE_TYPE (exp);
3585 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3587 if (TREE_CODE_LENGTH (code) > 0)
3588 arg0 = TREE_OPERAND (exp, 0);
3589 if (TREE_CODE_CLASS (code) == tcc_comparison
3590 || TREE_CODE_CLASS (code) == tcc_unary
3591 || TREE_CODE_CLASS (code) == tcc_binary)
3592 arg0_type = TREE_TYPE (arg0);
3593 if (TREE_CODE_CLASS (code) == tcc_binary
3594 || TREE_CODE_CLASS (code) == tcc_comparison
3595 || (TREE_CODE_CLASS (code) == tcc_expression
3596 && TREE_CODE_LENGTH (code) > 1))
3597 arg1 = TREE_OPERAND (exp, 1);
3600 switch (code)
3602 case TRUTH_NOT_EXPR:
3603 in_p = ! in_p, exp = arg0;
3604 continue;
3606 case EQ_EXPR: case NE_EXPR:
3607 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3608 /* We can only do something if the range is testing for zero
3609 and if the second operand is an integer constant. Note that
3610 saying something is "in" the range we make is done by
3611 complementing IN_P, since it is set in the initial case of
3612 being not equal to zero; "out" is leaving it alone. */
3613 if (low == 0 || high == 0
3614 || ! integer_zerop (low) || ! integer_zerop (high)
3615 || TREE_CODE (arg1) != INTEGER_CST)
3616 break;
3618 switch (code)
3620 case NE_EXPR: /* - [c, c] */
3621 low = high = arg1;
3622 break;
3623 case EQ_EXPR: /* + [c, c] */
3624 in_p = ! in_p, low = high = arg1;
3625 break;
3626 case GT_EXPR: /* - [-, c] */
3627 low = 0, high = arg1;
3628 break;
3629 case GE_EXPR: /* + [c, -] */
3630 in_p = ! in_p, low = arg1, high = 0;
3631 break;
3632 case LT_EXPR: /* - [c, -] */
3633 low = arg1, high = 0;
3634 break;
3635 case LE_EXPR: /* + [-, c] */
3636 in_p = ! in_p, low = 0, high = arg1;
3637 break;
3638 default:
3639 gcc_unreachable ();
3642 /* If this is an unsigned comparison, we also know that EXP is
3643 greater than or equal to zero. We base the range tests we make
3644 on that fact, so we record it here so we can parse existing
3645 range tests. We test arg0_type since often the return type
3646 of, e.g. EQ_EXPR, is boolean. */
3647 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3649 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3650 in_p, low, high, 1,
3651 fold_convert (arg0_type, integer_zero_node),
3652 NULL_TREE))
3653 break;
3655 in_p = n_in_p, low = n_low, high = n_high;
3657 /* If the high bound is missing, but we have a nonzero low
3658 bound, reverse the range so it goes from zero to the low bound
3659 minus 1. */
3660 if (high == 0 && low && ! integer_zerop (low))
3662 in_p = ! in_p;
3663 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3664 integer_one_node, 0);
3665 low = fold_convert (arg0_type, integer_zero_node);
3669 exp = arg0;
3670 continue;
3672 case NEGATE_EXPR:
3673 /* (-x) IN [a,b] -> x in [-b, -a] */
3674 n_low = range_binop (MINUS_EXPR, exp_type,
3675 fold_convert (exp_type, integer_zero_node),
3676 0, high, 1);
3677 n_high = range_binop (MINUS_EXPR, exp_type,
3678 fold_convert (exp_type, integer_zero_node),
3679 0, low, 0);
3680 low = n_low, high = n_high;
3681 exp = arg0;
3682 continue;
3684 case BIT_NOT_EXPR:
3685 /* ~ X -> -X - 1 */
3686 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3687 fold_convert (exp_type, integer_one_node));
3688 continue;
3690 case PLUS_EXPR: case MINUS_EXPR:
3691 if (TREE_CODE (arg1) != INTEGER_CST)
3692 break;
3694 /* If EXP is signed, any overflow in the computation is undefined,
3695 so we don't worry about it so long as our computations on
3696 the bounds don't overflow. For unsigned, overflow is defined
3697 and this is exactly the right thing. */
3698 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3699 arg0_type, low, 0, arg1, 0);
3700 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3701 arg0_type, high, 1, arg1, 0);
3702 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3703 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3704 break;
3706 /* Check for an unsigned range which has wrapped around the maximum
3707 value thus making n_high < n_low, and normalize it. */
3708 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3710 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3711 integer_one_node, 0);
3712 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3713 integer_one_node, 0);
3715 /* If the range is of the form +/- [ x+1, x ], we won't
3716 be able to normalize it. But then, it represents the
3717 whole range or the empty set, so make it
3718 +/- [ -, - ]. */
3719 if (tree_int_cst_equal (n_low, low)
3720 && tree_int_cst_equal (n_high, high))
3721 low = high = 0;
3722 else
3723 in_p = ! in_p;
3725 else
3726 low = n_low, high = n_high;
3728 exp = arg0;
3729 continue;
3731 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3732 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3733 break;
3735 if (! INTEGRAL_TYPE_P (arg0_type)
3736 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3737 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3738 break;
3740 n_low = low, n_high = high;
3742 if (n_low != 0)
3743 n_low = fold_convert (arg0_type, n_low);
3745 if (n_high != 0)
3746 n_high = fold_convert (arg0_type, n_high);
3749 /* If we're converting arg0 from an unsigned type, to exp,
3750 a signed type, we will be doing the comparison as unsigned.
3751 The tests above have already verified that LOW and HIGH
3752 are both positive.
3754 So we have to ensure that we will handle large unsigned
3755 values the same way that the current signed bounds treat
3756 negative values. */
3758 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3760 tree high_positive;
3761 tree equiv_type = lang_hooks.types.type_for_mode
3762 (TYPE_MODE (arg0_type), 1);
3764 /* A range without an upper bound is, naturally, unbounded.
3765 Since convert would have cropped a very large value, use
3766 the max value for the destination type. */
3767 high_positive
3768 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3769 : TYPE_MAX_VALUE (arg0_type);
3771 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3772 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3773 fold_convert (arg0_type,
3774 high_positive),
3775 fold_convert (arg0_type,
3776 integer_one_node)));
3778 /* If the low bound is specified, "and" the range with the
3779 range for which the original unsigned value will be
3780 positive. */
3781 if (low != 0)
3783 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3784 1, n_low, n_high, 1,
3785 fold_convert (arg0_type,
3786 integer_zero_node),
3787 high_positive))
3788 break;
3790 in_p = (n_in_p == in_p);
3792 else
3794 /* Otherwise, "or" the range with the range of the input
3795 that will be interpreted as negative. */
3796 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3797 0, n_low, n_high, 1,
3798 fold_convert (arg0_type,
3799 integer_zero_node),
3800 high_positive))
3801 break;
3803 in_p = (in_p != n_in_p);
3807 exp = arg0;
3808 low = n_low, high = n_high;
3809 continue;
3811 default:
3812 break;
3815 break;
3818 /* If EXP is a constant, we can evaluate whether this is true or false. */
3819 if (TREE_CODE (exp) == INTEGER_CST)
3821 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3822 exp, 0, low, 0))
3823 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3824 exp, 1, high, 1)));
3825 low = high = 0;
3826 exp = 0;
3829 *pin_p = in_p, *plow = low, *phigh = high;
3830 return exp;
3833 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3834 type, TYPE, return an expression to test if EXP is in (or out of, depending
3835 on IN_P) the range. Return 0 if the test couldn't be created. */
3837 static tree
3838 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3840 tree etype = TREE_TYPE (exp);
3841 tree value;
3843 if (! in_p)
3845 value = build_range_check (type, exp, 1, low, high);
3846 if (value != 0)
3847 return invert_truthvalue (value);
3849 return 0;
3852 if (low == 0 && high == 0)
3853 return fold_convert (type, integer_one_node);
3855 if (low == 0)
3856 return fold (build2 (LE_EXPR, type, exp, high));
3858 if (high == 0)
3859 return fold (build2 (GE_EXPR, type, exp, low));
3861 if (operand_equal_p (low, high, 0))
3862 return fold (build2 (EQ_EXPR, type, exp, low));
3864 if (integer_zerop (low))
3866 if (! TYPE_UNSIGNED (etype))
3868 etype = lang_hooks.types.unsigned_type (etype);
3869 high = fold_convert (etype, high);
3870 exp = fold_convert (etype, exp);
3872 return build_range_check (type, exp, 1, 0, high);
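/* Illustrative annotation (not in the original source): for a signed
   int X, the range check +[0, 9] is rebuilt through the unsigned copy
   of its type, so "X >= 0 && X <= 9" becomes the single test
   "(unsigned int) X <= 9U".  */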
3875 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3876 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3878 unsigned HOST_WIDE_INT lo;
3879 HOST_WIDE_INT hi;
3880 int prec;
3882 prec = TYPE_PRECISION (etype);
3883 if (prec <= HOST_BITS_PER_WIDE_INT)
3885 hi = 0;
3886 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3888 else
3890 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3891 lo = (unsigned HOST_WIDE_INT) -1;
3894 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3896 if (TYPE_UNSIGNED (etype))
3898 etype = lang_hooks.types.signed_type (etype);
3899 exp = fold_convert (etype, exp);
3901 return fold (build2 (GT_EXPR, type, exp,
3902 fold_convert (etype, integer_zero_node)));
3906 value = const_binop (MINUS_EXPR, high, low, 0);
3907 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3909 tree utype, minv, maxv;
3911 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3912 for the type in question, as we rely on this here. */
3913 switch (TREE_CODE (etype))
3915 case INTEGER_TYPE:
3916 case ENUMERAL_TYPE:
3917 case CHAR_TYPE:
3918 utype = lang_hooks.types.unsigned_type (etype);
3919 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3920 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3921 integer_one_node, 1);
3922 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3923 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3924 minv, 1, maxv, 1)))
3926 etype = utype;
3927 high = fold_convert (etype, high);
3928 low = fold_convert (etype, low);
3929 exp = fold_convert (etype, exp);
3930 value = const_binop (MINUS_EXPR, high, low, 0);
3932 break;
3933 default:
3934 break;
3938 if (value != 0 && ! TREE_OVERFLOW (value))
3939 return build_range_check (type,
3940 fold (build2 (MINUS_EXPR, etype, exp, low)),
3941 1, fold_convert (etype, integer_zero_node),
3942 value);
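/* Illustrative annotation (not in the original source): the
   subtraction above implements the classic trick
   "LOW <= X && X <= HIGH" -> "(unsigned) (X - LOW) <= HIGH - LOW",
   e.g. "c >= 'a' && c <= 'z'" becomes "(unsigned) (c - 'a') <= 25".  */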
3944 return 0;
3947 /* Given two ranges, see if we can merge them into one. Return 1 if we
3948 can, 0 if we can't. Set the output range into the specified parameters. */
3950 static int
3951 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3952 tree high0, int in1_p, tree low1, tree high1)
3954 int no_overlap;
3955 int subset;
3956 int temp;
3957 tree tem;
3958 int in_p;
3959 tree low, high;
3960 int lowequal = ((low0 == 0 && low1 == 0)
3961 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3962 low0, 0, low1, 0)));
3963 int highequal = ((high0 == 0 && high1 == 0)
3964 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3965 high0, 1, high1, 1)));
3967 /* Make range 0 be the range that starts first, or ends last if they
3968 start at the same value. Swap them if that is not the case. */
3969 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3970 low0, 0, low1, 0))
3971 || (lowequal
3972 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3973 high1, 1, high0, 1))))
3975 temp = in0_p, in0_p = in1_p, in1_p = temp;
3976 tem = low0, low0 = low1, low1 = tem;
3977 tem = high0, high0 = high1, high1 = tem;
3980 /* Now flag two cases, whether the ranges are disjoint or whether the
3981 second range is totally subsumed in the first. Note that the tests
3982 below are simplified by the ones above. */
3983 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3984 high0, 1, low1, 0));
3985 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3986 high1, 1, high0, 1));
3988 /* We now have four cases, depending on whether we are including or
3989 excluding the two ranges. */
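/* Illustrative annotation (not in the original source):
   +[1,10] and +[5,20] merge to +[5,10];
   +[1,10] and -[5,20] merge to +[1,4];
   -[1,10] and +[5,20] merge to +[11,20];
   -[1,10] and -[5,20] merge to -[1,20].  */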
3990 if (in0_p && in1_p)
3992 /* If they don't overlap, the result is false. If the second range
3993 is a subset it is the result. Otherwise, the range is from the start
3994 of the second to the end of the first. */
3995 if (no_overlap)
3996 in_p = 0, low = high = 0;
3997 else if (subset)
3998 in_p = 1, low = low1, high = high1;
3999 else
4000 in_p = 1, low = low1, high = high0;
4003 else if (in0_p && ! in1_p)
4005 /* If they don't overlap, the result is the first range. If they are
4006 equal, the result is false. If the second range is a subset of the
4007 first, and the ranges begin at the same place, we go from just after
4008 the end of the first range to the end of the second. If the second
4009 range is not a subset of the first, or if it is a subset and both
4010 ranges end at the same place, the range starts at the start of the
4011 first range and ends just before the second range.
4012 Otherwise, we can't describe this as a single range. */
4013 if (no_overlap)
4014 in_p = 1, low = low0, high = high0;
4015 else if (lowequal && highequal)
4016 in_p = 0, low = high = 0;
4017 else if (subset && lowequal)
4019 in_p = 1, high = high0;
4020 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4021 integer_one_node, 0);
4023 else if (! subset || highequal)
4025 in_p = 1, low = low0;
4026 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4027 integer_one_node, 0);
4029 else
4030 return 0;
4033 else if (! in0_p && in1_p)
4035 /* If they don't overlap, the result is the second range. If the second
4036 is a subset of the first, the result is false. Otherwise,
4037 the range starts just after the first range and ends at the
4038 end of the second. */
4039 if (no_overlap)
4040 in_p = 1, low = low1, high = high1;
4041 else if (subset || highequal)
4042 in_p = 0, low = high = 0;
4043 else
4045 in_p = 1, high = high1;
4046 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4047 integer_one_node, 0);
4051 else
4053 /* The case where we are excluding both ranges. Here the complex case
4054 is if they don't overlap. In that case, the only time we have a
4055 range is if they are adjacent. If the second is a subset of the
4056 first, the result is the first. Otherwise, the range to exclude
4057 starts at the beginning of the first range and ends at the end of the
4058 second. */
4059 if (no_overlap)
4061 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4062 range_binop (PLUS_EXPR, NULL_TREE,
4063 high0, 1,
4064 integer_one_node, 1),
4065 1, low1, 0)))
4066 in_p = 0, low = low0, high = high1;
4067 else
4069 /* Canonicalize - [min, x] into - [-, x]. */
4070 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4071 switch (TREE_CODE (TREE_TYPE (low0)))
4073 case ENUMERAL_TYPE:
4074 if (TYPE_PRECISION (TREE_TYPE (low0))
4075 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4076 break;
4077 /* FALLTHROUGH */
4078 case INTEGER_TYPE:
4079 case CHAR_TYPE:
4080 if (tree_int_cst_equal (low0,
4081 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4082 low0 = 0;
4083 break;
4084 case POINTER_TYPE:
4085 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4086 && integer_zerop (low0))
4087 low0 = 0;
4088 break;
4089 default:
4090 break;
4093 /* Canonicalize - [x, max] into - [x, -]. */
4094 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4095 switch (TREE_CODE (TREE_TYPE (high1)))
4097 case ENUMERAL_TYPE:
4098 if (TYPE_PRECISION (TREE_TYPE (high1))
4099 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4100 break;
4101 /* FALLTHROUGH */
4102 case INTEGER_TYPE:
4103 case CHAR_TYPE:
4104 if (tree_int_cst_equal (high1,
4105 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4106 high1 = 0;
4107 break;
4108 case POINTER_TYPE:
4109 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4110 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4111 high1, 1,
4112 integer_one_node, 1)))
4113 high1 = 0;
4114 break;
4115 default:
4116 break;
4119 /* The ranges might be also adjacent between the maximum and
4120 minimum values of the given type. For
4121 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4122 return + [x + 1, y - 1]. */
4123 if (low0 == 0 && high1 == 0)
4125 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4126 integer_one_node, 1);
4127 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4128 integer_one_node, 0);
4129 if (low == 0 || high == 0)
4130 return 0;
4132 in_p = 1;
4134 else
4135 return 0;
4138 else if (subset)
4139 in_p = 0, low = low0, high = high0;
4140 else
4141 in_p = 0, low = low0, high = high1;
4144 *pin_p = in_p, *plow = low, *phigh = high;
4145 return 1;
4149 /* Subroutine of fold, looking inside expressions of the form
4150 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4151 of the COND_EXPR. This function is also used to optimize
4152 A op B ? C : A, by reversing the comparison first.
4154 Return a folded expression whose code is not a COND_EXPR
4155 anymore, or NULL_TREE if no folding opportunity is found. */
4157 static tree
4158 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4160 enum tree_code comp_code = TREE_CODE (arg0);
4161 tree arg00 = TREE_OPERAND (arg0, 0);
4162 tree arg01 = TREE_OPERAND (arg0, 1);
4163 tree arg1_type = TREE_TYPE (arg1);
4164 tree tem;
4166 STRIP_NOPS (arg1);
4167 STRIP_NOPS (arg2);
4169 /* If we have A op 0 ? A : -A, consider applying the following
4170 transformations:
4172 A == 0? A : -A same as -A
4173 A != 0? A : -A same as A
4174 A >= 0? A : -A same as abs (A)
4175 A > 0? A : -A same as abs (A)
4176 A <= 0? A : -A same as -abs (A)
4177 A < 0? A : -A same as -abs (A)
4179 None of these transformations work for modes with signed
4180 zeros. If A is +/-0, the first two transformations will
4181 change the sign of the result (from +0 to -0, or vice
4182 versa). The last four will fix the sign of the result,
4183 even though the original expressions could be positive or
4184 negative, depending on the sign of A.
4186 Note that all these transformations are correct if A is
4187 NaN, since the two alternatives (A and -A) are also NaNs. */
4188 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4189 ? real_zerop (arg01)
4190 : integer_zerop (arg01))
4191 && TREE_CODE (arg2) == NEGATE_EXPR
4192 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4193 switch (comp_code)
4195 case EQ_EXPR:
4196 case UNEQ_EXPR:
4197 tem = fold_convert (arg1_type, arg1);
4198 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4199 case NE_EXPR:
4200 case LTGT_EXPR:
4201 return pedantic_non_lvalue (fold_convert (type, arg1));
4202 case UNGE_EXPR:
4203 case UNGT_EXPR:
4204 if (flag_trapping_math)
4205 break;
4206 /* Fall through. */
4207 case GE_EXPR:
4208 case GT_EXPR:
4209 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4210 arg1 = fold_convert (lang_hooks.types.signed_type
4211 (TREE_TYPE (arg1)), arg1);
4212 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4213 return pedantic_non_lvalue (fold_convert (type, tem));
4214 case UNLE_EXPR:
4215 case UNLT_EXPR:
4216 if (flag_trapping_math)
4217 break;
4218 case LE_EXPR:
4219 case LT_EXPR:
4220 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4221 arg1 = fold_convert (lang_hooks.types.signed_type
4222 (TREE_TYPE (arg1)), arg1);
4223 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4224 return negate_expr (fold_convert (type, tem));
4225 default:
4226 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4227 break;
4230 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4231 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4232 both transformations are correct when A is NaN: A != 0
4233 is then true, and A == 0 is false. */
4235 if (integer_zerop (arg01) && integer_zerop (arg2))
4237 if (comp_code == NE_EXPR)
4238 return pedantic_non_lvalue (fold_convert (type, arg1));
4239 else if (comp_code == EQ_EXPR)
4240 return fold_convert (type, integer_zero_node);
4243 /* Try some transformations of A op B ? A : B.
4245 A == B? A : B same as B
4246 A != B? A : B same as A
4247 A >= B? A : B same as max (A, B)
4248 A > B? A : B same as max (B, A)
4249 A <= B? A : B same as min (A, B)
4250 A < B? A : B same as min (B, A)
4252 As above, these transformations don't work in the presence
4253 of signed zeros. For example, if A and B are zeros of
4254 opposite sign, the first two transformations will change
4255 the sign of the result. In the last four, the original
4256 expressions give different results for (A=+0, B=-0) and
4257 (A=-0, B=+0), but the transformed expressions do not.
4259 The first two transformations are correct if either A or B
4260 is a NaN. In the first transformation, the condition will
4261 be false, and B will indeed be chosen. In the case of the
4262 second transformation, the condition A != B will be true,
4263 and A will be chosen.
4265 The conversions to max() and min() are not correct if B is
4266 a number and A is not. The conditions in the original
4267 expressions will be false, so all four give B. The min()
4268 and max() versions would give a NaN instead. */
4269 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4271 tree comp_op0 = arg00;
4272 tree comp_op1 = arg01;
4273 tree comp_type = TREE_TYPE (comp_op0);
4275 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4276 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4278 comp_type = type;
4279 comp_op0 = arg1;
4280 comp_op1 = arg2;
4283 switch (comp_code)
4285 case EQ_EXPR:
4286 return pedantic_non_lvalue (fold_convert (type, arg2));
4287 case NE_EXPR:
4288 return pedantic_non_lvalue (fold_convert (type, arg1));
4289 case LE_EXPR:
4290 case LT_EXPR:
4291 case UNLE_EXPR:
4292 case UNLT_EXPR:
4293 /* In C++ a ?: expression can be an lvalue, so put the
4294 operand which will be used if they are equal first
4295 so that we can convert this back to the
4296 corresponding COND_EXPR. */
4297 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4299 comp_op0 = fold_convert (comp_type, comp_op0);
4300 comp_op1 = fold_convert (comp_type, comp_op1);
4301 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4302 ? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
4303 : fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
4304 return pedantic_non_lvalue (fold_convert (type, tem));
4306 break;
4307 case GE_EXPR:
4308 case GT_EXPR:
4309 case UNGE_EXPR:
4310 case UNGT_EXPR:
4311 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4313 comp_op0 = fold_convert (comp_type, comp_op0);
4314 comp_op1 = fold_convert (comp_type, comp_op1);
4315 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4316 ? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
4317 : fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
4318 return pedantic_non_lvalue (fold_convert (type, tem));
4320 break;
4321 case UNEQ_EXPR:
4322 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4323 return pedantic_non_lvalue (fold_convert (type, arg2));
4324 break;
4325 case LTGT_EXPR:
4326 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4327 return pedantic_non_lvalue (fold_convert (type, arg1));
4328 break;
4329 default:
4330 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4331 break;
4335 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4336 we might still be able to simplify this. For example,
4337 if C1 is one less or one more than C2, this might have started
4338 out as a MIN or MAX and been transformed by this function.
4339 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
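/* Illustrative annotation (not in the original source): "x < 5 ? x : 4"
   matches the LT_EXPR case below with C1 == C2 + 1 and so folds to
   MIN_EXPR (x, 4).  */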
4341 if (INTEGRAL_TYPE_P (type)
4342 && TREE_CODE (arg01) == INTEGER_CST
4343 && TREE_CODE (arg2) == INTEGER_CST)
4344 switch (comp_code)
4346 case EQ_EXPR:
4347 /* We can replace A with C1 in this case. */
4348 arg1 = fold_convert (type, arg01);
4349 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4351 case LT_EXPR:
4352 /* If C1 is C2 + 1, this is min(A, C2). */
4353 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4354 OEP_ONLY_CONST)
4355 && operand_equal_p (arg01,
4356 const_binop (PLUS_EXPR, arg2,
4357 integer_one_node, 0),
4358 OEP_ONLY_CONST))
4359 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4360 type, arg1, arg2)));
4361 break;
4363 case LE_EXPR:
4364 /* If C1 is C2 - 1, this is min(A, C2). */
4365 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4366 OEP_ONLY_CONST)
4367 && operand_equal_p (arg01,
4368 const_binop (MINUS_EXPR, arg2,
4369 integer_one_node, 0),
4370 OEP_ONLY_CONST))
4371 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4372 type, arg1, arg2)));
4373 break;
4375 case GT_EXPR:
4376 /* If C1 is C2 - 1, this is max(A, C2). */
4377 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4378 OEP_ONLY_CONST)
4379 && operand_equal_p (arg01,
4380 const_binop (MINUS_EXPR, arg2,
4381 integer_one_node, 0),
4382 OEP_ONLY_CONST))
4383 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4384 type, arg1, arg2)));
4385 break;
4387 case GE_EXPR:
4388 /* If C1 is C2 + 1, this is max(A, C2). */
4389 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4390 OEP_ONLY_CONST)
4391 && operand_equal_p (arg01,
4392 const_binop (PLUS_EXPR, arg2,
4393 integer_one_node, 0),
4394 OEP_ONLY_CONST))
4395 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4396 type, arg1, arg2)));
4397 break;
4398 case NE_EXPR:
4399 break;
4400 default:
4401 gcc_unreachable ();
4404 return NULL_TREE;
4409 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4410 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4411 #endif
4413 /* EXP is some logical combination of boolean tests. See if we can
4414 merge it into some range test. Return the new tree if so. */
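/* Illustrative annotation (not in the original source): for
   "ch >= '0' && ch <= '9'", the two operands yield the ranges
   +['0', -] and +[-, '9'], which merge to +['0', '9'];
   build_range_check then emits a single comparison along the lines
   of "(unsigned) (ch - '0') <= 9".  */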
4416 static tree
4417 fold_range_test (tree exp)
4419 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4420 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4421 int in0_p, in1_p, in_p;
4422 tree low0, low1, low, high0, high1, high;
4423 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4424 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4425 tree tem;
4427 /* If this is an OR operation, invert both sides; we will invert
4428 again at the end. */
4429 if (or_op)
4430 in0_p = ! in0_p, in1_p = ! in1_p;
4432 /* If both expressions are the same, if we can merge the ranges, and we
4433 can build the range test, return it or it inverted. If one of the
4434 ranges is always true or always false, consider it to be the same
4435 expression as the other. */
4436 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4437 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4438 in1_p, low1, high1)
4439 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4440 lhs != 0 ? lhs
4441 : rhs != 0 ? rhs : integer_zero_node,
4442 in_p, low, high))))
4443 return or_op ? invert_truthvalue (tem) : tem;
4445 /* On machines where branches are expensive, if this is a
4446 short-circuited branch and the underlying object on both sides
4447 is the same, make a non-short-circuit operation. */
4448 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4449 && lhs != 0 && rhs != 0
4450 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4451 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4452 && operand_equal_p (lhs, rhs, 0))
4454 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4455 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4456 which cases we can't do this. */
4457 if (simple_operand_p (lhs))
4458 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4459 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4460 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4461 TREE_OPERAND (exp, 1));
4463 else if (lang_hooks.decls.global_bindings_p () == 0
4464 && ! CONTAINS_PLACEHOLDER_P (lhs))
4466 tree common = save_expr (lhs);
4468 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4469 or_op ? ! in0_p : in0_p,
4470 low0, high0))
4471 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4472 or_op ? ! in1_p : in1_p,
4473 low1, high1))))
4474 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4475 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4476 TREE_TYPE (exp), lhs, rhs);
4480 return 0;
4483 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4484 bit value. Arrange things so the extra bits will be set to zero if and
4485 only if C is sign-extended to its full width. If MASK is nonzero,
4486 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4488 static tree
4489 unextend (tree c, int p, int unsignedp, tree mask)
4491 tree type = TREE_TYPE (c);
4492 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4493 tree temp;
4495 if (p == modesize || unsignedp)
4496 return c;
4498 /* We work by getting just the sign bit into the low-order bit, then
4499 into the high-order bit, then sign-extend. We then XOR that value
4500 with C. */
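/* Illustrative annotation (not in the original source): with P = 4 and
   an 8-bit mode, C = 0xFA (a sign-extended -6) gives TEMP = 0xF0 and
   C ^ TEMP = 0x0A, with all extra bits clear; a non-sign-extended
   C = 0x0A instead yields 0xFA, whose nonzero extra bits let the
   callers detect an always-true or always-false comparison.  */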
4501 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4502 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4504 /* We must use a signed type in order to get an arithmetic right shift.
4505 However, we must also avoid introducing accidental overflows, so that
4506 a subsequent call to integer_zerop will work. Hence we must
4507 do the type conversion here. At this point, the constant is either
4508 zero or one, and the conversion to a signed type can never overflow.
4509 We could get an overflow if this conversion is done anywhere else. */
4510 if (TYPE_UNSIGNED (type))
4511 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4513 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4514 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4515 if (mask != 0)
4516 temp = const_binop (BIT_AND_EXPR, temp,
4517 fold_convert (TREE_TYPE (c), mask), 0);
4518 /* If necessary, convert the type back to match the type of C. */
4519 if (TYPE_UNSIGNED (type))
4520 temp = fold_convert (type, temp);
4522 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4525 /* Find ways of folding logical expressions of LHS and RHS:
4526 Try to merge two comparisons to the same innermost item.
4527 Look for range tests like "ch >= '0' && ch <= '9'".
4528 Look for combinations of simple terms on machines with expensive branches
4529 and evaluate the RHS unconditionally.
4531 For example, if we have p->a == 2 && p->b == 4 and we can make an
4532 object large enough to span both A and B, we can do this with a comparison
4533 against the object ANDed with a mask.
4535 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4536 operations to do this with one comparison.
4538 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4539 function and the one above.
4541 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4542 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4544 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4545 two operands.
4547 We return the simplified tree or 0 if no optimization is possible. */
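/* Illustrative annotation (not in the original source): given
   struct s { unsigned a : 8; unsigned b : 8; }, the test
   "p->a == 2 && p->b == 4" can, when the two fields share a word,
   become a single 16-bit load compared against the merged constant,
   with the exact layout depending on BYTES_BIG_ENDIAN.  */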
4549 static tree
4550 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4552 /* If this is the "or" of two comparisons, we can do something if
4553 the comparisons are NE_EXPR. If this is the "and", we can do something
4554 if the comparisons are EQ_EXPR. I.e.,
4555 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4557 WANTED_CODE is this operation code. For single bit fields, we can
4558 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4559 comparison for one-bit fields. */
4561 enum tree_code wanted_code;
4562 enum tree_code lcode, rcode;
4563 tree ll_arg, lr_arg, rl_arg, rr_arg;
4564 tree ll_inner, lr_inner, rl_inner, rr_inner;
4565 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4566 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4567 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4568 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4569 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4570 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4571 enum machine_mode lnmode, rnmode;
4572 tree ll_mask, lr_mask, rl_mask, rr_mask;
4573 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4574 tree l_const, r_const;
4575 tree lntype, rntype, result;
4576 int first_bit, end_bit;
4577 int volatilep;
4579 /* Start by getting the comparison codes. Fail if anything is volatile.
4580 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4581 it were surrounded with a NE_EXPR. */
4583 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4584 return 0;
4586 lcode = TREE_CODE (lhs);
4587 rcode = TREE_CODE (rhs);
4589 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4591 lhs = build2 (NE_EXPR, truth_type, lhs,
4592 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4593 lcode = NE_EXPR;
4596 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4598 rhs = build2 (NE_EXPR, truth_type, rhs,
4599 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4600 rcode = NE_EXPR;
4603 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4604 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4605 return 0;
4607 ll_arg = TREE_OPERAND (lhs, 0);
4608 lr_arg = TREE_OPERAND (lhs, 1);
4609 rl_arg = TREE_OPERAND (rhs, 0);
4610 rr_arg = TREE_OPERAND (rhs, 1);
4612 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4613 if (simple_operand_p (ll_arg)
4614 && simple_operand_p (lr_arg))
4616 tree result;
4617 if (operand_equal_p (ll_arg, rl_arg, 0)
4618 && operand_equal_p (lr_arg, rr_arg, 0))
4620 result = combine_comparisons (code, lcode, rcode,
4621 truth_type, ll_arg, lr_arg);
4622 if (result)
4623 return result;
4625 else if (operand_equal_p (ll_arg, rr_arg, 0)
4626 && operand_equal_p (lr_arg, rl_arg, 0))
4628 result = combine_comparisons (code, lcode,
4629 swap_tree_comparison (rcode),
4630 truth_type, ll_arg, lr_arg);
4631 if (result)
4632 return result;
4636 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4637 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4639 /* If the RHS can be evaluated unconditionally and its operands are
4640 simple, it wins to evaluate the RHS unconditionally on machines
4641 with expensive branches. In this case, this isn't a comparison
4642 that can be merged. Avoid doing this if the RHS is a floating-point
4643 comparison since those can trap. */
4645 if (BRANCH_COST >= 2
4646 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4647 && simple_operand_p (rl_arg)
4648 && simple_operand_p (rr_arg))
4650 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4651 if (code == TRUTH_OR_EXPR
4652 && lcode == NE_EXPR && integer_zerop (lr_arg)
4653 && rcode == NE_EXPR && integer_zerop (rr_arg)
4654 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4655 return build2 (NE_EXPR, truth_type,
4656 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4657 ll_arg, rl_arg),
4658 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4660 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4661 if (code == TRUTH_AND_EXPR
4662 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4663 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4664 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4665 return build2 (EQ_EXPR, truth_type,
4666 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4667 ll_arg, rl_arg),
4668 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4670 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4671 return build2 (code, truth_type, lhs, rhs);
4674 /* See if the comparisons can be merged. Then get all the parameters for
4675 each side. */
4677 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4678 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4679 return 0;
4681 volatilep = 0;
4682 ll_inner = decode_field_reference (ll_arg,
4683 &ll_bitsize, &ll_bitpos, &ll_mode,
4684 &ll_unsignedp, &volatilep, &ll_mask,
4685 &ll_and_mask);
4686 lr_inner = decode_field_reference (lr_arg,
4687 &lr_bitsize, &lr_bitpos, &lr_mode,
4688 &lr_unsignedp, &volatilep, &lr_mask,
4689 &lr_and_mask);
4690 rl_inner = decode_field_reference (rl_arg,
4691 &rl_bitsize, &rl_bitpos, &rl_mode,
4692 &rl_unsignedp, &volatilep, &rl_mask,
4693 &rl_and_mask);
4694 rr_inner = decode_field_reference (rr_arg,
4695 &rr_bitsize, &rr_bitpos, &rr_mode,
4696 &rr_unsignedp, &volatilep, &rr_mask,
4697 &rr_and_mask);
4699 /* The inner operation on the lhs of each comparison must be the
4700 same if we are to be able to do anything.
4701 Then see if we have constants. If not, the same must be true for
4702 the rhs's. */
4703 if (volatilep || ll_inner == 0 || rl_inner == 0
4704 || ! operand_equal_p (ll_inner, rl_inner, 0))
4705 return 0;
4707 if (TREE_CODE (lr_arg) == INTEGER_CST
4708 && TREE_CODE (rr_arg) == INTEGER_CST)
4709 l_const = lr_arg, r_const = rr_arg;
4710 else if (lr_inner == 0 || rr_inner == 0
4711 || ! operand_equal_p (lr_inner, rr_inner, 0))
4712 return 0;
4713 else
4714 l_const = r_const = 0;
4716 /* If either comparison code is not correct for our logical operation,
4717 fail. However, we can convert a one-bit comparison against zero into
4718 the opposite comparison against that bit being set in the field. */
4720 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4721 if (lcode != wanted_code)
4723 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4725 /* Make the left operand unsigned, since we are only interested
4726 in the value of one bit. Otherwise we are doing the wrong
4727 thing below. */
4728 ll_unsignedp = 1;
4729 l_const = ll_mask;
4731 else
4732 return 0;
4735 /* This is analogous to the code for l_const above. */
4736 if (rcode != wanted_code)
4738 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4740 rl_unsignedp = 1;
4741 r_const = rl_mask;
4743 else
4744 return 0;
4747 /* After this point all optimizations will generate bit-field
4748 references, which we might not want. */
4749 if (! lang_hooks.can_use_bit_fields_p ())
4750 return 0;
4752 /* See if we can find a mode that contains both fields being compared on
4753 the left. If we can't, fail. Otherwise, update all constants and masks
4754 to be relative to a field of that size. */
4755 first_bit = MIN (ll_bitpos, rl_bitpos);
4756 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4757 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4758 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4759 volatilep);
4760 if (lnmode == VOIDmode)
4761 return 0;
4763 lnbitsize = GET_MODE_BITSIZE (lnmode);
4764 lnbitpos = first_bit & ~ (lnbitsize - 1);
4765 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4766 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4768 if (BYTES_BIG_ENDIAN)
4770 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4771 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4774 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4775 size_int (xll_bitpos), 0);
4776 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4777 size_int (xrl_bitpos), 0);
4779 if (l_const)
4781 l_const = fold_convert (lntype, l_const);
4782 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4783 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4784 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4785 fold (build1 (BIT_NOT_EXPR,
4786 lntype, ll_mask)),
4787 0)))
4789 warning ("comparison is always %d", wanted_code == NE_EXPR);
4791 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4794 if (r_const)
4796 r_const = fold_convert (lntype, r_const);
4797 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4798 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4799 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4800 fold (build1 (BIT_NOT_EXPR,
4801 lntype, rl_mask)),
4802 0)))
4804 warning ("comparison is always %d", wanted_code == NE_EXPR);
4806 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4810 /* If the right sides are not constant, do the same for them. Also,
4811 disallow this optimization if a size or signedness mismatch occurs
4812 between the left and right sides. */
4813 if (l_const == 0)
4815 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4816 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4817 /* Make sure the two fields on the right
4818 correspond to the left without being swapped. */
4819 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4820 return 0;
4822 first_bit = MIN (lr_bitpos, rr_bitpos);
4823 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4824 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4825 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4826 volatilep);
4827 if (rnmode == VOIDmode)
4828 return 0;
4830 rnbitsize = GET_MODE_BITSIZE (rnmode);
4831 rnbitpos = first_bit & ~ (rnbitsize - 1);
4832 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4833 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4835 if (BYTES_BIG_ENDIAN)
4837 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4838 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4841 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4842 size_int (xlr_bitpos), 0);
4843 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4844 size_int (xrr_bitpos), 0);
4846 /* Make a mask that corresponds to both fields being compared.
4847 Do this for both items being compared. If the operands are the
4848 same size and the bits being compared are in the same position
4849 then we can do this by masking both and comparing the masked
4850 results. */
4851 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4852 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4853 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4855 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4856 ll_unsignedp || rl_unsignedp);
4857 if (! all_ones_mask_p (ll_mask, lnbitsize))
4858 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4860 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4861 lr_unsignedp || rr_unsignedp);
4862 if (! all_ones_mask_p (lr_mask, rnbitsize))
4863 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4865 return build2 (wanted_code, truth_type, lhs, rhs);
4868 /* There is still another way we can do something: If both pairs of
4869 fields being compared are adjacent, we may be able to make a wider
4870 field containing them both.
4872 Note that we still must mask the lhs/rhs expressions. Furthermore,
4873 the mask must be shifted to account for the shift done by
4874 make_bit_field_ref. */
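/* Illustrative annotation (not in the original source): this handles,
   e.g., two 8-bit fields at bit positions 0 and 8 on each side, which
   are re-read as one 16-bit field; the masks are shifted right so that
   they line up with the wider reference.  */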
4875 if ((ll_bitsize + ll_bitpos == rl_bitpos
4876 && lr_bitsize + lr_bitpos == rr_bitpos)
4877 || (ll_bitpos == rl_bitpos + rl_bitsize
4878 && lr_bitpos == rr_bitpos + rr_bitsize))
4880 tree type;
4882 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4883 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4884 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4885 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4887 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4888 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4889 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4890 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4892 /* Convert to the smaller type before masking out unwanted bits. */
4893 type = lntype;
4894 if (lntype != rntype)
4896 if (lnbitsize > rnbitsize)
4898 lhs = fold_convert (rntype, lhs);
4899 ll_mask = fold_convert (rntype, ll_mask);
4900 type = rntype;
4902 else if (lnbitsize < rnbitsize)
4904 rhs = fold_convert (lntype, rhs);
4905 lr_mask = fold_convert (lntype, lr_mask);
4906 type = lntype;
4910 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4911 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4913 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4914 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4916 return build2 (wanted_code, truth_type, lhs, rhs);
4919 return 0;
4922 /* Handle the case of comparisons with constants. If there is something in
4923 common between the masks, those bits of the constants must be the same.
4924 If not, the condition is always false. Test for this to avoid generating
4925 incorrect code below. */
4926 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4927 if (! integer_zerop (result)
4928 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4929 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4931 if (wanted_code == NE_EXPR)
4933 warning ("%<or%> of unmatched not-equal tests is always 1");
4934 return constant_boolean_node (true, truth_type);
4936 else
4938 warning ("%<and%> of mutually exclusive equal-tests is always 0");
4939 return constant_boolean_node (false, truth_type);
4943 /* Construct the expression we will return. First get the component
4944 reference we will make. Unless the mask is all ones the width of
4945 that field, perform the mask operation. Then compare with the
4946 merged constant. */
4947 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4948 ll_unsignedp || rl_unsignedp);
4950 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4951 if (! all_ones_mask_p (ll_mask, lnbitsize))
4952 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4954 return build2 (wanted_code, truth_type, result,
4955 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4958 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4959 constant. */
4961 static tree
4962 optimize_minmax_comparison (tree t)
4964 tree type = TREE_TYPE (t);
4965 tree arg0 = TREE_OPERAND (t, 0);
4966 enum tree_code op_code;
4967 tree comp_const = TREE_OPERAND (t, 1);
4968 tree minmax_const;
4969 int consts_equal, consts_lt;
4970 tree inner;
4972 STRIP_SIGN_NOPS (arg0);
4974 op_code = TREE_CODE (arg0);
4975 minmax_const = TREE_OPERAND (arg0, 1);
4976 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4977 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4978 inner = TREE_OPERAND (arg0, 0);
4980 /* If something does not permit us to optimize, return the original tree. */
4981 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4982 || TREE_CODE (comp_const) != INTEGER_CST
4983 || TREE_CONSTANT_OVERFLOW (comp_const)
4984 || TREE_CODE (minmax_const) != INTEGER_CST
4985 || TREE_CONSTANT_OVERFLOW (minmax_const))
4986 return t;
4988 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4989 and GT_EXPR, doing the rest with recursive calls using logical
4990 simplifications. */
4991 switch (TREE_CODE (t))
4993 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4994 return
4995 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4997 case GE_EXPR:
4998 return
4999 fold (build2 (TRUTH_ORIF_EXPR, type,
5000 optimize_minmax_comparison
5001 (build2 (EQ_EXPR, type, arg0, comp_const)),
5002 optimize_minmax_comparison
5003 (build2 (GT_EXPR, type, arg0, comp_const))));
5005 case EQ_EXPR:
5006 if (op_code == MAX_EXPR && consts_equal)
5007 /* MAX (X, 0) == 0 -> X <= 0 */
5008 return fold (build2 (LE_EXPR, type, inner, comp_const));
5010 else if (op_code == MAX_EXPR && consts_lt)
5011 /* MAX (X, 0) == 5 -> X == 5 */
5012 return fold (build2 (EQ_EXPR, type, inner, comp_const));
5014 else if (op_code == MAX_EXPR)
5015 /* MAX (X, 0) == -1 -> false */
5016 return omit_one_operand (type, integer_zero_node, inner);
5018 else if (consts_equal)
5019 /* MIN (X, 0) == 0 -> X >= 0 */
5020 return fold (build2 (GE_EXPR, type, inner, comp_const));
5022 else if (consts_lt)
5023 /* MIN (X, 0) == 5 -> false */
5024 return omit_one_operand (type, integer_zero_node, inner);
5026 else
5027 /* MIN (X, 0) == -1 -> X == -1 */
5028 return fold (build2 (EQ_EXPR, type, inner, comp_const));
5030 case GT_EXPR:
5031 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5032 /* MAX (X, 0) > 0 -> X > 0
5033 MAX (X, 0) > 5 -> X > 5 */
5034 return fold (build2 (GT_EXPR, type, inner, comp_const));
5036 else if (op_code == MAX_EXPR)
5037 /* MAX (X, 0) > -1 -> true */
5038 return omit_one_operand (type, integer_one_node, inner);
5040 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5041 /* MIN (X, 0) > 0 -> false
5042 MIN (X, 0) > 5 -> false */
5043 return omit_one_operand (type, integer_zero_node, inner);
5045 else
5046 /* MIN (X, 0) > -1 -> X > -1 */
5047 return fold (build2 (GT_EXPR, type, inner, comp_const));
5049 default:
5050 return t;
5054 /* T is an integer expression that is being multiplied or divided by, or
5055 reduced modulo, a constant C (CODE says which operation and what kind
5056 of divide or modulus). See if we can eliminate that operation by folding it with
5057 other operations already in T. WIDE_TYPE, if non-null, is a type that
5058 should be used for the computation if wider than our type.
5060 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5061 (X * 2) + (Y * 4). We must, however, be assured that either the original
5062 expression would not overflow or that overflow is undefined for the type
5063 in the language in question.
5065 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5066 the machine has a multiply-accumulate insn or that this is part of an
5067 addressing calculation.
5069 If we return a non-null expression, it is an equivalent form of the
5070 original computation, but need not be in the original type. */
5072 static tree
5073 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5075 /* To avoid exponential search depth, refuse to allow recursion past
5076 three levels. Beyond that (1) it's highly unlikely that we'll find
5077 something interesting and (2) we've probably processed it before
5078 when we built the inner expression. */
5080 static int depth;
5081 tree ret;
5083 if (depth > 3)
5084 return NULL;
5086 depth++;
5087 ret = extract_muldiv_1 (t, c, code, wide_type);
5088 depth--;
5090 return ret;
5093 static tree
5094 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5096 tree type = TREE_TYPE (t);
5097 enum tree_code tcode = TREE_CODE (t);
5098 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5099 > GET_MODE_SIZE (TYPE_MODE (type)))
5100 ? wide_type : type);
5101 tree t1, t2;
5102 int same_p = tcode == code;
5103 tree op0 = NULL_TREE, op1 = NULL_TREE;
5105 /* Don't deal with constants of zero here; they confuse the code below. */
5106 if (integer_zerop (c))
5107 return NULL_TREE;
5109 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5110 op0 = TREE_OPERAND (t, 0);
5112 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5113 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5115 /* Note that we need not handle conditional operations here since fold
5116 already handles those cases. So just do arithmetic here. */
5117 switch (tcode)
5119 case INTEGER_CST:
5120 /* For a constant, we can always simplify if we are a multiply
5121 or (for divide and modulus) if it is a multiple of our constant. */
5122 if (code == MULT_EXPR
5123 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5124 return const_binop (code, fold_convert (ctype, t),
5125 fold_convert (ctype, c), 0);
5126 break;
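/* Illustrative annotation (not in the original source): in the
   INTEGER_CST case above, T = 12 with C = 4 and CODE = TRUNC_DIV_EXPR
   returns 3, while T = 10 is rejected because it is not a multiple
   of 4.  */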
5128 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5129 /* If op0 is an expression ... */
5130 if ((COMPARISON_CLASS_P (op0)
5131 || UNARY_CLASS_P (op0)
5132 || BINARY_CLASS_P (op0)
5133 || EXPRESSION_CLASS_P (op0))
5134 /* ... and is unsigned, and its type is smaller than ctype,
5135 then we cannot pass through as widening. */
5136 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5137 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5138 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5139 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5140 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5141 /* ... or this is a truncation (t is narrower than op0),
5142 then we cannot pass through this narrowing. */
5143 || (GET_MODE_SIZE (TYPE_MODE (type))
5144 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5145 /* ... or signedness changes for division or modulus,
5146 then we cannot pass through this conversion. */
5147 || (code != MULT_EXPR
5148 && (TYPE_UNSIGNED (ctype)
5149 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5150 break;
5152 /* Pass the constant down and see if we can make a simplification. If
5153 we can, replace this expression with the inner simplification for
5154 possible later conversion to our or some other type. */
5155 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5156 && TREE_CODE (t2) == INTEGER_CST
5157 && ! TREE_CONSTANT_OVERFLOW (t2)
5158 && (0 != (t1 = extract_muldiv (op0, t2, code,
5159 code == MULT_EXPR
5160 ? ctype : NULL_TREE))))
5161 return t1;
5162 break;
5164 case ABS_EXPR:
5165 /* If widening the type changes it from signed to unsigned, then we
5166 must avoid building ABS_EXPR itself as unsigned. */
5167 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5169 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5170 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5172 t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
5173 return fold_convert (ctype, t1);
5175 break;
5177 /* FALLTHROUGH */
5178 case NEGATE_EXPR:
5179 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5180 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5181 break;
5183 case MIN_EXPR: case MAX_EXPR:
5184 /* If widening the type changes the signedness, then we can't perform
5185 this optimization as that changes the result. */
5186 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5187 break;
5189 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5190 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5191 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5193 if (tree_int_cst_sgn (c) < 0)
5194 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5196 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5197 fold_convert (ctype, t2)));
5199 break;
5201 case LSHIFT_EXPR: case RSHIFT_EXPR:
5202 /* If the second operand is constant, this is a multiplication
5203 or floor division, by a power of two, so we can treat it that
5204 way unless the multiplier or divisor overflows. Signed
5205 left-shift overflow is implementation-defined rather than
5206 undefined in C90, so do not convert signed left shift into
5207 multiplication. */
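/* Illustrative annotation (not in the original source): for an
   unsigned X, "(X << 3) * 4" is first viewed as "(X * 8) * 4" via the
   rewrite below, and the MULT_EXPR case then folds it to "X * 32".  */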
5208 if (TREE_CODE (op1) == INTEGER_CST
5209 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5210 /* const_binop may not detect overflow correctly,
5211 so check for it explicitly here. */
5212 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5213 && TREE_INT_CST_HIGH (op1) == 0
5214 && 0 != (t1 = fold_convert (ctype,
5215 const_binop (LSHIFT_EXPR,
5216 size_one_node,
5217 op1, 0)))
5218 && ! TREE_OVERFLOW (t1))
5219 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5220 ? MULT_EXPR : FLOOR_DIV_EXPR,
5221 ctype, fold_convert (ctype, op0), t1),
5222 c, code, wide_type);
5223 break;
5225 case PLUS_EXPR: case MINUS_EXPR:
5226 /* See if we can eliminate the operation on both sides. If we can, we
5227 can return a new PLUS or MINUS. If we can't, the only remaining
5228 cases where we can do anything are if the second operand is a
5229 constant. */
5230 t1 = extract_muldiv (op0, c, code, wide_type);
5231 t2 = extract_muldiv (op1, c, code, wide_type);
5232 if (t1 != 0 && t2 != 0
5233 && (code == MULT_EXPR
5234 /* If not multiplication, we can only do this if both operands
5235 are divisible by c. */
5236 || (multiple_of_p (ctype, op0, c)
5237 && multiple_of_p (ctype, op1, c))))
5238 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5239 fold_convert (ctype, t2)));
5241 /* If this was a subtraction, negate OP1 and set it to be an addition.
5242 This simplifies the logic below. */
5243 if (tcode == MINUS_EXPR)
5244 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5246 if (TREE_CODE (op1) != INTEGER_CST)
5247 break;
5249 /* If either OP1 or C are negative, this optimization is not safe for
5250 some of the division and remainder types while for others we need
5251 to change the code. */
5252 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5254 if (code == CEIL_DIV_EXPR)
5255 code = FLOOR_DIV_EXPR;
5256 else if (code == FLOOR_DIV_EXPR)
5257 code = CEIL_DIV_EXPR;
5258 else if (code != MULT_EXPR
5259 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5260 break;
5263 /* If it's a multiply or a division/modulus operation of a multiple
5264 of our constant, do the operation and verify it doesn't overflow. */
5265 if (code == MULT_EXPR
5266 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5268 op1 = const_binop (code, fold_convert (ctype, op1),
5269 fold_convert (ctype, c), 0);
5270 /* We allow the constant to overflow with wrapping semantics. */
5271 if (op1 == 0
5272 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5273 break;
5275 else
5276 break;
5278 /* If we have an unsigned type that is not a sizetype, we cannot widen
5279 the operation since it will change the result if the original
5280 computation overflowed. */
5281 if (TYPE_UNSIGNED (ctype)
5282 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5283 && ctype != type)
5284 break;
5286 /* If we were able to eliminate our operation from the first side,
5287 apply our operation to the second side and reform the PLUS. */
5288 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5289 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5291 /* The last case is if we are a multiply. In that case, we can
5292 apply the distributive law to commute the multiply and addition
5293 if the multiplication of the constants doesn't overflow. */
5294 if (code == MULT_EXPR)
5295 return fold (build2 (tcode, ctype,
5296 fold (build2 (code, ctype,
5297 fold_convert (ctype, op0),
5298 fold_convert (ctype, c))),
5299 op1));
5301 break;
5303 case MULT_EXPR:
5304 /* We have a special case here if we are doing something like
5305 (C * 8) % 4 since we know that's zero. */
5306 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5307 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5308 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5309 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5310 return omit_one_operand (type, integer_zero_node, op0);
5312 /* ... fall through ... */
5314 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5315 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5316 /* If we can extract our operation from the LHS, do so and return a
5317 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5318 do something only if the second operand is a constant. */
5319 if (same_p
5320 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5321 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5322 fold_convert (ctype, op1)));
5323 else if (tcode == MULT_EXPR && code == MULT_EXPR
5324 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5325 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5326 fold_convert (ctype, t1)));
5327 else if (TREE_CODE (op1) != INTEGER_CST)
5328 return 0;
5330 /* If these are the same operation types, we can associate them
5331 assuming no overflow. */
5332 if (tcode == code
5333 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5334 fold_convert (ctype, c), 0))
5335 && ! TREE_OVERFLOW (t1))
5336 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5338 /* If these operations "cancel" each other, we have the main
5339 optimizations of this pass, which occur when either constant is a
5340 multiple of the other, in which case we replace this with either an
5341 operation of CODE or TCODE.
5343 If we have an unsigned type that is not a sizetype, we cannot do
5344 this since it will change the result if the original computation
5345 overflowed. */
5346 if ((! TYPE_UNSIGNED (ctype)
5347 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5348 && ! flag_wrapv
5349 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5350 || (tcode == MULT_EXPR
5351 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5352 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5354 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5355 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5356 fold_convert (ctype,
5357 const_binop (TRUNC_DIV_EXPR,
5358 op1, c, 0))));
5359 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5360 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5361 fold_convert (ctype,
5362 const_binop (TRUNC_DIV_EXPR,
5363 c, op1, 0))));
5365 break;
5367 default:
5368 break;
5371 return 0;
5374 /* Return a node which has the indicated constant VALUE (either 0 or
5375 1), and is of the indicated TYPE. */
5377 tree
5378 constant_boolean_node (int value, tree type)
5380 if (type == integer_type_node)
5381 return value ? integer_one_node : integer_zero_node;
5382 else if (type == boolean_type_node)
5383 return value ? boolean_true_node : boolean_false_node;
5384 else
5385 return build_int_cst (type, value);
5389 /* Return true if expr looks like an ARRAY_REF and set base and
5390 offset to the appropriate trees. If there is no offset,
5391 offset is set to NULL_TREE. */
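/* Illustrative annotation (not in the original source): for
   "&a + i" the PLUS_EXPR form applies (base "&a", offset "i"), while
   for "&a[i]" the ADDR_EXPR-of-ARRAY_REF form applies and likewise
   yields base "&a" with offset "i".  */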
5393 static bool
5394 extract_array_ref (tree expr, tree *base, tree *offset)
5396 /* We have to be careful when stripping nops, as changing the
5397 base type can change the meaning of the offset. */
5398 tree inner_expr = expr;
5399 STRIP_NOPS (inner_expr);
5400 /* One canonical form is a PLUS_EXPR with the first
5401 argument being an ADDR_EXPR with a possible NOP_EXPR
5402 attached. */
5403 if (TREE_CODE (expr) == PLUS_EXPR)
5405 tree op0 = TREE_OPERAND (expr, 0);
5406 STRIP_NOPS (op0);
5407 if (TREE_CODE (op0) == ADDR_EXPR)
5409 *base = TREE_OPERAND (expr, 0);
5410 *offset = TREE_OPERAND (expr, 1);
5411 return true;
5414 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5415 which we transform into an ADDR_EXPR with appropriate
5416 offset. For other arguments to the ADDR_EXPR we assume
5417 zero offset and as such do not care about the ADDR_EXPR
5418 type and strip possible nops from it. */
5419 else if (TREE_CODE (inner_expr) == ADDR_EXPR)
5421 tree op0 = TREE_OPERAND (inner_expr, 0);
5422 if (TREE_CODE (op0) == ARRAY_REF)
5424 *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
5425 *offset = TREE_OPERAND (op0, 1);
5427 else
5429 *base = inner_expr;
5430 *offset = NULL_TREE;
5432 return true;
5435 return false;
5439 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5440 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5441 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5442 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5443 COND is the first argument to CODE; otherwise (as in the example
5444 given here), it is the second argument. TYPE is the type of the
5445 original expression. Return NULL_TREE if no simplification is
5446 possible. */
5448 static tree
5449 fold_binary_op_with_conditional_arg (tree t, enum tree_code code, tree cond,
5450 tree arg, int cond_first_p)
5452 const tree type = TREE_TYPE (t);
5453 tree cond_type = cond_first_p ? TREE_TYPE (TREE_OPERAND (t, 0))
5454 : TREE_TYPE (TREE_OPERAND (t, 1));
5455 tree arg_type = cond_first_p ? TREE_TYPE (TREE_OPERAND (t, 1))
5456 : TREE_TYPE (TREE_OPERAND (t, 0));
5457 tree test, true_value, false_value;
5458 tree lhs = NULL_TREE;
5459 tree rhs = NULL_TREE;
5461 /* This transformation is only worthwhile if we don't have to wrap
5462 arg in a SAVE_EXPR, and the operation can be simplified on at least
5463 one of the branches once it's pushed inside the COND_EXPR. */
5464 if (!TREE_CONSTANT (arg))
5465 return NULL_TREE;
5467 if (TREE_CODE (cond) == COND_EXPR)
5469 test = TREE_OPERAND (cond, 0);
5470 true_value = TREE_OPERAND (cond, 1);
5471 false_value = TREE_OPERAND (cond, 2);
5472 /* If this operand is a void-typed expression such as a throw, then it does not make
5473 sense to try to perform a logical or arithmetic operation
5474 involving it. */
5475 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5476 lhs = true_value;
5477 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5478 rhs = false_value;
5480 else
5482 tree testtype = TREE_TYPE (cond);
5483 test = cond;
5484 true_value = constant_boolean_node (true, testtype);
5485 false_value = constant_boolean_node (false, testtype);
5488 arg = fold_convert (arg_type, arg);
5489 if (lhs == 0)
5491 true_value = fold_convert (cond_type, true_value);
5492 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5493 : build2 (code, type, arg, true_value));
5495 if (rhs == 0)
5497 false_value = fold_convert (cond_type, false_value);
5498 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5499 : build2 (code, type, arg, false_value));
5502 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5503 return fold_convert (type, test);
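/* For instance, `2 + (b ? 3 : 4)' is rewritten as `b ? 2 + 3 : 2 + 4'
   and folds to `b ? 5 : 6', while `1 + (x < y)' first becomes
   `(x < y) ? 1 + 1 : 1 + 0', i.e. `(x < y) ? 2 : 1'.  Both folds
   require ARG (here the constant) to satisfy TREE_CONSTANT.  */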
5507 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5509 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5510 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5511 ADDEND is the same as X.
5513 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5514 and finite. The problematic cases are when X is zero, and its mode
5515 has signed zeros. In the case of rounding towards -infinity,
5516 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5517 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5519 static bool
5520 fold_real_zero_addition_p (tree type, tree addend, int negate)
5522 if (!real_zerop (addend))
5523 return false;
5525 /* Don't allow the fold with -fsignaling-nans. */
5526 if (HONOR_SNANS (TYPE_MODE (type)))
5527 return false;
5529 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5530 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5531 return true;
5533 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5534 if (TREE_CODE (addend) == REAL_CST
5535 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5536 negate = !negate;
5538 /* The mode has signed zeros, and we have to honor their sign.
5539 In this situation, there is only one case we can return true for.
5540 X - 0 is the same as X unless rounding towards -infinity is
5541 supported. */
5542 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
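/* Concretely, for double X this allows folding `X - 0.0' to `X' unless
   sign-dependent rounding is honored, but never `X + 0.0' when signed
   zeros are honored, because `-0.0 + 0.0' evaluates to `+0.0', not
   `-0.0'.  Likewise `X - -0.0' is treated as `X + 0.0' and not folded.  */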
5545 /* Subroutine of fold() that checks comparisons of built-in math
5546 functions against real constants.
5548 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5549 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5550 is the type of the result and ARG0 and ARG1 are the operands of the
5551 comparison. ARG1 must be a TREE_REAL_CST.
5553 The function returns the constant folded tree if a simplification
5554 can be made, and NULL_TREE otherwise. */
5556 static tree
5557 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5558 tree type, tree arg0, tree arg1)
5560 REAL_VALUE_TYPE c;
5562 if (BUILTIN_SQRT_P (fcode))
5564 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5565 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5567 c = TREE_REAL_CST (arg1);
5568 if (REAL_VALUE_NEGATIVE (c))
5570 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are always false, if y is negative. */
5571 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5572 return omit_one_operand (type, integer_zero_node, arg);
5574 /* sqrt(x) > y is always true, if y is negative and we
5575 don't care about NaNs, i.e. negative values of x. */
5576 if (code == NE_EXPR || !HONOR_NANS (mode))
5577 return omit_one_operand (type, integer_one_node, arg);
5579 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5580 return fold (build2 (GE_EXPR, type, arg,
5581 build_real (TREE_TYPE (arg), dconst0)));
5583 else if (code == GT_EXPR || code == GE_EXPR)
5585 REAL_VALUE_TYPE c2;
5587 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5588 real_convert (&c2, mode, &c2);
5590 if (REAL_VALUE_ISINF (c2))
5592 /* sqrt(x) > y is x == +Inf, when y is very large. */
5593 if (HONOR_INFINITIES (mode))
5594 return fold (build2 (EQ_EXPR, type, arg,
5595 build_real (TREE_TYPE (arg), c2)));
5597 /* sqrt(x) > y is always false, when y is very large
5598 and we don't care about infinities. */
5599 return omit_one_operand (type, integer_zero_node, arg);
5602 /* sqrt(x) > c is the same as x > c*c. */
5603 return fold (build2 (code, type, arg,
5604 build_real (TREE_TYPE (arg), c2)));
5606 else if (code == LT_EXPR || code == LE_EXPR)
5608 REAL_VALUE_TYPE c2;
5610 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5611 real_convert (&c2, mode, &c2);
5613 if (REAL_VALUE_ISINF (c2))
5615 /* sqrt(x) < y is always true, when y is a very large
5616 value and we don't care about NaNs or Infinities. */
5617 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5618 return omit_one_operand (type, integer_one_node, arg);
5620 /* sqrt(x) < y is x != +Inf when y is very large and we
5621 don't care about NaNs. */
5622 if (! HONOR_NANS (mode))
5623 return fold (build2 (NE_EXPR, type, arg,
5624 build_real (TREE_TYPE (arg), c2)));
5626 /* sqrt(x) < y is x >= 0 when y is very large and we
5627 don't care about Infinities. */
5628 if (! HONOR_INFINITIES (mode))
5629 return fold (build2 (GE_EXPR, type, arg,
5630 build_real (TREE_TYPE (arg), dconst0)));
5632 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5633 if (lang_hooks.decls.global_bindings_p () != 0
5634 || CONTAINS_PLACEHOLDER_P (arg))
5635 return NULL_TREE;
5637 arg = save_expr (arg);
5638 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5639 fold (build2 (GE_EXPR, type, arg,
5640 build_real (TREE_TYPE (arg),
5641 dconst0))),
5642 fold (build2 (NE_EXPR, type, arg,
5643 build_real (TREE_TYPE (arg),
5644 c2)))));
5647 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5648 if (! HONOR_NANS (mode))
5649 return fold (build2 (code, type, arg,
5650 build_real (TREE_TYPE (arg), c2)));
5652 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5653 if (lang_hooks.decls.global_bindings_p () == 0
5654 && ! CONTAINS_PLACEHOLDER_P (arg))
5656 arg = save_expr (arg);
5657 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5658 fold (build2 (GE_EXPR, type, arg,
5659 build_real (TREE_TYPE (arg),
5660 dconst0))),
5661 fold (build2 (code, type, arg,
5662 build_real (TREE_TYPE (arg),
5663 c2)))));
5668 return NULL_TREE;
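/* For example, with y == 2.0 the comparison `sqrt(x) > 2.0' folds to
   `x > 4.0' since c2 == 4.0, while `sqrt(x) < 2.0' folds to `x < 4.0'
   when NaNs need not be honored and to `x >= 0.0 && x < 4.0' when they
   must.  A negative bound such as `sqrt(x) <= -1.0' folds directly to
   constant zero.  */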
5671 /* Subroutine of fold() that optimizes comparisons against Infinities,
5672 either +Inf or -Inf.
5674 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5675 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5676 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5678 The function returns the constant folded tree if a simplification
5679 can be made, and NULL_TREE otherwise. */
5681 static tree
5682 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5684 enum machine_mode mode;
5685 REAL_VALUE_TYPE max;
5686 tree temp;
5687 bool neg;
5689 mode = TYPE_MODE (TREE_TYPE (arg0));
5691 /* For negative infinity swap the sense of the comparison. */
5692 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5693 if (neg)
5694 code = swap_tree_comparison (code);
5696 switch (code)
5698 case GT_EXPR:
5699 /* x > +Inf is always false, if we ignore sNaNs. */
5700 if (HONOR_SNANS (mode))
5701 return NULL_TREE;
5702 return omit_one_operand (type, integer_zero_node, arg0);
5704 case LE_EXPR:
5705 /* x <= +Inf is always true, if we don't care about NaNs. */
5706 if (! HONOR_NANS (mode))
5707 return omit_one_operand (type, integer_one_node, arg0);
5709 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5710 if (lang_hooks.decls.global_bindings_p () == 0
5711 && ! CONTAINS_PLACEHOLDER_P (arg0))
5713 arg0 = save_expr (arg0);
5714 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5716 break;
5718 case EQ_EXPR:
5719 case GE_EXPR:
5720 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5721 real_maxval (&max, neg, mode);
5722 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5723 arg0, build_real (TREE_TYPE (arg0), max)));
5725 case LT_EXPR:
5726 /* x < +Inf is always equal to x <= DBL_MAX. */
5727 real_maxval (&max, neg, mode);
5728 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5729 arg0, build_real (TREE_TYPE (arg0), max)));
5731 case NE_EXPR:
5732 /* x != +Inf is always equal to !(x > DBL_MAX). */
5733 real_maxval (&max, neg, mode);
5734 if (! HONOR_NANS (mode))
5735 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5736 arg0, build_real (TREE_TYPE (arg0), max)));
5738 /* The transformation below creates non-gimple code and thus is
5739 not appropriate if we are in gimple form. */
5740 if (in_gimple_form)
5741 return NULL_TREE;
5743 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5744 arg0, build_real (TREE_TYPE (arg0), max)));
5745 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5747 default:
5748 break;
5751 return NULL_TREE;
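/* As an illustration, for double X the comparison `X < +Inf' becomes
   `X <= DBL_MAX', `X >= +Inf' becomes `X > DBL_MAX', and `X > +Inf'
   folds to constant zero outright when signaling NaNs need not be
   honored.  For -Inf the sense of the comparison is swapped first,
   so e.g. `X > -Inf' becomes `X >= -DBL_MAX'.  */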
5754 /* Subroutine of fold() that optimizes comparisons of a division by
5755 a nonzero integer constant against an integer constant, i.e.
5756 X/C1 op C2.
5758 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5759 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5760 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5762 The function returns the constant folded tree if a simplification
5763 can be made, and NULL_TREE otherwise. */
5765 static tree
5766 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5768 tree prod, tmp, hi, lo;
5769 tree arg00 = TREE_OPERAND (arg0, 0);
5770 tree arg01 = TREE_OPERAND (arg0, 1);
5771 unsigned HOST_WIDE_INT lpart;
5772 HOST_WIDE_INT hpart;
5773 int overflow;
5775 /* We have to do this the hard way to detect unsigned overflow.
5776 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5777 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5778 TREE_INT_CST_HIGH (arg01),
5779 TREE_INT_CST_LOW (arg1),
5780 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5781 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5782 prod = force_fit_type (prod, -1, overflow, false);
5784 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5786 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5787 lo = prod;
5789 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5790 overflow = add_double (TREE_INT_CST_LOW (prod),
5791 TREE_INT_CST_HIGH (prod),
5792 TREE_INT_CST_LOW (tmp),
5793 TREE_INT_CST_HIGH (tmp),
5794 &lpart, &hpart);
5795 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5796 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5797 TREE_CONSTANT_OVERFLOW (prod));
5799 else if (tree_int_cst_sgn (arg01) >= 0)
5801 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5802 switch (tree_int_cst_sgn (arg1))
5804 case -1:
5805 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5806 hi = prod;
5807 break;
5809 case 0:
5810 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5811 hi = tmp;
5812 break;
5814 case 1:
5815 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5816 lo = prod;
5817 break;
5819 default:
5820 gcc_unreachable ();
5823 else
5825 /* A negative divisor reverses the relational operators. */
5826 code = swap_tree_comparison (code);
5828 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5829 switch (tree_int_cst_sgn (arg1))
5831 case -1:
5832 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5833 lo = prod;
5834 break;
5836 case 0:
5837 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5838 lo = tmp;
5839 break;
5841 case 1:
5842 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5843 hi = prod;
5844 break;
5846 default:
5847 gcc_unreachable ();
5851 switch (code)
5853 case EQ_EXPR:
5854 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5855 return omit_one_operand (type, integer_zero_node, arg00);
5856 if (TREE_OVERFLOW (hi))
5857 return fold (build2 (GE_EXPR, type, arg00, lo));
5858 if (TREE_OVERFLOW (lo))
5859 return fold (build2 (LE_EXPR, type, arg00, hi));
5860 return build_range_check (type, arg00, 1, lo, hi);
5862 case NE_EXPR:
5863 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5864 return omit_one_operand (type, integer_one_node, arg00);
5865 if (TREE_OVERFLOW (hi))
5866 return fold (build2 (LT_EXPR, type, arg00, lo));
5867 if (TREE_OVERFLOW (lo))
5868 return fold (build2 (GT_EXPR, type, arg00, hi));
5869 return build_range_check (type, arg00, 0, lo, hi);
5871 case LT_EXPR:
5872 if (TREE_OVERFLOW (lo))
5873 return omit_one_operand (type, integer_zero_node, arg00);
5874 return fold (build2 (LT_EXPR, type, arg00, lo));
5876 case LE_EXPR:
5877 if (TREE_OVERFLOW (hi))
5878 return omit_one_operand (type, integer_one_node, arg00);
5879 return fold (build2 (LE_EXPR, type, arg00, hi));
5881 case GT_EXPR:
5882 if (TREE_OVERFLOW (hi))
5883 return omit_one_operand (type, integer_zero_node, arg00);
5884 return fold (build2 (GT_EXPR, type, arg00, hi));
5886 case GE_EXPR:
5887 if (TREE_OVERFLOW (lo))
5888 return omit_one_operand (type, integer_one_node, arg00);
5889 return fold (build2 (GE_EXPR, type, arg00, lo));
5891 default:
5892 break;
5895 return NULL_TREE;
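/* Worked example for signed X: `X / 3 == 2' gives prod == 6, tmp == 2,
   lo == 6 and hi == 8, so the comparison folds to the range check
   `6 <= X && X <= 8' -- exactly the values whose truncating division
   by 3 yields 2.  Similarly `X / 3 < 2' folds to `X < 6', and a
   negative divisor first swaps the comparison code.  */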
5899 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5900 equality/inequality test, then return a simplified form of
5901 the test using shifts and logical operations. Otherwise return
5902 NULL. TYPE is the desired result type. */
5904 tree
5905 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5906 tree result_type)
5908 /* If this is testing a single bit, we can optimize the test. */
5909 if ((code == NE_EXPR || code == EQ_EXPR)
5910 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5911 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5913 tree inner = TREE_OPERAND (arg0, 0);
5914 tree type = TREE_TYPE (arg0);
5915 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5916 enum machine_mode operand_mode = TYPE_MODE (type);
5917 int ops_unsigned;
5918 tree signed_type, unsigned_type, intermediate_type;
5919 tree arg00;
5921 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5922 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5923 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5924 if (arg00 != NULL_TREE
5925 /* This is only a win if casting to a signed type is cheap,
5926 i.e. when arg00's type is not a partial mode. */
5927 && TYPE_PRECISION (TREE_TYPE (arg00))
5928 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5930 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5931 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5932 result_type, fold_convert (stype, arg00),
5933 fold_convert (stype, integer_zero_node)));
5936 /* Otherwise we have (A & C) != 0 where C is a single bit,
5937 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5938 Similarly for (A & C) == 0. */
5940 /* If INNER is a right shift of a constant and it plus BITNUM does
5941 not overflow, adjust BITNUM and INNER. */
5942 if (TREE_CODE (inner) == RSHIFT_EXPR
5943 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5944 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5945 && bitnum < TYPE_PRECISION (type)
5946 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5947 bitnum - TYPE_PRECISION (type)))
5949 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5950 inner = TREE_OPERAND (inner, 0);
5953 /* If we are going to be able to omit the AND below, we must do our
5954 operations as unsigned. If we must use the AND, we have a choice.
5955 Normally unsigned is faster, but for some machines signed is. */
5956 #ifdef LOAD_EXTEND_OP
5957 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
5958 && !flag_syntax_only) ? 0 : 1;
5959 #else
5960 ops_unsigned = 1;
5961 #endif
5963 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5964 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5965 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5966 inner = fold_convert (intermediate_type, inner);
5968 if (bitnum != 0)
5969 inner = build2 (RSHIFT_EXPR, intermediate_type,
5970 inner, size_int (bitnum));
5972 if (code == EQ_EXPR)
5973 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5974 inner, integer_one_node));
5976 /* Put the AND last so it can combine with more things. */
5977 inner = build2 (BIT_AND_EXPR, intermediate_type,
5978 inner, integer_one_node);
5980 /* Make sure to return the proper type. */
5981 inner = fold_convert (result_type, inner);
5983 return inner;
5985 return NULL_TREE;
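/* For instance, `(X & 8) != 0' becomes `(X >> 3) & 1', and
   `(X & 8) == 0' becomes `((X >> 3) ^ 1) & 1'.  When the mask is the
   sign bit, as in `(X & 0x80000000) != 0' for a 32-bit X, the
   sign_bit_p path instead produces the cheaper `X < 0'.  */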
5988 /* Check whether we are allowed to reorder operands arg0 and arg1,
5989 such that the evaluation of arg1 occurs before arg0. */
5991 static bool
5992 reorder_operands_p (tree arg0, tree arg1)
5994 if (! flag_evaluation_order)
5995 return true;
5996 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5997 return true;
5998 return ! TREE_SIDE_EFFECTS (arg0)
5999 && ! TREE_SIDE_EFFECTS (arg1);
6002 /* Test whether it is preferable to swap two operands, ARG0 and
6003 ARG1, for example because ARG0 is an integer constant and ARG1
6004 isn't. If REORDER is true, only recommend swapping if we can
6005 evaluate the operands in reverse order. */
6007 bool
6008 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6010 STRIP_SIGN_NOPS (arg0);
6011 STRIP_SIGN_NOPS (arg1);
6013 if (TREE_CODE (arg1) == INTEGER_CST)
6014 return 0;
6015 if (TREE_CODE (arg0) == INTEGER_CST)
6016 return 1;
6018 if (TREE_CODE (arg1) == REAL_CST)
6019 return 0;
6020 if (TREE_CODE (arg0) == REAL_CST)
6021 return 1;
6023 if (TREE_CODE (arg1) == COMPLEX_CST)
6024 return 0;
6025 if (TREE_CODE (arg0) == COMPLEX_CST)
6026 return 1;
6028 if (TREE_CONSTANT (arg1))
6029 return 0;
6030 if (TREE_CONSTANT (arg0))
6031 return 1;
6033 if (optimize_size)
6034 return 0;
6036 if (reorder && flag_evaluation_order
6037 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6038 return 0;
6040 if (DECL_P (arg1))
6041 return 0;
6042 if (DECL_P (arg0))
6043 return 1;
6045 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6046 for commutative and comparison operators. Ensuring a canonical
6047 form allows the optimizers to find additional redundancies without
6048 having to explicitly check for both orderings. */
6049 if (TREE_CODE (arg0) == SSA_NAME
6050 && TREE_CODE (arg1) == SSA_NAME
6051 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6052 return 1;
6054 return 0;
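/* Thus for `5 + x' the operands are swapped, canonicalizing the tree
   to `x + 5' (constants go second), and two SSA names are ordered by
   version number so that equivalent expressions always present their
   operands in the same order.  */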
6057 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6058 ARG0 is extended to a wider type. */
6060 static tree
6061 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6063 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6064 tree arg1_unw;
6065 tree shorter_type, outer_type;
6066 tree min, max;
6067 bool above, below;
6069 if (arg0_unw == arg0)
6070 return NULL_TREE;
6071 shorter_type = TREE_TYPE (arg0_unw);
6073 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6074 return NULL_TREE;
6076 arg1_unw = get_unwidened (arg1, shorter_type);
6077 if (!arg1_unw)
6078 return NULL_TREE;
6080 /* If possible, express the comparison in the shorter mode. */
6081 if ((code == EQ_EXPR || code == NE_EXPR
6082 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6083 && (TREE_TYPE (arg1_unw) == shorter_type
6084 || (TREE_CODE (arg1_unw) == INTEGER_CST
6085 && TREE_CODE (shorter_type) == INTEGER_TYPE
6086 && int_fits_type_p (arg1_unw, shorter_type))))
6087 return fold (build (code, type, arg0_unw,
6088 fold_convert (shorter_type, arg1_unw)));
6090 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6091 return NULL_TREE;
6093 /* If we are comparing with an integer that does not fit into the range
6094 of the shorter type, the result is known. */
6095 outer_type = TREE_TYPE (arg1_unw);
6096 min = lower_bound_in_type (outer_type, shorter_type);
6097 max = upper_bound_in_type (outer_type, shorter_type);
6099 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6100 max, arg1_unw));
6101 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6102 arg1_unw, min));
6104 switch (code)
6106 case EQ_EXPR:
6107 if (above || below)
6108 return omit_one_operand (type, integer_zero_node, arg0);
6109 break;
6111 case NE_EXPR:
6112 if (above || below)
6113 return omit_one_operand (type, integer_one_node, arg0);
6114 break;
6116 case LT_EXPR:
6117 case LE_EXPR:
6118 if (above)
6119 return omit_one_operand (type, integer_one_node, arg0);
6120 else if (below)
6121 return omit_one_operand (type, integer_zero_node, arg0);
6123 case GT_EXPR:
6124 case GE_EXPR:
6125 if (above)
6126 return omit_one_operand (type, integer_zero_node, arg0);
6127 else if (below)
6128 return omit_one_operand (type, integer_one_node, arg0);
6130 default:
6131 break;
6134 return NULL_TREE;
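/* For example, if C has type unsigned char, `(int) C == 200' is
   rewritten as the narrower `C == 200', and `(int) C > 300' is known
   to be false (ABOVE is set since 255 < 300), so it folds to constant
   zero while preserving any side effects of the operand.  */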
6137 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6138 ARG0 just the signedness is changed. */
6140 static tree
6141 fold_sign_changed_comparison (enum tree_code code, tree type,
6142 tree arg0, tree arg1)
6144 tree arg0_inner, tmp;
6145 tree inner_type, outer_type;
6147 if (TREE_CODE (arg0) != NOP_EXPR)
6148 return NULL_TREE;
6150 outer_type = TREE_TYPE (arg0);
6151 arg0_inner = TREE_OPERAND (arg0, 0);
6152 inner_type = TREE_TYPE (arg0_inner);
6154 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6155 return NULL_TREE;
6157 if (TREE_CODE (arg1) != INTEGER_CST
6158 && !(TREE_CODE (arg1) == NOP_EXPR
6159 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6160 return NULL_TREE;
6162 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6163 && code != NE_EXPR
6164 && code != EQ_EXPR)
6165 return NULL_TREE;
6167 if (TREE_CODE (arg1) == INTEGER_CST)
6169 tmp = build_int_cst_wide (inner_type,
6170 TREE_INT_CST_LOW (arg1),
6171 TREE_INT_CST_HIGH (arg1));
6172 arg1 = force_fit_type (tmp, 0,
6173 TREE_OVERFLOW (arg1),
6174 TREE_CONSTANT_OVERFLOW (arg1));
6176 else
6177 arg1 = fold_convert (inner_type, arg1);
6179 return fold (build (code, type, arg0_inner, arg1));
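/* For instance, with unsigned int U the test `(int) U == 5' folds to
   `U == 5U': the conversion only changes signedness, and equality is
   insensitive to it.  An ordered comparison such as `(int) U < 5' is
   left alone when the signedness differs.  */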
6182 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6183 the step of the array. ADDR is the address. MULT is the multiplicative expression.
6184 If the function succeeds, the new address expression is returned. Otherwise
6185 NULL_TREE is returned. */
6187 static tree
6188 try_move_mult_to_index (enum tree_code code, tree addr, tree mult)
6190 tree s, delta, step;
6191 tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
6192 tree ref = TREE_OPERAND (addr, 0), pref;
6193 tree ret, pos;
6194 tree itype;
6196 STRIP_NOPS (arg0);
6197 STRIP_NOPS (arg1);
6199 if (TREE_CODE (arg0) == INTEGER_CST)
6201 s = arg0;
6202 delta = arg1;
6204 else if (TREE_CODE (arg1) == INTEGER_CST)
6206 s = arg1;
6207 delta = arg0;
6209 else
6210 return NULL_TREE;
6212 for (;; ref = TREE_OPERAND (ref, 0))
6214 if (TREE_CODE (ref) == ARRAY_REF)
6216 step = array_ref_element_size (ref);
6218 if (TREE_CODE (step) != INTEGER_CST)
6219 continue;
6221 itype = TREE_TYPE (step);
6223 /* If the type sizes do not match, we might run into problems
6224 when one of them would overflow. */
6225 if (TYPE_PRECISION (itype) != TYPE_PRECISION (TREE_TYPE (s)))
6226 continue;
6228 if (!operand_equal_p (step, fold_convert (itype, s), 0))
6229 continue;
6231 delta = fold_convert (itype, delta);
6232 break;
6235 if (!handled_component_p (ref))
6236 return NULL_TREE;
6239 /* We found a suitable array reference. So copy everything up to it,
6240 and replace the index. */
6242 pref = TREE_OPERAND (addr, 0);
6243 ret = copy_node (pref);
6244 pos = ret;
6246 while (pref != ref)
6248 pref = TREE_OPERAND (pref, 0);
6249 TREE_OPERAND (pos, 0) = copy_node (pref);
6250 pos = TREE_OPERAND (pos, 0);
6253 TREE_OPERAND (pos, 1) = fold (build2 (code, itype,
6254 TREE_OPERAND (pos, 1),
6255 delta));
6257 return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
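/* E.g. for `int a[10]', where the element size is 4, the address
   `&a[i] + 4 * j' is rewritten as `&a[i + j]'; the fold is refused
   when the multiplier does not match the element size or when the
   index types differ in precision.  */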
6261 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6262 means A >= Y && A != MAX, but in this case we know that
6263 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6265 static tree
6266 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6268 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6270 if (TREE_CODE (bound) == LT_EXPR)
6271 a = TREE_OPERAND (bound, 0);
6272 else if (TREE_CODE (bound) == GT_EXPR)
6273 a = TREE_OPERAND (bound, 1);
6274 else
6275 return NULL_TREE;
6277 typea = TREE_TYPE (a);
6278 if (!INTEGRAL_TYPE_P (typea)
6279 && !POINTER_TYPE_P (typea))
6280 return NULL_TREE;
6282 if (TREE_CODE (ineq) == LT_EXPR)
6284 a1 = TREE_OPERAND (ineq, 1);
6285 y = TREE_OPERAND (ineq, 0);
6287 else if (TREE_CODE (ineq) == GT_EXPR)
6289 a1 = TREE_OPERAND (ineq, 0);
6290 y = TREE_OPERAND (ineq, 1);
6292 else
6293 return NULL_TREE;
6295 if (TREE_TYPE (a1) != typea)
6296 return NULL_TREE;
6298 diff = fold (build2 (MINUS_EXPR, typea, a1, a));
6299 if (!integer_onep (diff))
6300 return NULL_TREE;
6302 return fold (build2 (GE_EXPR, type, a, y));
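/* For example, with INEQ being `a + 1 > y' and BOUND being `a < x',
   DIFF is `(a + 1) - a', which folds to 1, so the result is `a >= y'.
   If the difference does not fold to exactly 1, no simplification is
   attempted.  */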
6305 /* Fold complex addition when both components are accessible by parts.
6306 Return non-null if successful. CODE should be PLUS_EXPR for addition,
6307 or MINUS_EXPR for subtraction. */
6309 static tree
6310 fold_complex_add (tree type, tree ac, tree bc, enum tree_code code)
6312 tree ar, ai, br, bi, rr, ri, inner_type;
6314 if (TREE_CODE (ac) == COMPLEX_EXPR)
6315 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6316 else if (TREE_CODE (ac) == COMPLEX_CST)
6317 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6318 else
6319 return NULL;
6321 if (TREE_CODE (bc) == COMPLEX_EXPR)
6322 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6323 else if (TREE_CODE (bc) == COMPLEX_CST)
6324 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6325 else
6326 return NULL;
6328 inner_type = TREE_TYPE (type);
6330 rr = fold (build2 (code, inner_type, ar, br));
6331 ri = fold (build2 (code, inner_type, ai, bi));
6333 return fold (build2 (COMPLEX_EXPR, type, rr, ri));
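/* E.g. COMPLEX_EXPR <x, y> + COMPLEX_CST (1 + 2i) folds componentwise
   to COMPLEX_EXPR <x + 1, y + 2>; subtraction works the same way with
   MINUS_EXPR as CODE.  */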
6336 /* Perform some simplifications of complex multiplication when one or more
6337 of the components are constants or zeros. Return non-null if successful. */
6339 tree
6340 fold_complex_mult_parts (tree type, tree ar, tree ai, tree br, tree bi)
6342 tree rr, ri, inner_type, zero;
6343 bool ar0, ai0, br0, bi0, bi1;
6345 inner_type = TREE_TYPE (type);
6346 zero = NULL;
6348 if (SCALAR_FLOAT_TYPE_P (inner_type))
6350 ar0 = ai0 = br0 = bi0 = bi1 = false;
6352 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6354 if (TREE_CODE (ar) == REAL_CST
6355 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6356 ar0 = true, zero = ar;
6358 if (TREE_CODE (ai) == REAL_CST
6359 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6360 ai0 = true, zero = ai;
6362 if (TREE_CODE (br) == REAL_CST
6363 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6364 br0 = true, zero = br;
6366 if (TREE_CODE (bi) == REAL_CST)
6368 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6369 bi0 = true, zero = bi;
6370 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6371 bi1 = true;
6374 else
6376 ar0 = integer_zerop (ar);
6377 if (ar0)
6378 zero = ar;
6379 ai0 = integer_zerop (ai);
6380 if (ai0)
6381 zero = ai;
6382 br0 = integer_zerop (br);
6383 if (br0)
6384 zero = br;
6385 bi0 = integer_zerop (bi);
6386 if (bi0)
6388 zero = bi;
6389 bi1 = false;
6391 else
6392 bi1 = integer_onep (bi);
6395 /* We won't optimize anything below unless something is zero. */
6396 if (zero == NULL)
6397 return NULL;
6399 if (ai0 && br0 && bi1)
6401 rr = zero;
6402 ri = ar;
6404 else if (ai0 && bi0)
6406 rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
6407 ri = zero;
6409 else if (ai0 && br0)
6411 rr = zero;
6412 ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
6414 else if (ar0 && bi0)
6416 rr = zero;
6417 ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
6419 else if (ar0 && br0)
6421 rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
6422 rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
6423 ri = zero;
6425 else if (bi0)
6427 rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
6428 ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
6430 else if (ai0)
6432 rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
6433 ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
6435 else if (br0)
6437 rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
6438 rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
6439 ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
6441 else if (ar0)
6443 rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
6444 rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
6445 ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
6447 else
6448 return NULL;
6450 return fold (build2 (COMPLEX_EXPR, type, rr, ri));
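/* For instance, multiplying a pure real by a pure imaginary value,
   i.e. AI == 0 and BR == 0, yields RR == 0 and RI == AR * BI, and
   multiplying by a pure real (BI == 0) just scales both components:
   RR == AR * BR, RI == AI * BR.  For floats only literal +0.0
   components enable these folds, so the sign of zero is preserved.  */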
6453 static tree
6454 fold_complex_mult (tree type, tree ac, tree bc)
6456 tree ar, ai, br, bi;
6458 if (TREE_CODE (ac) == COMPLEX_EXPR)
6459 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6460 else if (TREE_CODE (ac) == COMPLEX_CST)
6461 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6462 else
6463 return NULL;
6465 if (TREE_CODE (bc) == COMPLEX_EXPR)
6466 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6467 else if (TREE_CODE (bc) == COMPLEX_CST)
6468 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6469 else
6470 return NULL;
6472 return fold_complex_mult_parts (type, ar, ai, br, bi);
6475 /* Perform some simplifications of complex division when one or more of
6476 the components are constants or zeros. Return non-null if successful. */
6478 tree
6479 fold_complex_div_parts (tree type, tree ar, tree ai, tree br, tree bi,
6480 enum tree_code code)
6482 tree rr, ri, inner_type, zero;
6483 bool ar0, ai0, br0, bi0, bi1;
6485 inner_type = TREE_TYPE (type);
6486 zero = NULL;
6488 if (SCALAR_FLOAT_TYPE_P (inner_type))
6490 ar0 = ai0 = br0 = bi0 = bi1 = false;
6492 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6494 if (TREE_CODE (ar) == REAL_CST
6495 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6496 ar0 = true, zero = ar;
6498 if (TREE_CODE (ai) == REAL_CST
6499 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6500 ai0 = true, zero = ai;
6502 if (TREE_CODE (br) == REAL_CST
6503 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6504 br0 = true, zero = br;
6506 if (TREE_CODE (bi) == REAL_CST)
6508 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6509 bi0 = true, zero = bi;
6510 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6511 bi1 = true;
6514 else
6516 ar0 = integer_zerop (ar);
6517 if (ar0)
6518 zero = ar;
6519 ai0 = integer_zerop (ai);
6520 if (ai0)
6521 zero = ai;
6522 br0 = integer_zerop (br);
6523 if (br0)
6524 zero = br;
6525 bi0 = integer_zerop (bi);
6526 if (bi0)
6528 zero = bi;
6529 bi1 = false;
6531 else
6532 bi1 = integer_onep (bi);
6535 /* We won't optimize anything below unless something is zero. */
6536 if (zero == NULL)
6537 return NULL;
6539 if (ai0 && bi0)
6541 rr = fold (build2 (code, inner_type, ar, br));
6542 ri = zero;
6544 else if (ai0 && br0)
6546 rr = zero;
6547 ri = fold (build2 (code, inner_type, ar, bi));
6548 ri = fold (build1 (NEGATE_EXPR, inner_type, ri));
6550 else if (ar0 && bi0)
6552 rr = zero;
6553 ri = fold (build2 (code, inner_type, ai, br));
6555 else if (ar0 && br0)
6557 rr = fold (build2 (code, inner_type, ai, bi));
6558 ri = zero;
6560 else if (bi0)
6562 rr = fold (build2 (code, inner_type, ar, br));
6563 ri = fold (build2 (code, inner_type, ai, br));
6565 else if (br0)
6567 rr = fold (build2 (code, inner_type, ai, bi));
6568 ri = fold (build2 (code, inner_type, ar, bi));
6569 ri = fold (build1 (NEGATE_EXPR, inner_type, ri));
6571 else
6572 return NULL;
6574 return fold (build2 (COMPLEX_EXPR, type, rr, ri));
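/* For example, dividing by a pure real divisor (BI == 0) divides both
   components: RR == AR / BR and RI == AI / BR; dividing a pure real by
   a pure imaginary value gives RR == 0 and RI == -(AR / BI).  */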
6577 static tree
6578 fold_complex_div (tree type, tree ac, tree bc, enum tree_code code)
6580 tree ar, ai, br, bi;
6582 if (TREE_CODE (ac) == COMPLEX_EXPR)
6583 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6584 else if (TREE_CODE (ac) == COMPLEX_CST)
6585 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6586 else
6587 return NULL;
6589 if (TREE_CODE (bc) == COMPLEX_EXPR)
6590 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6591 else if (TREE_CODE (bc) == COMPLEX_CST)
6592 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6593 else
6594 return NULL;
6596 return fold_complex_div_parts (type, ar, ai, br, bi, code);
6599 /* Perform constant folding and related simplification of EXPR.
6600 The related simplifications include x*1 => x, x*0 => 0, etc.,
6601 and application of the associative law.
6602 NOP_EXPR conversions may be removed freely (as long as we
6603 are careful not to change the type of the overall expression).
6604 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
6605 but we can constant-fold them if they have constant operands. */
6607 #ifdef ENABLE_FOLD_CHECKING
6608 # define fold(x) fold_1 (x)
6609 static tree fold_1 (tree);
6610 static
6611 #endif
6612 tree
6613 fold (tree expr)
6615 const tree t = expr;
6616 const tree type = TREE_TYPE (expr);
6617 tree t1 = NULL_TREE;
6618 tree tem;
6619 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
6620 enum tree_code code = TREE_CODE (t);
6621 enum tree_code_class kind = TREE_CODE_CLASS (code);
6623 /* WINS will be nonzero when the switch is done
6624 if all operands are constant. */
6625 int wins = 1;
6627 /* Return right away if a constant. */
6628 if (kind == tcc_constant)
6629 return t;
6631 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6633 tree subop;
6635 /* Special case for conversion ops that can have fixed point args. */
6636 arg0 = TREE_OPERAND (t, 0);
6638 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6639 if (arg0 != 0)
6640 STRIP_SIGN_NOPS (arg0);
6642 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
6643 subop = TREE_REALPART (arg0);
6644 else
6645 subop = arg0;
6647 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
6648 && TREE_CODE (subop) != REAL_CST)
6649 /* Note that TREE_CONSTANT isn't enough:
6650 static var addresses are constant but we can't
6651 do arithmetic on them. */
6652 wins = 0;
6654 else if (IS_EXPR_CODE_CLASS (kind))
6656 int len = TREE_CODE_LENGTH (code);
6657 int i;
6658 for (i = 0; i < len; i++)
6660 tree op = TREE_OPERAND (t, i);
6661 tree subop;
6663 if (op == 0)
6664 continue; /* Valid for CALL_EXPR, at least. */
6666 /* Strip any conversions that don't change the mode. This is
6667 safe for every expression, except for a comparison expression
6668 because its signedness is derived from its operands. So, in
6669 the latter case, only strip conversions that don't change the
6670 signedness.
6672 Note that this is done as an internal manipulation within the
6673 constant folder, in order to find the simplest representation
6674 of the arguments so that their form can be studied. In any
6675 case, the appropriate type conversions should be put back in
6676 the tree that will get out of the constant folder. */
6677 if (kind == tcc_comparison)
6678 STRIP_SIGN_NOPS (op);
6679 else
6680 STRIP_NOPS (op);
6682 if (TREE_CODE (op) == COMPLEX_CST)
6683 subop = TREE_REALPART (op);
6684 else
6685 subop = op;
6687 if (TREE_CODE (subop) != INTEGER_CST
6688 && TREE_CODE (subop) != REAL_CST)
6689 /* Note that TREE_CONSTANT isn't enough:
6690 static var addresses are constant but we can't
6691 do arithmetic on them. */
6692 wins = 0;
6694 if (i == 0)
6695 arg0 = op;
6696 else if (i == 1)
6697 arg1 = op;
6701 /* If this is a commutative operation, and ARG0 is a constant, move it
6702 to ARG1 to reduce the number of tests below. */
6703 if (commutative_tree_code (code)
6704 && tree_swap_operands_p (arg0, arg1, true))
6705 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6706 TREE_OPERAND (t, 0)));
6708 /* Now WINS is set as described above,
6709 ARG0 is the first operand of EXPR,
6710 and ARG1 is the second operand (if it has more than one operand).
6712 First check for cases where an arithmetic operation is applied to a
6713 compound, conditional, or comparison operation. Push the arithmetic
6714 operation inside the compound or conditional to see if any folding
6715 can then be done. Convert comparison to conditional for this purpose.
6716 This also optimizes non-constant cases that used to be done in
6717 expand_expr.
6719 Before we do that, see if this is a BIT_AND_EXPR, BIT_IOR_EXPR, EQ_EXPR
6720 or NE_EXPR where one operand is a truth value and the other is a truth
6721 value or a BIT_AND_EXPR with the constant 1. In that case, the
6722 code below would make the expression more complex. Change it to a
6723 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6724 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6726 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6727 || code == EQ_EXPR || code == NE_EXPR)
6728 && ((truth_value_p (TREE_CODE (arg0))
6729 && (truth_value_p (TREE_CODE (arg1))
6730 || (TREE_CODE (arg1) == BIT_AND_EXPR
6731 && integer_onep (TREE_OPERAND (arg1, 1)))))
6732 || (truth_value_p (TREE_CODE (arg1))
6733 && (truth_value_p (TREE_CODE (arg0))
6734 || (TREE_CODE (arg0) == BIT_AND_EXPR
6735 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6737 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6738 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6739 : TRUTH_XOR_EXPR,
6740 type, fold_convert (boolean_type_node, arg0),
6741 fold_convert (boolean_type_node, arg1)));
6743 if (code == EQ_EXPR)
6744 tem = invert_truthvalue (tem);
6746 return tem;
6749 if (TREE_CODE_CLASS (code) == tcc_unary)
6751 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6752 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6753 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6754 else if (TREE_CODE (arg0) == COND_EXPR)
6756 tree arg01 = TREE_OPERAND (arg0, 1);
6757 tree arg02 = TREE_OPERAND (arg0, 2);
6758 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6759 arg01 = fold (build1 (code, type, arg01));
6760 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6761 arg02 = fold (build1 (code, type, arg02));
6762 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6763 arg01, arg02));
6765 /* If this was a conversion, and all we did was to move it
6766 inside the COND_EXPR, bring it back out. But leave it if
6767 it is a conversion from integer to integer and the
6768 result precision is no wider than a word since such a
6769 conversion is cheap and may be optimized away by combine,
6770 while it couldn't if it were outside the COND_EXPR. Then return
6771 so we don't get into an infinite recursion loop taking the
6772 conversion out and then back in. */
6774 if ((code == NOP_EXPR || code == CONVERT_EXPR
6775 || code == NON_LVALUE_EXPR)
6776 && TREE_CODE (tem) == COND_EXPR
6777 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6778 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6779 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6780 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6781 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6782 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6783 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6784 && (INTEGRAL_TYPE_P
6785 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6786 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6787 || flag_syntax_only))
6788 tem = build1 (code, type,
6789 build3 (COND_EXPR,
6790 TREE_TYPE (TREE_OPERAND
6791 (TREE_OPERAND (tem, 1), 0)),
6792 TREE_OPERAND (tem, 0),
6793 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6794 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6795 return tem;
6797 else if (COMPARISON_CLASS_P (arg0))
6799 if (TREE_CODE (type) == BOOLEAN_TYPE)
6801 arg0 = copy_node (arg0);
6802 TREE_TYPE (arg0) = type;
6803 return arg0;
6805 else if (TREE_CODE (type) != INTEGER_TYPE)
6806 return fold (build3 (COND_EXPR, type, arg0,
6807 fold (build1 (code, type,
6808 integer_one_node)),
6809 fold (build1 (code, type,
6810 integer_zero_node))));
6813 else if (TREE_CODE_CLASS (code) == tcc_comparison
6814 && TREE_CODE (arg0) == COMPOUND_EXPR)
6815 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6816 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6817 else if (TREE_CODE_CLASS (code) == tcc_comparison
6818 && TREE_CODE (arg1) == COMPOUND_EXPR)
6819 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6820 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6821 else if (TREE_CODE_CLASS (code) == tcc_binary
6822 || TREE_CODE_CLASS (code) == tcc_comparison)
6824 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6825 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6826 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6827 arg1)));
6828 if (TREE_CODE (arg1) == COMPOUND_EXPR
6829 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6830 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6831 fold (build2 (code, type,
6832 arg0, TREE_OPERAND (arg1, 1))));
6834 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
6836 tem = fold_binary_op_with_conditional_arg (t, code, arg0, arg1,
6837 /*cond_first_p=*/1);
6838 if (tem != NULL_TREE)
6839 return tem;
6842 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
6844 tem = fold_binary_op_with_conditional_arg (t, code, arg1, arg0,
6845 /*cond_first_p=*/0);
6846 if (tem != NULL_TREE)
6847 return tem;
6851 switch (code)
6853 case CONST_DECL:
6854 return fold (DECL_INITIAL (t));
6856 case NOP_EXPR:
6857 case FLOAT_EXPR:
6858 case CONVERT_EXPR:
6859 case FIX_TRUNC_EXPR:
6860 case FIX_CEIL_EXPR:
6861 case FIX_FLOOR_EXPR:
6862 case FIX_ROUND_EXPR:
6863 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6864 return TREE_OPERAND (t, 0);
6866 /* Handle cases of two conversions in a row. */
6867 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6868 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6870 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6871 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6872 int inside_int = INTEGRAL_TYPE_P (inside_type);
6873 int inside_ptr = POINTER_TYPE_P (inside_type);
6874 int inside_float = FLOAT_TYPE_P (inside_type);
6875 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6876 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6877 int inter_int = INTEGRAL_TYPE_P (inter_type);
6878 int inter_ptr = POINTER_TYPE_P (inter_type);
6879 int inter_float = FLOAT_TYPE_P (inter_type);
6880 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6881 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6882 int final_int = INTEGRAL_TYPE_P (type);
6883 int final_ptr = POINTER_TYPE_P (type);
6884 int final_float = FLOAT_TYPE_P (type);
6885 unsigned int final_prec = TYPE_PRECISION (type);
6886 int final_unsignedp = TYPE_UNSIGNED (type);
6888 /* In addition to the cases of two conversions in a row
6889 handled below, if we are converting something to its own
6890 type via an object of identical or wider precision, neither
6891 conversion is needed. */
6892 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6893 && ((inter_int && final_int) || (inter_float && final_float))
6894 && inter_prec >= final_prec)
6895 return fold (build1 (code, type,
6896 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6898 /* Likewise, if the intermediate and final types are either both
6899 float or both integer, we don't need the middle conversion if
6900 it is wider than the final type and doesn't change the signedness
6901 (for integers). Avoid this if the final type is a pointer
6902 since then we sometimes need the inner conversion. Likewise if
6903 the outer has a precision not equal to the size of its mode. */
6904 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6905 || (inter_float && inside_float))
6906 && inter_prec >= inside_prec
6907 && (inter_float || inter_unsignedp == inside_unsignedp)
6908 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6909 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6910 && ! final_ptr)
6911 return fold (build1 (code, type,
6912 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6914 /* If we have a sign-extension of a zero-extended value, we can
6915 replace that by a single zero-extension. */
6916 if (inside_int && inter_int && final_int
6917 && inside_prec < inter_prec && inter_prec < final_prec
6918 && inside_unsignedp && !inter_unsignedp)
6919 return fold (build1 (code, type,
6920 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6922 /* Two conversions in a row are not needed unless:
6923 - some conversion is floating-point (overstrict for now), or
6924 - the intermediate type is narrower than both initial and
6925 final, or
6926 - the intermediate type and innermost type differ in signedness,
6927 and the outermost type is wider than the intermediate, or
6928 - the initial type is a pointer type and the precisions of the
6929 intermediate and final types differ, or
6930 - the final type is a pointer type and the precisions of the
6931 initial and intermediate types differ. */
6932 if (! inside_float && ! inter_float && ! final_float
6933 && (inter_prec > inside_prec || inter_prec > final_prec)
6934 && ! (inside_int && inter_int
6935 && inter_unsignedp != inside_unsignedp
6936 && inter_prec < final_prec)
6937 && ((inter_unsignedp && inter_prec > inside_prec)
6938 == (final_unsignedp && final_prec > inter_prec))
6939 && ! (inside_ptr && inter_prec != final_prec)
6940 && ! (final_ptr && inside_prec != inter_prec)
6941 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6942 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6943 && ! final_ptr)
6944 return fold (build1 (code, type,
6945 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
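/* Concretely, the rules above let `(short) (int) S' with S of type
   short collapse to a direct conversion of S (the widening to int is
   dropped), and let `(int) (short) U' with U of type unsigned char
   become a single zero-extension of U to int.  */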
6948 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6949 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6950 /* Detect assigning a bitfield. */
6951 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6952 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6954 /* Don't leave an assignment inside a conversion
6955 unless assigning a bitfield. */
6956 tree prev = TREE_OPERAND (t, 0);
6957 tem = copy_node (t);
6958 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6959 /* First do the assignment, then return converted constant. */
6960 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6961 TREE_NO_WARNING (tem) = 1;
6962 TREE_USED (tem) = 1;
6963 return tem;
6966 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6967 constant (if x has signed type, the sign bit cannot be set
6968 in c). This folds extension into the BIT_AND_EXPR. */
6969 if (INTEGRAL_TYPE_P (type)
6970 && TREE_CODE (type) != BOOLEAN_TYPE
6971 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6972 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6974 tree and = TREE_OPERAND (t, 0);
6975 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6976 int change = 0;
6978 if (TYPE_UNSIGNED (TREE_TYPE (and))
6979 || (TYPE_PRECISION (type)
6980 <= TYPE_PRECISION (TREE_TYPE (and))))
6981 change = 1;
6982 else if (TYPE_PRECISION (TREE_TYPE (and1))
6983 <= HOST_BITS_PER_WIDE_INT
6984 && host_integerp (and1, 1))
6986 unsigned HOST_WIDE_INT cst;
6988 cst = tree_low_cst (and1, 1);
6989 cst &= (HOST_WIDE_INT) -1
6990 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6991 change = (cst == 0);
6992 #ifdef LOAD_EXTEND_OP
6993 if (change
6994 && !flag_syntax_only
6995 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6996 == ZERO_EXTEND))
6998 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6999 and0 = fold_convert (uns, and0);
7000 and1 = fold_convert (uns, and1);
7002 #endif
7004 if (change)
7006 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7007 TREE_INT_CST_HIGH (and1));
7008 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7009 TREE_CONSTANT_OVERFLOW (and1));
7010 return fold (build2 (BIT_AND_EXPR, type,
7011 fold_convert (type, and0), tem));
7015 /* Convert (T1)((T2)X op Y) into (T1)X op Y, where T1 and T2 are
7016 pointers to types of the same size. */
7017 if (POINTER_TYPE_P (TREE_TYPE (t))
7018 && BINARY_CLASS_P (arg0)
7019 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7020 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7022 tree arg00 = TREE_OPERAND (arg0, 0);
7023 tree t0 = TREE_TYPE (t);
7024 tree t1 = TREE_TYPE (arg00);
7025 tree tt0 = TREE_TYPE (t0);
7026 tree tt1 = TREE_TYPE (t1);
7027 tree s0 = TYPE_SIZE (tt0);
7028 tree s1 = TYPE_SIZE (tt1);
7030 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7031 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7032 TREE_OPERAND (arg0, 1));
7035 tem = fold_convert_const (code, type, arg0);
7036 return tem ? tem : t;
7038 case VIEW_CONVERT_EXPR:
7039 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
7040 return build1 (VIEW_CONVERT_EXPR, type,
7041 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
7042 return t;
7044 case COMPONENT_REF:
7045 if (TREE_CODE (arg0) == CONSTRUCTOR
7046 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
7048 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
7049 if (m)
7050 return TREE_VALUE (m);
7052 return t;
7054 case RANGE_EXPR:
7055 if (TREE_CONSTANT (t) != wins)
7057 tem = copy_node (t);
7058 TREE_CONSTANT (tem) = wins;
7059 TREE_INVARIANT (tem) = wins;
7060 return tem;
7062 return t;
7064 case NEGATE_EXPR:
7065 if (negate_expr_p (arg0))
7066 return fold_convert (type, negate_expr (arg0));
7067 /* Convert - (~A) to A + 1. */
7068 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
7069 return fold (build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
7070 build_int_cst (type, 1)));
7071 return t;
7073 case ABS_EXPR:
7074 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7075 return fold_abs_const (arg0, type);
7076 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7077 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
7078 /* Convert fabs((double)float) into (double)fabsf(float). */
7079 else if (TREE_CODE (arg0) == NOP_EXPR
7080 && TREE_CODE (type) == REAL_TYPE)
7082 tree targ0 = strip_float_extensions (arg0);
7083 if (targ0 != arg0)
7084 return fold_convert (type, fold (build1 (ABS_EXPR,
7085 TREE_TYPE (targ0),
7086 targ0)));
7088 else if (tree_expr_nonnegative_p (arg0))
7089 return arg0;
7091 /* Strip sign ops from argument. */
7092 if (TREE_CODE (type) == REAL_TYPE)
7094 tem = fold_strip_sign_ops (arg0);
7095 if (tem)
7096 return fold (build1 (ABS_EXPR, type, fold_convert (type, tem)));
7098 return t;
7100 case CONJ_EXPR:
7101 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7102 return fold_convert (type, arg0);
7103 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7104 return build2 (COMPLEX_EXPR, type,
7105 TREE_OPERAND (arg0, 0),
7106 negate_expr (TREE_OPERAND (arg0, 1)));
7107 else if (TREE_CODE (arg0) == COMPLEX_CST)
7108 return build_complex (type, TREE_REALPART (arg0),
7109 negate_expr (TREE_IMAGPART (arg0)));
7110 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7111 return fold (build2 (TREE_CODE (arg0), type,
7112 fold (build1 (CONJ_EXPR, type,
7113 TREE_OPERAND (arg0, 0))),
7114 fold (build1 (CONJ_EXPR, type,
7115 TREE_OPERAND (arg0, 1)))));
7116 else if (TREE_CODE (arg0) == CONJ_EXPR)
7117 return TREE_OPERAND (arg0, 0);
7118 return t;
7120 case BIT_NOT_EXPR:
7121 if (TREE_CODE (arg0) == INTEGER_CST)
7122 return fold_not_const (arg0, type);
7123 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7124 return TREE_OPERAND (arg0, 0);
7125 /* Convert ~ (-A) to A - 1. */
7126 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7127 return fold (build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7128 build_int_cst (type, 1)));
7129 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7130 else if (INTEGRAL_TYPE_P (type)
7131 && ((TREE_CODE (arg0) == MINUS_EXPR
7132 && integer_onep (TREE_OPERAND (arg0, 1)))
7133 || (TREE_CODE (arg0) == PLUS_EXPR
7134 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7135 return fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0)));
7136 return t;
7138 case PLUS_EXPR:
7139 /* A + (-B) -> A - B */
7140 if (TREE_CODE (arg1) == NEGATE_EXPR)
7141 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
7142 /* (-A) + B -> B - A */
7143 if (TREE_CODE (arg0) == NEGATE_EXPR
7144 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7145 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
7147 if (TREE_CODE (type) == COMPLEX_TYPE)
7149 tem = fold_complex_add (type, arg0, arg1, PLUS_EXPR);
7150 if (tem)
7151 return tem;
7154 if (! FLOAT_TYPE_P (type))
7156 if (integer_zerop (arg1))
7157 return non_lvalue (fold_convert (type, arg0));
7159 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7160 with a constant, and the two constants have no bits in common,
7161 we should treat this as a BIT_IOR_EXPR since this may produce more
7162 simplifications. */
7163 if (TREE_CODE (arg0) == BIT_AND_EXPR
7164 && TREE_CODE (arg1) == BIT_AND_EXPR
7165 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7166 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7167 && integer_zerop (const_binop (BIT_AND_EXPR,
7168 TREE_OPERAND (arg0, 1),
7169 TREE_OPERAND (arg1, 1), 0)))
7171 code = BIT_IOR_EXPR;
7172 goto bit_ior;
7175 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7176 (plus (plus (mult) (mult)) (foo)) so that we can
7177 take advantage of the factoring cases below. */
7178 if (((TREE_CODE (arg0) == PLUS_EXPR
7179 || TREE_CODE (arg0) == MINUS_EXPR)
7180 && TREE_CODE (arg1) == MULT_EXPR)
7181 || ((TREE_CODE (arg1) == PLUS_EXPR
7182 || TREE_CODE (arg1) == MINUS_EXPR)
7183 && TREE_CODE (arg0) == MULT_EXPR))
7185 tree parg0, parg1, parg, marg;
7186 enum tree_code pcode;
7188 if (TREE_CODE (arg1) == MULT_EXPR)
7189 parg = arg0, marg = arg1;
7190 else
7191 parg = arg1, marg = arg0;
7192 pcode = TREE_CODE (parg);
7193 parg0 = TREE_OPERAND (parg, 0);
7194 parg1 = TREE_OPERAND (parg, 1);
7195 STRIP_NOPS (parg0);
7196 STRIP_NOPS (parg1);
7198 if (TREE_CODE (parg0) == MULT_EXPR
7199 && TREE_CODE (parg1) != MULT_EXPR)
7200 return fold (build2 (pcode, type,
7201 fold (build2 (PLUS_EXPR, type,
7202 fold_convert (type, parg0),
7203 fold_convert (type, marg))),
7204 fold_convert (type, parg1)));
7205 if (TREE_CODE (parg0) != MULT_EXPR
7206 && TREE_CODE (parg1) == MULT_EXPR)
7207 return fold (build2 (PLUS_EXPR, type,
7208 fold_convert (type, parg0),
7209 fold (build2 (pcode, type,
7210 fold_convert (type, marg),
7211 fold_convert (type,
7212 parg1)))));
7215 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7217 tree arg00, arg01, arg10, arg11;
7218 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7220 /* (A * C) + (B * C) -> (A+B) * C.
7221 We are most concerned about the case where C is a constant,
7222 but other combinations show up during loop reduction. Since
7223 it is not difficult, try all four possibilities. */
7225 arg00 = TREE_OPERAND (arg0, 0);
7226 arg01 = TREE_OPERAND (arg0, 1);
7227 arg10 = TREE_OPERAND (arg1, 0);
7228 arg11 = TREE_OPERAND (arg1, 1);
7229 same = NULL_TREE;
7231 if (operand_equal_p (arg01, arg11, 0))
7232 same = arg01, alt0 = arg00, alt1 = arg10;
7233 else if (operand_equal_p (arg00, arg10, 0))
7234 same = arg00, alt0 = arg01, alt1 = arg11;
7235 else if (operand_equal_p (arg00, arg11, 0))
7236 same = arg00, alt0 = arg01, alt1 = arg10;
7237 else if (operand_equal_p (arg01, arg10, 0))
7238 same = arg01, alt0 = arg00, alt1 = arg11;
7240 /* No identical multiplicands; see if we can find a common
7241 power-of-two factor in non-power-of-two multiplies. This
7242 can help in multi-dimensional array access. */
7243 else if (TREE_CODE (arg01) == INTEGER_CST
7244 && TREE_CODE (arg11) == INTEGER_CST
7245 && TREE_INT_CST_HIGH (arg01) == 0
7246 && TREE_INT_CST_HIGH (arg11) == 0)
7248 HOST_WIDE_INT int01, int11, tmp;
7249 int01 = TREE_INT_CST_LOW (arg01);
7250 int11 = TREE_INT_CST_LOW (arg11);
7252 /* Move min of absolute values to int11. */
7253 if ((int01 >= 0 ? int01 : -int01)
7254 < (int11 >= 0 ? int11 : -int11))
7256 tmp = int01, int01 = int11, int11 = tmp;
7257 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7258 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7261 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7263 alt0 = fold (build2 (MULT_EXPR, type, arg00,
7264 build_int_cst (NULL_TREE,
7265 int01 / int11)));
7266 alt1 = arg10;
7267 same = arg11;
7271 if (same)
7272 return fold (build2 (MULT_EXPR, type,
7273 fold (build2 (PLUS_EXPR, type,
7274 fold_convert (type, alt0),
7275 fold_convert (type, alt1))),
7276 same));
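/* Worked example: i*12 + j*4 has no identical multiplicand, but
4 is a power of two dividing 12, so the sum is refolded as
(i*3 + j) * 4. */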
7279 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the
7280 step of the array. The loop optimizer sometimes produces this
7281 type of expression. */
7282 if (TREE_CODE (arg0) == ADDR_EXPR
7283 && TREE_CODE (arg1) == MULT_EXPR)
7285 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7286 if (tem)
7287 return fold_convert (type, fold (tem));
7289 else if (TREE_CODE (arg1) == ADDR_EXPR
7290 && TREE_CODE (arg0) == MULT_EXPR)
7292 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7293 if (tem)
7294 return fold_convert (type, fold (tem));
7297 else
7299 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7300 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7301 return non_lvalue (fold_convert (type, arg0));
7303 /* Likewise if the operands are reversed. */
7304 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7305 return non_lvalue (fold_convert (type, arg1));
7307 /* Convert X + -C into X - C. */
7308 if (TREE_CODE (arg1) == REAL_CST
7309 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7311 tem = fold_negate_const (arg1, type);
7312 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7313 return fold (build2 (MINUS_EXPR, type,
7314 fold_convert (type, arg0),
7315 fold_convert (type, tem)));
7318 /* Convert x+x into x*2.0. */
7319 if (operand_equal_p (arg0, arg1, 0)
7320 && SCALAR_FLOAT_TYPE_P (type))
7321 return fold (build2 (MULT_EXPR, type, arg0,
7322 build_real (type, dconst2)));
7324 /* Convert x*c+x into x*(c+1). */
7325 if (flag_unsafe_math_optimizations
7326 && TREE_CODE (arg0) == MULT_EXPR
7327 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7328 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7329 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7331 REAL_VALUE_TYPE c;
7333 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7334 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7335 return fold (build2 (MULT_EXPR, type, arg1,
7336 build_real (type, c)));
7339 /* Convert x+x*c into x*(c+1). */
7340 if (flag_unsafe_math_optimizations
7341 && TREE_CODE (arg1) == MULT_EXPR
7342 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7343 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7344 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7346 REAL_VALUE_TYPE c;
7348 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7349 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7350 return fold (build2 (MULT_EXPR, type, arg0,
7351 build_real (type, c)));
7354 /* Convert x*c1+x*c2 into x*(c1+c2). */
7355 if (flag_unsafe_math_optimizations
7356 && TREE_CODE (arg0) == MULT_EXPR
7357 && TREE_CODE (arg1) == MULT_EXPR
7358 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7359 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7360 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7361 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7362 && operand_equal_p (TREE_OPERAND (arg0, 0),
7363 TREE_OPERAND (arg1, 0), 0))
7365 REAL_VALUE_TYPE c1, c2;
7367 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7368 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7369 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7370 return fold (build2 (MULT_EXPR, type,
7371 TREE_OPERAND (arg0, 0),
7372 build_real (type, c1)));
7374 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7375 if (flag_unsafe_math_optimizations
7376 && TREE_CODE (arg1) == PLUS_EXPR
7377 && TREE_CODE (arg0) != MULT_EXPR)
7379 tree tree10 = TREE_OPERAND (arg1, 0);
7380 tree tree11 = TREE_OPERAND (arg1, 1);
7381 if (TREE_CODE (tree11) == MULT_EXPR
7382 && TREE_CODE (tree10) == MULT_EXPR)
7384 tree tree0;
7385 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
7386 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
7389 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
7390 if (flag_unsafe_math_optimizations
7391 && TREE_CODE (arg0) == PLUS_EXPR
7392 && TREE_CODE (arg1) != MULT_EXPR)
7394 tree tree00 = TREE_OPERAND (arg0, 0);
7395 tree tree01 = TREE_OPERAND (arg0, 1);
7396 if (TREE_CODE (tree01) == MULT_EXPR
7397 && TREE_CODE (tree00) == MULT_EXPR)
7399 tree tree0;
7400 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
7401 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
7406 bit_rotate:
7407 /* (A << C1) + (A >> C2), where A is unsigned and C1+C2 is the
7408 size of A, is a rotate of A by C1 bits. */
7409 /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the
7410 size of A, is a rotate of A by B bits. */
7412 enum tree_code code0, code1;
7413 code0 = TREE_CODE (arg0);
7414 code1 = TREE_CODE (arg1);
7415 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7416 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7417 && operand_equal_p (TREE_OPERAND (arg0, 0),
7418 TREE_OPERAND (arg1, 0), 0)
7419 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7421 tree tree01, tree11;
7422 enum tree_code code01, code11;
7424 tree01 = TREE_OPERAND (arg0, 1);
7425 tree11 = TREE_OPERAND (arg1, 1);
7426 STRIP_NOPS (tree01);
7427 STRIP_NOPS (tree11);
7428 code01 = TREE_CODE (tree01);
7429 code11 = TREE_CODE (tree11);
7430 if (code01 == INTEGER_CST
7431 && code11 == INTEGER_CST
7432 && TREE_INT_CST_HIGH (tree01) == 0
7433 && TREE_INT_CST_HIGH (tree11) == 0
7434 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7435 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7436 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7437 code0 == LSHIFT_EXPR ? tree01 : tree11);
7438 else if (code11 == MINUS_EXPR)
7440 tree tree110, tree111;
7441 tree110 = TREE_OPERAND (tree11, 0);
7442 tree111 = TREE_OPERAND (tree11, 1);
7443 STRIP_NOPS (tree110);
7444 STRIP_NOPS (tree111);
7445 if (TREE_CODE (tree110) == INTEGER_CST
7446 && 0 == compare_tree_int (tree110,
7447 TYPE_PRECISION
7448 (TREE_TYPE (TREE_OPERAND
7449 (arg0, 0))))
7450 && operand_equal_p (tree01, tree111, 0))
7451 return build2 ((code0 == LSHIFT_EXPR
7452 ? LROTATE_EXPR
7453 : RROTATE_EXPR),
7454 type, TREE_OPERAND (arg0, 0), tree01);
7456 else if (code01 == MINUS_EXPR)
7458 tree tree010, tree011;
7459 tree010 = TREE_OPERAND (tree01, 0);
7460 tree011 = TREE_OPERAND (tree01, 1);
7461 STRIP_NOPS (tree010);
7462 STRIP_NOPS (tree011);
7463 if (TREE_CODE (tree010) == INTEGER_CST
7464 && 0 == compare_tree_int (tree010,
7465 TYPE_PRECISION
7466 (TREE_TYPE (TREE_OPERAND
7467 (arg0, 0))))
7468 && operand_equal_p (tree11, tree011, 0))
7469 return build2 ((code0 != LSHIFT_EXPR
7470 ? LROTATE_EXPR
7471 : RROTATE_EXPR),
7472 type, TREE_OPERAND (arg0, 0), tree11);
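/* Worked example: for a 32-bit unsigned x, (x << 3) + (x >> 29)
matches the constant case above, and (x << b) + (x >> (32 - b))
matches the MINUS_EXPR case; both fold to a left rotate. */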
7477 associate:
7478 /* In most languages, we can't associate operations on floats through
7479 parentheses. Rather than remember where the parentheses were, we
7480 don't associate floats at all, unless the user has specified
7481 -funsafe-math-optimizations. */
7483 if (! wins
7484 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7486 tree var0, con0, lit0, minus_lit0;
7487 tree var1, con1, lit1, minus_lit1;
7489 /* Split both trees into variables, constants, and literals. Then
7490 associate each group together, the constants with literals,
7491 then the result with variables. This increases the chances of
7492 literals being recombined later and of generating relocatable
7493 expressions for the sum of a constant and literal. */
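/* For instance, in (x + 3) + (y + CST) where CST is a relocatable
address constant, x and y form the variable group, 3 the literal
group, and CST the constant group. */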
7494 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7495 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7496 code == MINUS_EXPR);
7498 /* Only do something if we found more than two objects. Otherwise,
7499 nothing has changed and we risk infinite recursion. */
7500 if (2 < ((var0 != 0) + (var1 != 0)
7501 + (con0 != 0) + (con1 != 0)
7502 + (lit0 != 0) + (lit1 != 0)
7503 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7505 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7506 if (code == MINUS_EXPR)
7507 code = PLUS_EXPR;
7509 var0 = associate_trees (var0, var1, code, type);
7510 con0 = associate_trees (con0, con1, code, type);
7511 lit0 = associate_trees (lit0, lit1, code, type);
7512 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7514 /* Preserve the MINUS_EXPR if the negative part of the literal is
7515 greater than the positive part. Otherwise, the multiplicative
7516 folding code (i.e. extract_muldiv) may be fooled when
7517 unsigned constants are subtracted, as in the following
7518 example: ((X*2 + 4) - 8U)/2. */
7519 if (minus_lit0 && lit0)
7521 if (TREE_CODE (lit0) == INTEGER_CST
7522 && TREE_CODE (minus_lit0) == INTEGER_CST
7523 && tree_int_cst_lt (lit0, minus_lit0))
7525 minus_lit0 = associate_trees (minus_lit0, lit0,
7526 MINUS_EXPR, type);
7527 lit0 = 0;
7529 else
7531 lit0 = associate_trees (lit0, minus_lit0,
7532 MINUS_EXPR, type);
7533 minus_lit0 = 0;
7536 if (minus_lit0)
7538 if (con0 == 0)
7539 return fold_convert (type,
7540 associate_trees (var0, minus_lit0,
7541 MINUS_EXPR, type));
7542 else
7544 con0 = associate_trees (con0, minus_lit0,
7545 MINUS_EXPR, type);
7546 return fold_convert (type,
7547 associate_trees (var0, con0,
7548 PLUS_EXPR, type));
7552 con0 = associate_trees (con0, lit0, code, type);
7553 return fold_convert (type, associate_trees (var0, con0,
7554 code, type));
7558 binary:
7559 if (wins)
7560 t1 = const_binop (code, arg0, arg1, 0);
7561 if (t1 != NULL_TREE)
7563 /* The return value should always have
7564 the same type as the original expression. */
7565 if (TREE_TYPE (t1) != type)
7566 t1 = fold_convert (type, t1);
7568 return t1;
7570 return t;
7572 case MINUS_EXPR:
7573 /* A - (-B) -> A + B */
7574 if (TREE_CODE (arg1) == NEGATE_EXPR)
7575 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
7576 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7577 if (TREE_CODE (arg0) == NEGATE_EXPR
7578 && (FLOAT_TYPE_P (type)
7579 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7580 && negate_expr_p (arg1)
7581 && reorder_operands_p (arg0, arg1))
7582 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
7583 TREE_OPERAND (arg0, 0)));
7585 if (TREE_CODE (type) == COMPLEX_TYPE)
7587 tem = fold_complex_add (type, arg0, arg1, MINUS_EXPR);
7588 if (tem)
7589 return tem;
7592 if (! FLOAT_TYPE_P (type))
7594 if (! wins && integer_zerop (arg0))
7595 return negate_expr (fold_convert (type, arg1));
7596 if (integer_zerop (arg1))
7597 return non_lvalue (fold_convert (type, arg0));
7599 /* Fold A - (A & B) into ~B & A. */
7600 if (!TREE_SIDE_EFFECTS (arg0)
7601 && TREE_CODE (arg1) == BIT_AND_EXPR)
7603 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7604 return fold (build2 (BIT_AND_EXPR, type,
7605 fold (build1 (BIT_NOT_EXPR, type,
7606 TREE_OPERAND (arg1, 0))),
7607 arg0));
7608 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7609 return fold (build2 (BIT_AND_EXPR, type,
7610 fold (build1 (BIT_NOT_EXPR, type,
7611 TREE_OPERAND (arg1, 1))),
7612 arg0));
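/* Worked example: x - (x & 7) folds to ~7 & x, rounding x down
to a multiple of 8. */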
7615 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7616 any power of 2 minus 1. */
7617 if (TREE_CODE (arg0) == BIT_AND_EXPR
7618 && TREE_CODE (arg1) == BIT_AND_EXPR
7619 && operand_equal_p (TREE_OPERAND (arg0, 0),
7620 TREE_OPERAND (arg1, 0), 0))
7622 tree mask0 = TREE_OPERAND (arg0, 1);
7623 tree mask1 = TREE_OPERAND (arg1, 1);
7624 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
7626 if (operand_equal_p (tem, mask1, 0))
7628 tem = fold (build2 (BIT_XOR_EXPR, type,
7629 TREE_OPERAND (arg0, 0), mask1));
7630 return fold (build2 (MINUS_EXPR, type, tem, mask1));
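/* Worked example: with B == 3 (a power of 2 minus 1),
(x & ~3) - (x & 3) folds to (x ^ 3) - 3. */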
7635 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7636 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7637 return non_lvalue (fold_convert (type, arg0));
7639 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7640 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7641 (-ARG1 + ARG0) reduces to -ARG1. */
7642 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7643 return negate_expr (fold_convert (type, arg1));
7645 /* Fold &x - &x. This can happen from &x.foo - &x.
7646 This is unsafe for certain floats even in non-IEEE formats.
7647 In IEEE, it is unsafe because it gives the wrong result for NaNs.
7648 Also note that operand_equal_p is always false if an operand
7649 is volatile. */
7651 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7652 && operand_equal_p (arg0, arg1, 0))
7653 return fold_convert (type, integer_zero_node);
7655 /* A - B -> A + (-B) if B is easily negatable. */
7656 if (!wins && negate_expr_p (arg1)
7657 && ((FLOAT_TYPE_P (type)
7658 /* Avoid this transformation if B is a positive REAL_CST. */
7659 && (TREE_CODE (arg1) != REAL_CST
7660 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7661 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7662 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
7664 /* Try folding difference of addresses. */
7666 HOST_WIDE_INT diff;
7668 if ((TREE_CODE (arg0) == ADDR_EXPR
7669 || TREE_CODE (arg1) == ADDR_EXPR)
7670 && ptr_difference_const (arg0, arg1, &diff))
7671 return build_int_cst_type (type, diff);
7674 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the
7675 step of the array. The loop optimizer sometimes produces this
7676 type of expression. */
7677 if (TREE_CODE (arg0) == ADDR_EXPR
7678 && TREE_CODE (arg1) == MULT_EXPR)
7680 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7681 if (tem)
7682 return fold_convert (type, fold (tem));
7685 if (TREE_CODE (arg0) == MULT_EXPR
7686 && TREE_CODE (arg1) == MULT_EXPR
7687 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7689 /* (A * C) - (B * C) -> (A-B) * C. */
7690 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7691 TREE_OPERAND (arg1, 1), 0))
7692 return fold (build2 (MULT_EXPR, type,
7693 fold (build2 (MINUS_EXPR, type,
7694 TREE_OPERAND (arg0, 0),
7695 TREE_OPERAND (arg1, 0))),
7696 TREE_OPERAND (arg0, 1)));
7697 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7698 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7699 TREE_OPERAND (arg1, 0), 0))
7700 return fold (build2 (MULT_EXPR, type,
7701 TREE_OPERAND (arg0, 0),
7702 fold (build2 (MINUS_EXPR, type,
7703 TREE_OPERAND (arg0, 1),
7704 TREE_OPERAND (arg1, 1)))));
7707 goto associate;
7709 case MULT_EXPR:
7710 /* (-A) * (-B) -> A * B */
7711 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7712 return fold (build2 (MULT_EXPR, type,
7713 TREE_OPERAND (arg0, 0),
7714 negate_expr (arg1)));
7715 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7716 return fold (build2 (MULT_EXPR, type,
7717 negate_expr (arg0),
7718 TREE_OPERAND (arg1, 0)));
7720 if (TREE_CODE (type) == COMPLEX_TYPE)
7722 tem = fold_complex_mult (type, arg0, arg1);
7723 if (tem)
7724 return tem;
7727 if (! FLOAT_TYPE_P (type))
7729 if (integer_zerop (arg1))
7730 return omit_one_operand (type, arg1, arg0);
7731 if (integer_onep (arg1))
7732 return non_lvalue (fold_convert (type, arg0));
7734 /* (a * (1 << b)) is (a << b) */
7735 if (TREE_CODE (arg1) == LSHIFT_EXPR
7736 && integer_onep (TREE_OPERAND (arg1, 0)))
7737 return fold (build2 (LSHIFT_EXPR, type, arg0,
7738 TREE_OPERAND (arg1, 1)));
7739 if (TREE_CODE (arg0) == LSHIFT_EXPR
7740 && integer_onep (TREE_OPERAND (arg0, 0)))
7741 return fold (build2 (LSHIFT_EXPR, type, arg1,
7742 TREE_OPERAND (arg0, 1)));
7744 if (TREE_CODE (arg1) == INTEGER_CST
7745 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
7746 fold_convert (type, arg1),
7747 code, NULL_TREE)))
7748 return fold_convert (type, tem);
7751 else
7753 /* Maybe fold x * 0 to 0. The expressions aren't the same
7754 when x is NaN, since x * 0 is also NaN. Nor are they the
7755 same in modes with signed zeros, since multiplying a
7756 negative value by 0 gives -0, not +0. */
7757 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7758 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7759 && real_zerop (arg1))
7760 return omit_one_operand (type, arg1, arg0);
7761 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7762 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7763 && real_onep (arg1))
7764 return non_lvalue (fold_convert (type, arg0));
7766 /* Transform x * -1.0 into -x. */
7767 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7768 && real_minus_onep (arg1))
7769 return fold_convert (type, negate_expr (arg0));
7771 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7772 if (flag_unsafe_math_optimizations
7773 && TREE_CODE (arg0) == RDIV_EXPR
7774 && TREE_CODE (arg1) == REAL_CST
7775 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7777 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7778 arg1, 0);
7779 if (tem)
7780 return fold (build2 (RDIV_EXPR, type, tem,
7781 TREE_OPERAND (arg0, 1)));
7784 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7785 if (operand_equal_p (arg0, arg1, 0))
7787 tree tem = fold_strip_sign_ops (arg0);
7788 if (tem != NULL_TREE)
7790 tem = fold_convert (type, tem);
7791 return fold (build2 (MULT_EXPR, type, tem, tem));
7795 if (flag_unsafe_math_optimizations)
7797 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7798 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7800 /* Optimizations of root(...)*root(...). */
7801 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7803 tree rootfn, arg, arglist;
7804 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7805 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7807 /* Optimize sqrt(x)*sqrt(x) as x. */
7808 if (BUILTIN_SQRT_P (fcode0)
7809 && operand_equal_p (arg00, arg10, 0)
7810 && ! HONOR_SNANS (TYPE_MODE (type)))
7811 return arg00;
7813 /* Optimize root(x)*root(y) as root(x*y). */
7814 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7815 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7816 arglist = build_tree_list (NULL_TREE, arg);
7817 return build_function_call_expr (rootfn, arglist);
7820 /* Optimize expN(x)*expN(y) as expN(x+y). */
7821 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7823 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7824 tree arg = build2 (PLUS_EXPR, type,
7825 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7826 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7827 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7828 return build_function_call_expr (expfn, arglist);
7831 /* Optimizations of pow(...)*pow(...). */
7832 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7833 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7834 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7836 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7837 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7838 1)));
7839 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7840 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7841 1)));
7843 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7844 if (operand_equal_p (arg01, arg11, 0))
7846 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7847 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7848 tree arglist = tree_cons (NULL_TREE, fold (arg),
7849 build_tree_list (NULL_TREE,
7850 arg01));
7851 return build_function_call_expr (powfn, arglist);
7854 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7855 if (operand_equal_p (arg00, arg10, 0))
7857 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7858 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7859 tree arglist = tree_cons (NULL_TREE, arg00,
7860 build_tree_list (NULL_TREE,
7861 arg));
7862 return build_function_call_expr (powfn, arglist);
7866 /* Optimize tan(x)*cos(x) as sin(x). */
7867 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7868 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7869 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7870 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7871 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7872 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7873 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7874 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7876 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7878 if (sinfn != NULL_TREE)
7879 return build_function_call_expr (sinfn,
7880 TREE_OPERAND (arg0, 1));
7883 /* Optimize x*pow(x,c) as pow(x,c+1). */
7884 if (fcode1 == BUILT_IN_POW
7885 || fcode1 == BUILT_IN_POWF
7886 || fcode1 == BUILT_IN_POWL)
7888 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7889 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7890 1)));
7891 if (TREE_CODE (arg11) == REAL_CST
7892 && ! TREE_CONSTANT_OVERFLOW (arg11)
7893 && operand_equal_p (arg0, arg10, 0))
7895 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7896 REAL_VALUE_TYPE c;
7897 tree arg, arglist;
7899 c = TREE_REAL_CST (arg11);
7900 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7901 arg = build_real (type, c);
7902 arglist = build_tree_list (NULL_TREE, arg);
7903 arglist = tree_cons (NULL_TREE, arg0, arglist);
7904 return build_function_call_expr (powfn, arglist);
7908 /* Optimize pow(x,c)*x as pow(x,c+1). */
7909 if (fcode0 == BUILT_IN_POW
7910 || fcode0 == BUILT_IN_POWF
7911 || fcode0 == BUILT_IN_POWL)
7913 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7914 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7915 1)));
7916 if (TREE_CODE (arg01) == REAL_CST
7917 && ! TREE_CONSTANT_OVERFLOW (arg01)
7918 && operand_equal_p (arg1, arg00, 0))
7920 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7921 REAL_VALUE_TYPE c;
7922 tree arg, arglist;
7924 c = TREE_REAL_CST (arg01);
7925 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7926 arg = build_real (type, c);
7927 arglist = build_tree_list (NULL_TREE, arg);
7928 arglist = tree_cons (NULL_TREE, arg1, arglist);
7929 return build_function_call_expr (powfn, arglist);
7933 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7934 if (! optimize_size
7935 && operand_equal_p (arg0, arg1, 0))
7937 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7939 if (powfn)
7941 tree arg = build_real (type, dconst2);
7942 tree arglist = build_tree_list (NULL_TREE, arg);
7943 arglist = tree_cons (NULL_TREE, arg0, arglist);
7944 return build_function_call_expr (powfn, arglist);
7949 goto associate;
7951 case BIT_IOR_EXPR:
7952 bit_ior:
7953 if (integer_all_onesp (arg1))
7954 return omit_one_operand (type, arg1, arg0);
7955 if (integer_zerop (arg1))
7956 return non_lvalue (fold_convert (type, arg0));
7957 if (operand_equal_p (arg0, arg1, 0))
7958 return non_lvalue (fold_convert (type, arg0));
7960 /* ~X | X is -1. */
7961 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7962 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7964 t1 = build_int_cst (type, -1);
7965 t1 = force_fit_type (t1, 0, false, false);
7966 return omit_one_operand (type, t1, arg1);
7969 /* X | ~X is -1. */
7970 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7971 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7973 t1 = build_int_cst (type, -1);
7974 t1 = force_fit_type (t1, 0, false, false);
7975 return omit_one_operand (type, t1, arg0);
7978 t1 = distribute_bit_expr (code, type, arg0, arg1);
7979 if (t1 != NULL_TREE)
7980 return t1;
7982 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7984 This results in more efficient code for machines without a NAND
7985 instruction. Combine will canonicalize to the first form
7986 which will allow use of NAND instructions provided by the
7987 backend if they exist. */
7988 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7989 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7991 return fold (build1 (BIT_NOT_EXPR, type,
7992 build2 (BIT_AND_EXPR, type,
7993 TREE_OPERAND (arg0, 0),
7994 TREE_OPERAND (arg1, 0))));
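/* This is De Morgan's law: ~a | ~b is equivalent to ~(a & b). */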
7997 /* See if this can be simplified into a rotate first. If that
7998 is unsuccessful continue in the association code. */
7999 goto bit_rotate;
8001 case BIT_XOR_EXPR:
8002 if (integer_zerop (arg1))
8003 return non_lvalue (fold_convert (type, arg0));
8004 if (integer_all_onesp (arg1))
8005 return fold (build1 (BIT_NOT_EXPR, type, arg0));
8006 if (operand_equal_p (arg0, arg1, 0))
8007 return omit_one_operand (type, integer_zero_node, arg0);
8009 /* ~X ^ X is -1. */
8010 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8011 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8013 t1 = build_int_cst (type, -1);
8014 t1 = force_fit_type (t1, 0, false, false);
8015 return omit_one_operand (type, t1, arg1);
8018 /* X ^ ~X is -1. */
8019 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8020 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8022 t1 = build_int_cst (type, -1);
8023 t1 = force_fit_type (t1, 0, false, false);
8024 return omit_one_operand (type, t1, arg0);
8027 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8028 with a constant, and the two constants have no bits in common,
8029 we should treat this as a BIT_IOR_EXPR since this may produce more
8030 simplifications. */
8031 if (TREE_CODE (arg0) == BIT_AND_EXPR
8032 && TREE_CODE (arg1) == BIT_AND_EXPR
8033 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8034 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8035 && integer_zerop (const_binop (BIT_AND_EXPR,
8036 TREE_OPERAND (arg0, 1),
8037 TREE_OPERAND (arg1, 1), 0)))
8039 code = BIT_IOR_EXPR;
8040 goto bit_ior;
8043 /* See if this can be simplified into a rotate first. If that
8044 is unsuccessful continue in the association code. */
8045 goto bit_rotate;
8047 case BIT_AND_EXPR:
8048 if (integer_all_onesp (arg1))
8049 return non_lvalue (fold_convert (type, arg0));
8050 if (integer_zerop (arg1))
8051 return omit_one_operand (type, arg1, arg0);
8052 if (operand_equal_p (arg0, arg1, 0))
8053 return non_lvalue (fold_convert (type, arg0));
8055 /* ~X & X is always zero. */
8056 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8057 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8058 return omit_one_operand (type, integer_zero_node, arg1);
8060 /* X & ~X is always zero. */
8061 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8062 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8063 return omit_one_operand (type, integer_zero_node, arg0);
8065 t1 = distribute_bit_expr (code, type, arg0, arg1);
8066 if (t1 != NULL_TREE)
8067 return t1;
8068 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8069 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8070 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8072 unsigned int prec
8073 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8075 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8076 && (~TREE_INT_CST_LOW (arg1)
8077 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8078 return fold_convert (type, TREE_OPERAND (arg0, 0));
8081 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8083 This results in more efficient code for machines without a NOR
8084 instruction. Combine will canonicalize to the first form
8085 which will allow use of NOR instructions provided by the
8086 backend if they exist. */
8087 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8088 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8090 return fold (build1 (BIT_NOT_EXPR, type,
8091 build2 (BIT_IOR_EXPR, type,
8092 TREE_OPERAND (arg0, 0),
8093 TREE_OPERAND (arg1, 0))));
8096 goto associate;
8098 case RDIV_EXPR:
8099 /* Don't touch a floating-point divide by zero unless the mode
8100 of the constant can represent infinity. */
8101 if (TREE_CODE (arg1) == REAL_CST
8102 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8103 && real_zerop (arg1))
8104 return t;
8106 /* (-A) / (-B) -> A / B */
8107 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8108 return fold (build2 (RDIV_EXPR, type,
8109 TREE_OPERAND (arg0, 0),
8110 negate_expr (arg1)));
8111 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8112 return fold (build2 (RDIV_EXPR, type,
8113 negate_expr (arg0),
8114 TREE_OPERAND (arg1, 0)));
8116 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8117 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8118 && real_onep (arg1))
8119 return non_lvalue (fold_convert (type, arg0));
8121 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8122 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8123 && real_minus_onep (arg1))
8124 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8126 /* If ARG1 is a constant, we can convert this to a multiply by the
8127 reciprocal. This does not have the same rounding properties,
8128 so only do this if -funsafe-math-optimizations. We can actually
8129 always safely do it if ARG1 is a power of two, but it's hard to
8130 tell if it is or not in a portable manner. */
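/* Worked example: x / 5.0 becomes x * 0.2 only under
-funsafe-math-optimizations, while x / 4.0 becomes x * 0.25
whenever optimizing, because the reciprocal 0.25 is exact. */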
8131 if (TREE_CODE (arg1) == REAL_CST)
8133 if (flag_unsafe_math_optimizations
8134 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8135 arg1, 0)))
8136 return fold (build2 (MULT_EXPR, type, arg0, tem));
8137 /* Find the reciprocal if optimizing and the result is exact. */
8138 if (optimize)
8140 REAL_VALUE_TYPE r;
8141 r = TREE_REAL_CST (arg1);
8142 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
8144 tem = build_real (type, r);
8145 return fold (build2 (MULT_EXPR, type, arg0, tem));
8149 /* Convert A/B/C to A/(B*C). */
8150 if (flag_unsafe_math_optimizations
8151 && TREE_CODE (arg0) == RDIV_EXPR)
8152 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8153 fold (build2 (MULT_EXPR, type,
8154 TREE_OPERAND (arg0, 1), arg1))));
8156 /* Convert A/(B/C) to (A/B)*C. */
8157 if (flag_unsafe_math_optimizations
8158 && TREE_CODE (arg1) == RDIV_EXPR)
8159 return fold (build2 (MULT_EXPR, type,
8160 fold (build2 (RDIV_EXPR, type, arg0,
8161 TREE_OPERAND (arg1, 0))),
8162 TREE_OPERAND (arg1, 1)));
8164 /* Convert C1/(X*C2) into (C1/C2)/X. */
8165 if (flag_unsafe_math_optimizations
8166 && TREE_CODE (arg1) == MULT_EXPR
8167 && TREE_CODE (arg0) == REAL_CST
8168 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8170 tree tem = const_binop (RDIV_EXPR, arg0,
8171 TREE_OPERAND (arg1, 1), 0);
8172 if (tem)
8173 return fold (build2 (RDIV_EXPR, type, tem,
8174 TREE_OPERAND (arg1, 0)));
8177 if (TREE_CODE (type) == COMPLEX_TYPE)
8179 tem = fold_complex_div (type, arg0, arg1, code);
8180 if (tem)
8181 return tem;
8184 if (flag_unsafe_math_optimizations)
8186 enum built_in_function fcode = builtin_mathfn_code (arg1);
8187 /* Optimize x/expN(y) into x*expN(-y). */
8188 if (BUILTIN_EXPONENT_P (fcode))
8190 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8191 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8192 tree arglist = build_tree_list (NULL_TREE,
8193 fold_convert (type, arg));
8194 arg1 = build_function_call_expr (expfn, arglist);
8195 return fold (build2 (MULT_EXPR, type, arg0, arg1));
8198 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8199 if (fcode == BUILT_IN_POW
8200 || fcode == BUILT_IN_POWF
8201 || fcode == BUILT_IN_POWL)
8203 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8204 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8205 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8206 tree neg11 = fold_convert (type, negate_expr (arg11));
8207 tree arglist = tree_cons (NULL_TREE, arg10,
8208 build_tree_list (NULL_TREE, neg11));
8209 arg1 = build_function_call_expr (powfn, arglist);
8210 return fold (build2 (MULT_EXPR, type, arg0, arg1));
8214 if (flag_unsafe_math_optimizations)
8216 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8217 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8219 /* Optimize sin(x)/cos(x) as tan(x). */
8220 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8221 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8222 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8223 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8224 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8226 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8228 if (tanfn != NULL_TREE)
8229 return build_function_call_expr (tanfn,
8230 TREE_OPERAND (arg0, 1));
8233 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8234 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8235 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8236 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8237 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8238 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8240 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8242 if (tanfn != NULL_TREE)
8244 tree tmp = TREE_OPERAND (arg0, 1);
8245 tmp = build_function_call_expr (tanfn, tmp);
8246 return fold (build2 (RDIV_EXPR, type,
8247 build_real (type, dconst1), tmp));
8251 /* Optimize pow(x,c)/x as pow(x,c-1). */
8252 if (fcode0 == BUILT_IN_POW
8253 || fcode0 == BUILT_IN_POWF
8254 || fcode0 == BUILT_IN_POWL)
8256 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8257 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8258 if (TREE_CODE (arg01) == REAL_CST
8259 && ! TREE_CONSTANT_OVERFLOW (arg01)
8260 && operand_equal_p (arg1, arg00, 0))
8262 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8263 REAL_VALUE_TYPE c;
8264 tree arg, arglist;
8266 c = TREE_REAL_CST (arg01);
8267 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8268 arg = build_real (type, c);
8269 arglist = build_tree_list (NULL_TREE, arg);
8270 arglist = tree_cons (NULL_TREE, arg1, arglist);
8271 return build_function_call_expr (powfn, arglist);
8275 goto binary;
8277 case TRUNC_DIV_EXPR:
8278 case ROUND_DIV_EXPR:
8279 case FLOOR_DIV_EXPR:
8280 case CEIL_DIV_EXPR:
8281 case EXACT_DIV_EXPR:
8282 if (integer_onep (arg1))
8283 return non_lvalue (fold_convert (type, arg0));
8284 if (integer_zerop (arg1))
8285 return t;
8286 /* X / -1 is -X. */
8287 if (!TYPE_UNSIGNED (type)
8288 && TREE_CODE (arg1) == INTEGER_CST
8289 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8290 && TREE_INT_CST_HIGH (arg1) == -1)
8291 return fold_convert (type, negate_expr (arg0));
8293 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8294 operation, EXACT_DIV_EXPR.
8296 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8297 At one time others generated faster code, but it's not clear whether
8298 they still do after the last round of changes to the DIV code in expmed.c. */
8299 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8300 && multiple_of_p (type, arg0, arg1))
8301 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
8303 if (TREE_CODE (arg1) == INTEGER_CST
8304 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
8305 code, NULL_TREE)))
8306 return fold_convert (type, tem);
8308 if (TREE_CODE (type) == COMPLEX_TYPE)
8310 tem = fold_complex_div (type, arg0, arg1, code);
8311 if (tem)
8312 return tem;
8314 goto binary;
8316 case CEIL_MOD_EXPR:
8317 case FLOOR_MOD_EXPR:
8318 case ROUND_MOD_EXPR:
8319 case TRUNC_MOD_EXPR:
8320 /* X % 1 is always zero, but be sure to preserve any side
8321 effects in X. */
8322 if (integer_onep (arg1))
8323 return omit_one_operand (type, integer_zero_node, arg0);
8325 /* For X % 0, return X % 0 unchanged so that we get the
8326 proper warnings and errors. */
8327 if (integer_zerop (arg1))
8328 return t;
8330 /* 0 % X is always zero, but be sure to preserve any side
8331 effects in X. Place this after checking for X == 0. */
8332 if (integer_zerop (arg0))
8333 return omit_one_operand (type, integer_zero_node, arg1);
8335 /* X % -1 is zero. */
8336 if (!TYPE_UNSIGNED (type)
8337 && TREE_CODE (arg1) == INTEGER_CST
8338 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8339 && TREE_INT_CST_HIGH (arg1) == -1)
8340 return omit_one_operand (type, integer_zero_node, arg0);
8342 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
8343 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
8344 if (code == TRUNC_MOD_EXPR
8345 && TYPE_UNSIGNED (type)
8346 && integer_pow2p (arg1))
8348 unsigned HOST_WIDE_INT high, low;
8349 tree mask;
8350 int l;
8352 l = tree_log2 (arg1);
8353 if (l >= HOST_BITS_PER_WIDE_INT)
8355 high = ((unsigned HOST_WIDE_INT) 1
8356 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8357 low = -1;
8359 else
8361 high = 0;
8362 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8365 mask = build_int_cst_wide (type, low, high);
8366 return fold (build2 (BIT_AND_EXPR, type,
8367 fold_convert (type, arg0), mask));
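/* Worked example: for unsigned x, x % 8 becomes x & 7. */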
8370 /* X % -C is the same as X % C. */
8371 if (code == TRUNC_MOD_EXPR
8372 && !TYPE_UNSIGNED (type)
8373 && TREE_CODE (arg1) == INTEGER_CST
8374 && TREE_INT_CST_HIGH (arg1) < 0
8375 && !flag_trapv
8376 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8377 && !sign_bit_p (arg1, arg1))
8378 return fold (build2 (code, type, fold_convert (type, arg0),
8379 fold_convert (type, negate_expr (arg1))));
8381 /* X % -Y is the same as X % Y. */
8382 if (code == TRUNC_MOD_EXPR
8383 && !TYPE_UNSIGNED (type)
8384 && TREE_CODE (arg1) == NEGATE_EXPR
8385 && !flag_trapv)
8386 return fold (build2 (code, type, fold_convert (type, arg0),
8387 fold_convert (type, TREE_OPERAND (arg1, 0))));
8389 if (TREE_CODE (arg1) == INTEGER_CST
8390 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
8391 code, NULL_TREE)))
8392 return fold_convert (type, tem);
8394 goto binary;
8396 case LROTATE_EXPR:
8397 case RROTATE_EXPR:
8398 if (integer_all_onesp (arg0))
8399 return omit_one_operand (type, arg0, arg1);
8400 goto shift;
8402 case RSHIFT_EXPR:
8403 /* Optimize -1 >> x for arithmetic right shifts. */
8404 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8405 return omit_one_operand (type, arg0, arg1);
8406 /* ... fall through ... */
8408 case LSHIFT_EXPR:
8409 shift:
8410 if (integer_zerop (arg1))
8411 return non_lvalue (fold_convert (type, arg0));
8412 if (integer_zerop (arg0))
8413 return omit_one_operand (type, arg0, arg1);
8415 /* Since a negative shift count is not well-defined,
8416 don't try to compute it in the compiler. */
8417 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8418 return t;
8419 /* Rewrite an LROTATE_EXPR by a constant into an
8420 RROTATE_EXPR by a new constant. */
8421 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8423 tree tem = build_int_cst (NULL_TREE,
8424 GET_MODE_BITSIZE (TYPE_MODE (type)));
8425 tem = fold_convert (TREE_TYPE (arg1), tem);
8426 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8427 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
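/* Worked example: in a 32-bit mode, a left rotate by 5 is
rewritten as a right rotate by 27. */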
8430 /* If we have a rotate of a bit operation with the rotate count and
8431 the second operand of the bit operation both constant,
8432 permute the two operations. */
8433 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8434 && (TREE_CODE (arg0) == BIT_AND_EXPR
8435 || TREE_CODE (arg0) == BIT_IOR_EXPR
8436 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8437 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8438 return fold (build2 (TREE_CODE (arg0), type,
8439 fold (build2 (code, type,
8440 TREE_OPERAND (arg0, 0), arg1)),
8441 fold (build2 (code, type,
8442 TREE_OPERAND (arg0, 1), arg1))));
8444 /* Two consecutive rotates adding up to the width of the mode can
8445 be ignored. */
8446 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8447 && TREE_CODE (arg0) == RROTATE_EXPR
8448 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8449 && TREE_INT_CST_HIGH (arg1) == 0
8450 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8451 && ((TREE_INT_CST_LOW (arg1)
8452 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8453 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8454 return TREE_OPERAND (arg0, 0);
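/* Worked example: rotating x right by 10 and then by 22 in a
32-bit mode is a full-width rotation, so the result is x. */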
8456 goto binary;
8458 case MIN_EXPR:
8459 if (operand_equal_p (arg0, arg1, 0))
8460 return omit_one_operand (type, arg0, arg1);
8461 if (INTEGRAL_TYPE_P (type)
8462 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8463 return omit_one_operand (type, arg1, arg0);
8464 goto associate;
8466 case MAX_EXPR:
8467 if (operand_equal_p (arg0, arg1, 0))
8468 return omit_one_operand (type, arg0, arg1);
8469 if (INTEGRAL_TYPE_P (type)
8470 && TYPE_MAX_VALUE (type)
8471 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8472 return omit_one_operand (type, arg1, arg0);
8473 goto associate;
8475 case TRUTH_NOT_EXPR:
8476 /* The argument to invert_truthvalue must have Boolean type. */
8477 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8478 arg0 = fold_convert (boolean_type_node, arg0);
8480 /* Note that the operand of this must be an int
8481 and its value must be 0 or 1.
8482 ("true" is a fixed value perhaps depending on the language,
8483 but we don't handle values other than 1 correctly yet.) */
8484 tem = invert_truthvalue (arg0);
8485 /* Avoid infinite recursion. */
8486 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
8487 return t;
8488 return fold_convert (type, tem);
8490 case TRUTH_ANDIF_EXPR:
8491 /* Note that the operands of this must be ints
8492 and their values must be 0 or 1.
8493 ("true" is a fixed value perhaps depending on the language.) */
8494 /* If first arg is constant zero, return it. */
8495 if (integer_zerop (arg0))
8496 return fold_convert (type, arg0);
8497 case TRUTH_AND_EXPR:
8498 /* If either arg is constant true, drop it. */
8499 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8500 return non_lvalue (fold_convert (type, arg1));
8501 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8502 /* Preserve sequence points. */
8503 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8504 return non_lvalue (fold_convert (type, arg0));
8505 /* If second arg is constant zero, result is zero, but first arg
8506 must be evaluated. */
8507 if (integer_zerop (arg1))
8508 return omit_one_operand (type, arg1, arg0);
8509 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8510 case will be handled here. */
8511 if (integer_zerop (arg0))
8512 return omit_one_operand (type, arg0, arg1);
8514 /* !X && X is always false. */
8515 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8516 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8517 return omit_one_operand (type, integer_zero_node, arg1);
8518 /* X && !X is always false. */
8519 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8520 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8521 return omit_one_operand (type, integer_zero_node, arg0);
8523 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8524 means A >= Y && A != MAX, but in this case we know that
8525 A < X <= MAX. */
8527 if (!TREE_SIDE_EFFECTS (arg0)
8528 && !TREE_SIDE_EFFECTS (arg1))
8530 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8531 if (tem)
8532 return fold (build2 (code, type, tem, arg1));
8534 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8535 if (tem)
8536 return fold (build2 (code, type, arg0, tem));
8539 truth_andor:
8540 /* We only do these simplifications if we are optimizing. */
8541 if (!optimize)
8542 return t;
8544 /* Check for things like (A || B) && (A || C). We can convert this
8545 to A || (B && C). Note that either operator can be any of the four
8546 truth and/or operations and the transformation will still be
8547 valid. Also note that we only care about order for the
8548 ANDIF and ORIF operators. If B contains side effects, this
8549 might change the truth-value of A. */
8550 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8551 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8552 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8553 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8554 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8555 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8557 tree a00 = TREE_OPERAND (arg0, 0);
8558 tree a01 = TREE_OPERAND (arg0, 1);
8559 tree a10 = TREE_OPERAND (arg1, 0);
8560 tree a11 = TREE_OPERAND (arg1, 1);
8561 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8562 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8563 && (code == TRUTH_AND_EXPR
8564 || code == TRUTH_OR_EXPR));
8566 if (operand_equal_p (a00, a10, 0))
8567 return fold (build2 (TREE_CODE (arg0), type, a00,
8568 fold (build2 (code, type, a01, a11))));
8569 else if (commutative && operand_equal_p (a00, a11, 0))
8570 return fold (build2 (TREE_CODE (arg0), type, a00,
8571 fold (build2 (code, type, a01, a10))));
8572 else if (commutative && operand_equal_p (a01, a10, 0))
8573 return fold (build2 (TREE_CODE (arg0), type, a01,
8574 fold (build2 (code, type, a00, a11))));
8576 /* This case is tricky because we must either have commutative
8577 operators or else A10 must not have side-effects. */
8579 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8580 && operand_equal_p (a01, a11, 0))
8581 return fold (build2 (TREE_CODE (arg0), type,
8582 fold (build2 (code, type, a00, a10)),
8583 a01));
8586 /* See if we can build a range comparison. */
8587 if (0 != (tem = fold_range_test (t)))
8588 return tem;
8590 /* Check for the possibility of merging component references. If our
8591 lhs is another similar operation, try to merge its rhs with our
8592 rhs. Then try to merge our lhs and rhs. */
8593 if (TREE_CODE (arg0) == code
8594 && 0 != (tem = fold_truthop (code, type,
8595 TREE_OPERAND (arg0, 1), arg1)))
8596 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8598 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8599 return tem;
8601 return t;
8603 case TRUTH_ORIF_EXPR:
8604 /* Note that the operands of this must be ints
8605 and their values must be 0 or 1.
8606 ("true" is a fixed value perhaps depending on the language.) */
8607 /* If first arg is constant true, return it. */
8608 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8609 return fold_convert (type, arg0);
8610 case TRUTH_OR_EXPR:
8611 /* If either arg is constant zero, drop it. */
8612 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8613 return non_lvalue (fold_convert (type, arg1));
8614 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8615 /* Preserve sequence points. */
8616 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8617 return non_lvalue (fold_convert (type, arg0));
8618 /* If second arg is constant true, result is true, but we must
8619 evaluate first arg. */
8620 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8621 return omit_one_operand (type, arg1, arg0);
8622 /* Likewise for first arg, but note this only occurs here for
8623 TRUTH_OR_EXPR. */
8624 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8625 return omit_one_operand (type, arg0, arg1);
8627 /* !X || X is always true. */
8628 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8629 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8630 return omit_one_operand (type, integer_one_node, arg1);
8631 /* X || !X is always true. */
8632 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8633 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8634 return omit_one_operand (type, integer_one_node, arg0);
8636 goto truth_andor;
8638 case TRUTH_XOR_EXPR:
8639 /* If the second arg is constant zero, drop it. */
8640 if (integer_zerop (arg1))
8641 return non_lvalue (fold_convert (type, arg0));
8642 /* If the second arg is constant true, this is a logical inversion. */
8643 if (integer_onep (arg1))
8644 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
8645 /* Identical arguments cancel to zero. */
8646 if (operand_equal_p (arg0, arg1, 0))
8647 return omit_one_operand (type, integer_zero_node, arg0);
8649 /* !X ^ X is always true. */
8650 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8651 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8652 return omit_one_operand (type, integer_one_node, arg1);
8654 /* X ^ !X is always true. */
8655 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8656 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8657 return omit_one_operand (type, integer_one_node, arg0);
8659 return t;
8661 case EQ_EXPR:
8662 case NE_EXPR:
8663 case LT_EXPR:
8664 case GT_EXPR:
8665 case LE_EXPR:
8666 case GE_EXPR:
8667 /* If one arg is a real or integer constant, put it last. */
8668 if (tree_swap_operands_p (arg0, arg1, true))
8669 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
8671 /* If this is an equality comparison of the address of a non-weak
8672 object against zero, then we know the result. */
8673 if ((code == EQ_EXPR || code == NE_EXPR)
8674 && TREE_CODE (arg0) == ADDR_EXPR
8675 && DECL_P (TREE_OPERAND (arg0, 0))
8676 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8677 && integer_zerop (arg1))
8678 return constant_boolean_node (code != EQ_EXPR, type);
8680 /* If this is an equality comparison of the address of two non-weak,
8681 unaliased symbols neither of which are extern (since we do not
8682 have access to attributes for externs), then we know the result. */
8683 if ((code == EQ_EXPR || code == NE_EXPR)
8684 && TREE_CODE (arg0) == ADDR_EXPR
8685 && DECL_P (TREE_OPERAND (arg0, 0))
8686 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8687 && ! lookup_attribute ("alias",
8688 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8689 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8690 && TREE_CODE (arg1) == ADDR_EXPR
8691 && DECL_P (TREE_OPERAND (arg1, 0))
8692 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8693 && ! lookup_attribute ("alias",
8694 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8695 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8696 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
8697 ? code == EQ_EXPR : code != EQ_EXPR,
8698 type);
8700 /* If this is a comparison of two exprs that look like an
8701 ARRAY_REF of the same object, then we can fold this to a
8702 comparison of the two offsets. */
8703 if (COMPARISON_CLASS_P (t))
8705 tree base0, offset0, base1, offset1;
8707 if (extract_array_ref (arg0, &base0, &offset0)
8708 && extract_array_ref (arg1, &base1, &offset1)
8709 && operand_equal_p (base0, base1, 0))
8711 if (offset0 == NULL_TREE
8712 && offset1 == NULL_TREE)
8714 offset0 = integer_zero_node;
8715 offset1 = integer_zero_node;
8717 else if (offset0 == NULL_TREE)
8718 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8719 else if (offset1 == NULL_TREE)
8720 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8722 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
8723 return fold (build2 (code, type, offset0, offset1));
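/* Worked example: a comparison such as &a[i1] == &a[i2] with a
common base reduces to comparing the two offsets. */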
8727 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8729 tree targ0 = strip_float_extensions (arg0);
8730 tree targ1 = strip_float_extensions (arg1);
8731 tree newtype = TREE_TYPE (targ0);
8733 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8734 newtype = TREE_TYPE (targ1);
8736 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8737 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8738 return fold (build2 (code, type, fold_convert (newtype, targ0),
8739 fold_convert (newtype, targ1)));
8741 /* (-a) CMP (-b) -> b CMP a */
8742 if (TREE_CODE (arg0) == NEGATE_EXPR
8743 && TREE_CODE (arg1) == NEGATE_EXPR)
8744 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
8745 TREE_OPERAND (arg0, 0)));
8747 if (TREE_CODE (arg1) == REAL_CST)
8749 REAL_VALUE_TYPE cst;
8750 cst = TREE_REAL_CST (arg1);
8752 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8753 if (TREE_CODE (arg0) == NEGATE_EXPR)
8754 return
8755 fold (build2 (swap_tree_comparison (code), type,
8756 TREE_OPERAND (arg0, 0),
8757 build_real (TREE_TYPE (arg1),
8758 REAL_VALUE_NEGATE (cst))));
8760 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8761 /* a CMP (-0) -> a CMP 0 */
8762 if (REAL_VALUE_MINUS_ZERO (cst))
8763 return fold (build2 (code, type, arg0,
8764 build_real (TREE_TYPE (arg1), dconst0)));
8766 /* x != NaN is always true, other ops are always false. */
8767 if (REAL_VALUE_ISNAN (cst)
8768 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8770 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8771 return omit_one_operand (type, tem, arg0);
8774 /* Fold comparisons against infinity. */
8775 if (REAL_VALUE_ISINF (cst))
8777 tem = fold_inf_compare (code, type, arg0, arg1);
8778 if (tem != NULL_TREE)
8779 return tem;
8783 /* If this is a comparison of a real constant with a PLUS_EXPR
8784 or a MINUS_EXPR of a real constant, we can convert it into a
8785 comparison with a revised real constant as long as no overflow
8786 occurs when unsafe_math_optimizations are enabled. */
8787 if (flag_unsafe_math_optimizations
8788 && TREE_CODE (arg1) == REAL_CST
8789 && (TREE_CODE (arg0) == PLUS_EXPR
8790 || TREE_CODE (arg0) == MINUS_EXPR)
8791 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8792 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8793 ? MINUS_EXPR : PLUS_EXPR,
8794 arg1, TREE_OPERAND (arg0, 1), 0))
8795 && ! TREE_CONSTANT_OVERFLOW (tem))
8796 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8798 /* Likewise, we can simplify a comparison of a real constant with
8799 a MINUS_EXPR whose first operand is also a real constant, i.e.
8800 (c1 - x) < c2 becomes x > c1-c2. */
8801 if (flag_unsafe_math_optimizations
8802 && TREE_CODE (arg1) == REAL_CST
8803 && TREE_CODE (arg0) == MINUS_EXPR
8804 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8805 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8806 arg1, 0))
8807 && ! TREE_CONSTANT_OVERFLOW (tem))
8808 return fold (build2 (swap_tree_comparison (code), type,
8809 TREE_OPERAND (arg0, 1), tem));
8811 /* Fold comparisons against built-in math functions. */
8812 if (TREE_CODE (arg1) == REAL_CST
8813 && flag_unsafe_math_optimizations
8814 && ! flag_errno_math)
8816 enum built_in_function fcode = builtin_mathfn_code (arg0);
8818 if (fcode != END_BUILTINS)
8820 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8821 if (tem != NULL_TREE)
8822 return tem;
8827 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8828 if (TREE_CONSTANT (arg1)
8829 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8830 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8831 /* This optimization is invalid for ordered comparisons
8832 if CONST+INCR overflows or if foo+incr might overflow.
8833 This optimization is invalid for floating point due to rounding.
8834 For pointer types we assume overflow doesn't happen. */
8835 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8836 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8837 && (code == EQ_EXPR || code == NE_EXPR))))
8839 tree varop, newconst;
8841 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8843 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
8844 arg1, TREE_OPERAND (arg0, 1)));
8845 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8846 TREE_OPERAND (arg0, 0),
8847 TREE_OPERAND (arg0, 1));
8849 else
8851 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
8852 arg1, TREE_OPERAND (arg0, 1)));
8853 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8854 TREE_OPERAND (arg0, 0),
8855 TREE_OPERAND (arg0, 1));
8859 /* If VAROP is a reference to a bitfield, we must mask
8860 the constant by the width of the field. */
8861 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8862 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8863 && host_integerp (DECL_SIZE (TREE_OPERAND
8864 (TREE_OPERAND (varop, 0), 1)), 1))
8866 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8867 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8868 tree folded_compare, shift;
8870 /* First check whether the comparison would come out
8871 always the same. If we skipped that check, the masking
8872 could change the meaning of the comparison. */
8873 folded_compare = fold (build2 (code, type,
8874 TREE_OPERAND (varop, 0), arg1));
8875 if (integer_zerop (folded_compare)
8876 || integer_onep (folded_compare))
8877 return omit_one_operand (type, folded_compare, varop);
8879 shift = build_int_cst (NULL_TREE,
8880 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8881 shift = fold_convert (TREE_TYPE (varop), shift);
8882 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8883 newconst, shift));
8884 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8885 newconst, shift));
8888 return fold (build2 (code, type, varop, newconst));
8891 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8892 This transformation affects the cases which are handled in later
8893 optimizations involving comparisons with non-negative constants. */
8894 if (TREE_CODE (arg1) == INTEGER_CST
8895 && TREE_CODE (arg0) != INTEGER_CST
8896 && tree_int_cst_sgn (arg1) > 0)
8898 switch (code)
8900 case GE_EXPR:
8901 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8902 return fold (build2 (GT_EXPR, type, arg0, arg1));
8904 case LT_EXPR:
8905 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8906 return fold (build2 (LE_EXPR, type, arg0, arg1));
8908 default:
8909 break;
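/* Worked example: x >= 5 becomes x > 4 and x < 5 becomes x <= 4,
canonicalizing toward the smaller constant. */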
8913 /* Comparisons with the highest or lowest possible integer of
8914 the specified size will have known values.
8916 This is quite similar to fold_relational_hi_lo; however,
8917 attempts to share the code have been nothing but trouble. */
8919 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8921 if (TREE_CODE (arg1) == INTEGER_CST
8922 && ! TREE_CONSTANT_OVERFLOW (arg1)
8923 && width <= 2 * HOST_BITS_PER_WIDE_INT
8924 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8925 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8927 HOST_WIDE_INT signed_max_hi;
8928 unsigned HOST_WIDE_INT signed_max_lo;
8929 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
8931 if (width <= HOST_BITS_PER_WIDE_INT)
8933 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
8934 - 1;
8935 signed_max_hi = 0;
8936 max_hi = 0;
8938 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8940 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8941 min_lo = 0;
8942 min_hi = 0;
8944 else
8946 max_lo = signed_max_lo;
8947 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8948 min_hi = -1;
8951 else
8953 width -= HOST_BITS_PER_WIDE_INT;
8954 signed_max_lo = -1;
8955 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
8956 - 1;
8957 max_lo = -1;
8958 min_lo = 0;
8960 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8962 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8963 min_hi = 0;
8965 else
8967 max_hi = signed_max_hi;
8968 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8972 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
8973 && TREE_INT_CST_LOW (arg1) == max_lo)
8974 switch (code)
8976 case GT_EXPR:
8977 return omit_one_operand (type, integer_zero_node, arg0);
8979 case GE_EXPR:
8980 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8982 case LE_EXPR:
8983 return omit_one_operand (type, integer_one_node, arg0);
8985 case LT_EXPR:
8986 return fold (build2 (NE_EXPR, type, arg0, arg1));
8988 /* The GE_EXPR and LT_EXPR cases above are not normally
8989 reached because of previous transformations. */
8991 default:
8992 break;
8994 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
8995 == max_hi
8996 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
8997 switch (code)
8999 case GT_EXPR:
9000 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9001 return fold (build2 (EQ_EXPR, type, arg0, arg1));
9002 case LE_EXPR:
9003 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9004 return fold (build2 (NE_EXPR, type, arg0, arg1));
9005 default:
9006 break;
9008 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9009 == min_hi
9010 && TREE_INT_CST_LOW (arg1) == min_lo)
9011 switch (code)
9013 case LT_EXPR:
9014 return omit_one_operand (type, integer_zero_node, arg0);
9016 case LE_EXPR:
9017 return fold (build2 (EQ_EXPR, type, arg0, arg1));
9019 case GE_EXPR:
9020 return omit_one_operand (type, integer_one_node, arg0);
9022 case GT_EXPR:
9023 return fold (build2 (NE_EXPR, type, arg0, arg1));
9025 default:
9026 break;
9028 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9029 == min_hi
9030 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9031 switch (code)
9033 case GE_EXPR:
9034 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9035 return fold (build2 (NE_EXPR, type, arg0, arg1));
9036 case LT_EXPR:
9037 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9038 return fold (build2 (EQ_EXPR, type, arg0, arg1));
9039 default:
9040 break;
9043 else if (!in_gimple_form
9044 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9045 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9046 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9047 /* signed_type does not work on pointer types. */
9048 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9050 /* The following case also applies to X < signed_max+1
9051 and X >= signed_max+1 because of previous transformations. */
9052 if (code == LE_EXPR || code == GT_EXPR)
9054 tree st0, st1;
9055 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9056 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9057 return fold
9058 (build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
9059 type, fold_convert (st0, arg0),
9060 fold_convert (st1, integer_zero_node)));
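/* Illustration, assuming 32-bit int: for "unsigned int x",
   "x <= 2147483647" becomes "(int) x >= 0" and "x > 2147483647"
   becomes "(int) x < 0", turning a comparison against the signed
   maximum into a simple sign test.  */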
9066 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9067 a MINUS_EXPR of a constant, we can convert it into a comparison with
9068 a revised constant as long as no overflow occurs. */
9069 if ((code == EQ_EXPR || code == NE_EXPR)
9070 && TREE_CODE (arg1) == INTEGER_CST
9071 && (TREE_CODE (arg0) == PLUS_EXPR
9072 || TREE_CODE (arg0) == MINUS_EXPR)
9073 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9074 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9075 ? MINUS_EXPR : PLUS_EXPR,
9076 arg1, TREE_OPERAND (arg0, 1), 0))
9077 && ! TREE_CONSTANT_OVERFLOW (tem))
9078 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
9080 /* Similarly for a NEGATE_EXPR. */
9081 else if ((code == EQ_EXPR || code == NE_EXPR)
9082 && TREE_CODE (arg0) == NEGATE_EXPR
9083 && TREE_CODE (arg1) == INTEGER_CST
9084 && 0 != (tem = negate_expr (arg1))
9085 && TREE_CODE (tem) == INTEGER_CST
9086 && ! TREE_CONSTANT_OVERFLOW (tem))
9087 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
9089 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9090 for !=. Don't do this for ordered comparisons due to overflow. */
9091 else if ((code == NE_EXPR || code == EQ_EXPR)
9092 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9093 return fold (build2 (code, type,
9094 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
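/* Examples of the three rewrites above: "x + 3 == 7" becomes
   "x == 4", "-x == 5" becomes "x == -5", and "x - y != 0"
   becomes "x != y".  */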
9096 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9097 && TREE_CODE (arg0) == NOP_EXPR)
9099 /* If we are widening one operand of an integer comparison,
9100 see if the other operand is similarly being widened. Perhaps we
9101 can do the comparison in the narrower type. */
9102 tem = fold_widened_comparison (code, type, arg0, arg1);
9103 if (tem)
9104 return tem;
9106 /* Or if we are changing signedness. */
9107 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9108 if (tem)
9109 return tem;
9112 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9113 constant, we can simplify it. */
9114 else if (TREE_CODE (arg1) == INTEGER_CST
9115 && (TREE_CODE (arg0) == MIN_EXPR
9116 || TREE_CODE (arg0) == MAX_EXPR)
9117 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9118 return optimize_minmax_comparison (t);
9120 /* If we are comparing an ABS_EXPR with a constant, we can
9121 convert all the cases into explicit comparisons, but they may
9122 well not be faster than doing the ABS and one comparison.
9123 But ABS (X) <= C is a range comparison, which becomes a subtraction
9124 and a comparison, and is probably faster. */
9125 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9126 && TREE_CODE (arg0) == ABS_EXPR
9127 && ! TREE_SIDE_EFFECTS (arg0)
9128 && (0 != (tem = negate_expr (arg1)))
9129 && TREE_CODE (tem) == INTEGER_CST
9130 && ! TREE_CONSTANT_OVERFLOW (tem))
9131 return fold (build2 (TRUTH_ANDIF_EXPR, type,
9132 build2 (GE_EXPR, type,
9133 TREE_OPERAND (arg0, 0), tem),
9134 build2 (LE_EXPR, type,
9135 TREE_OPERAND (arg0, 0), arg1)));
9137 /* Convert ABS_EXPR<x> >= 0 to true. */
9138 else if (code == GE_EXPR
9139 && tree_expr_nonnegative_p (arg0)
9140 && (integer_zerop (arg1)
9141 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9142 && real_zerop (arg1))))
9143 return omit_one_operand (type, integer_one_node, arg0);
9145 /* Convert ABS_EXPR<x> < 0 to false. */
9146 else if (code == LT_EXPR
9147 && tree_expr_nonnegative_p (arg0)
9148 && (integer_zerop (arg1) || real_zerop (arg1)))
9149 return omit_one_operand (type, integer_zero_node, arg0);
9151 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9152 else if ((code == EQ_EXPR || code == NE_EXPR)
9153 && TREE_CODE (arg0) == ABS_EXPR
9154 && (integer_zerop (arg1) || real_zerop (arg1)))
9155 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), arg1));
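/* E.g. "abs (x) <= 5" becomes "x >= -5 && x <= 5", and
   "abs (x) == 0" becomes "x == 0".  */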
9157 /* If this is an EQ or NE comparison with zero and ARG0 is
9158 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9159 two operations, but the latter can be done in one less insn
9160 on machines that have only two-operand insns or on which a
9161 constant cannot be the first operand. */
9162 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9163 && TREE_CODE (arg0) == BIT_AND_EXPR)
9165 tree arg00 = TREE_OPERAND (arg0, 0);
9166 tree arg01 = TREE_OPERAND (arg0, 1);
9167 if (TREE_CODE (arg00) == LSHIFT_EXPR
9168 && integer_onep (TREE_OPERAND (arg00, 0)))
9169 return
9170 fold (build2 (code, type,
9171 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9172 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9173 arg01, TREE_OPERAND (arg00, 1)),
9174 fold_convert (TREE_TYPE (arg0),
9175 integer_one_node)),
9176 arg1));
9177 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9178 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9179 return
9180 fold (build2 (code, type,
9181 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9182 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9183 arg00, TREE_OPERAND (arg01, 1)),
9184 fold_convert (TREE_TYPE (arg0),
9185 integer_one_node)),
9186 arg1));
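/* E.g. "((1 << n) & flags) != 0" is rewritten as
   "((flags >> n) & 1) != 0", which needs one less instruction
   on the two-operand targets described above.  */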
9189 /* If this is an NE or EQ comparison of zero against the result of a
9190 signed MOD operation whose second operand is a power of 2, make
9191 the MOD operation unsigned since it is simpler and equivalent. */
9192 if ((code == NE_EXPR || code == EQ_EXPR)
9193 && integer_zerop (arg1)
9194 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9195 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9196 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9197 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9198 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9199 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9201 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9202 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
9203 fold_convert (newtype,
9204 TREE_OPERAND (arg0, 0)),
9205 fold_convert (newtype,
9206 TREE_OPERAND (arg0, 1))));
9208 return fold (build2 (code, type, newmod,
9209 fold_convert (newtype, arg1)));
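/* E.g. with "int x", "x % 4 == 0" becomes
   "(unsigned int) x % 4 == 0": for a power-of-2 divisor the
   zero test only inspects the low-order bits, so the unsigned
   form is equivalent and cheaper to expand.  */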
9212 /* If this is an NE comparison of zero with an AND of one, remove the
9213 comparison since the AND will give the correct value. */
9214 if (code == NE_EXPR && integer_zerop (arg1)
9215 && TREE_CODE (arg0) == BIT_AND_EXPR
9216 && integer_onep (TREE_OPERAND (arg0, 1)))
9217 return fold_convert (type, arg0);
9219 /* If we have (A & C) == C where C is a power of 2, convert this into
9220 (A & C) != 0. Similarly for NE_EXPR. */
9221 if ((code == EQ_EXPR || code == NE_EXPR)
9222 && TREE_CODE (arg0) == BIT_AND_EXPR
9223 && integer_pow2p (TREE_OPERAND (arg0, 1))
9224 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9225 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9226 arg0, fold_convert (TREE_TYPE (arg0),
9227 integer_zero_node)));
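/* E.g. "(flags & 8) == 8" becomes "(flags & 8) != 0" and
   "(flags & 8) != 8" becomes "(flags & 8) == 0"; with a single
   bit set in C, both forms test exactly that bit.  */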
9229 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
9230 2, then fold the expression into shifts and logical operations. */
9231 tem = fold_single_bit_test (code, arg0, arg1, type);
9232 if (tem)
9233 return tem;
9235 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9236 Similarly for NE_EXPR. */
9237 if ((code == EQ_EXPR || code == NE_EXPR)
9238 && TREE_CODE (arg0) == BIT_AND_EXPR
9239 && TREE_CODE (arg1) == INTEGER_CST
9240 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9242 tree notc = fold (build1 (BIT_NOT_EXPR,
9243 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9244 TREE_OPERAND (arg0, 1)));
9245 tree dandnotc = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9246 arg1, notc));
9247 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9248 if (integer_nonzerop (dandnotc))
9249 return omit_one_operand (type, rslt, arg0);
9252 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9253 Similarly for NE_EXPR. */
9254 if ((code == EQ_EXPR || code == NE_EXPR)
9255 && TREE_CODE (arg0) == BIT_IOR_EXPR
9256 && TREE_CODE (arg1) == INTEGER_CST
9257 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9259 tree notd = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
9260 tree candnotd = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9261 TREE_OPERAND (arg0, 1), notd));
9262 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9263 if (integer_nonzerop (candnotd))
9264 return omit_one_operand (type, rslt, arg0);
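/* E.g. "(x & 4) == 3" and "(x | 4) == 3" both fold to 0: the
   AND can never produce a bit outside C, and the OR always
   produces bit 2, which D lacks.  */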
9267 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9268 and similarly for >= into !=. */
9269 if ((code == LT_EXPR || code == GE_EXPR)
9270 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9271 && TREE_CODE (arg1) == LSHIFT_EXPR
9272 && integer_onep (TREE_OPERAND (arg1, 0)))
9273 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9274 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9275 TREE_OPERAND (arg1, 1)),
9276 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9278 else if ((code == LT_EXPR || code == GE_EXPR)
9279 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9280 && (TREE_CODE (arg1) == NOP_EXPR
9281 || TREE_CODE (arg1) == CONVERT_EXPR)
9282 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9283 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9284 return
9285 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9286 fold_convert (TREE_TYPE (arg0),
9287 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9288 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9289 1))),
9290 fold_convert (TREE_TYPE (arg0), integer_zero_node));
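/* E.g. for "unsigned int x", "x < (1U << y)" becomes
   "(x >> y) == 0" and "x >= (1U << y)" becomes "(x >> y) != 0".  */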
9292 /* Simplify comparison of something with itself. (For IEEE
9293 floating-point, we can only do some of these simplifications.) */
9294 if (operand_equal_p (arg0, arg1, 0))
9296 switch (code)
9298 case EQ_EXPR:
9299 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9300 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9301 return constant_boolean_node (1, type);
9302 break;
9304 case GE_EXPR:
9305 case LE_EXPR:
9306 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9307 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9308 return constant_boolean_node (1, type);
9309 return fold (build2 (EQ_EXPR, type, arg0, arg1));
9311 case NE_EXPR:
9312 /* For NE, we can only do this simplification if integer
9313 or we don't honor IEEE floating point NaNs. */
9314 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9315 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9316 break;
9317 /* ... fall through ... */
9318 case GT_EXPR:
9319 case LT_EXPR:
9320 return constant_boolean_node (0, type);
9321 default:
9322 gcc_unreachable ();
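/* E.g. for "int i", "i == i" and "i <= i" fold to 1 while
   "i < i" folds to 0; for "double d" under IEEE semantics
   "d == d" must survive, since it is false when d is a NaN.  */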
9326 /* If we are comparing an expression that just has comparisons
9327 of two integer values, arithmetic expressions of those comparisons,
9328 and constants, we can simplify it. There are only three cases
9329 to check: the two values can either be equal, the first can be
9330 greater, or the second can be greater. Fold the expression for
9331 those three values. Since each value must be 0 or 1, we have
9332 eight possibilities, each of which corresponds to the constant 0
9333 or 1 or one of the six possible comparisons.
9335 This handles common cases like (a > b) == 0 but also handles
9336 expressions like ((x > y) - (y > x)) > 0, which supposedly
9337 occur in macroized code. */
9339 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9341 tree cval1 = 0, cval2 = 0;
9342 int save_p = 0;
9344 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9345 /* Don't handle degenerate cases here; they should already
9346 have been handled anyway. */
9347 && cval1 != 0 && cval2 != 0
9348 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9349 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9350 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9351 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9352 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9353 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9354 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9356 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9357 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9359 /* We can't just pass T to eval_subst in case cval1 or cval2
9360 was the same as ARG1. */
9362 tree high_result
9363 = fold (build2 (code, type,
9364 eval_subst (arg0, cval1, maxval,
9365 cval2, minval),
9366 arg1));
9367 tree equal_result
9368 = fold (build2 (code, type,
9369 eval_subst (arg0, cval1, maxval,
9370 cval2, maxval),
9371 arg1));
9372 tree low_result
9373 = fold (build2 (code, type,
9374 eval_subst (arg0, cval1, minval,
9375 cval2, maxval),
9376 arg1));
9378 /* All three of these results should be 0 or 1. Confirm they
9379 are. Then use those values to select the proper code
9380 to use. */
9382 if ((integer_zerop (high_result)
9383 || integer_onep (high_result))
9384 && (integer_zerop (equal_result)
9385 || integer_onep (equal_result))
9386 && (integer_zerop (low_result)
9387 || integer_onep (low_result)))
9389 /* Make a 3-bit mask with the high-order bit being the
9390 value for `>', the next for `=', and the low for `<'. */
9391 switch ((integer_onep (high_result) * 4)
9392 + (integer_onep (equal_result) * 2)
9393 + integer_onep (low_result))
9395 case 0:
9396 /* Always false. */
9397 return omit_one_operand (type, integer_zero_node, arg0);
9398 case 1:
9399 code = LT_EXPR;
9400 break;
9401 case 2:
9402 code = EQ_EXPR;
9403 break;
9404 case 3:
9405 code = LE_EXPR;
9406 break;
9407 case 4:
9408 code = GT_EXPR;
9409 break;
9410 case 5:
9411 code = NE_EXPR;
9412 break;
9413 case 6:
9414 code = GE_EXPR;
9415 break;
9416 case 7:
9417 /* Always true. */
9418 return omit_one_operand (type, integer_one_node, arg0);
9421 tem = build2 (code, type, cval1, cval2);
9422 if (save_p)
9423 return save_expr (tem);
9424 else
9425 return fold (tem);
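/* E.g. "(a > b) - (b > a) > 0" evaluates to 1 only in the
   "first greater" case, giving the mask 4, so the whole
   expression folds to "a > b".  */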
9430 /* If this is a comparison of a field, we may be able to simplify it. */
9431 if (((TREE_CODE (arg0) == COMPONENT_REF
9432 && lang_hooks.can_use_bit_fields_p ())
9433 || TREE_CODE (arg0) == BIT_FIELD_REF)
9434 && (code == EQ_EXPR || code == NE_EXPR)
9435 /* Handle the constant case even without -O
9436 to make sure the warnings are given. */
9437 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9439 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9440 if (t1)
9441 return t1;
9444 /* If this is a comparison of complex values and either or both sides
9445 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
9446 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
9447 This may prevent needless evaluations. */
9448 if ((code == EQ_EXPR || code == NE_EXPR)
9449 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
9450 && (TREE_CODE (arg0) == COMPLEX_EXPR
9451 || TREE_CODE (arg1) == COMPLEX_EXPR
9452 || TREE_CODE (arg0) == COMPLEX_CST
9453 || TREE_CODE (arg1) == COMPLEX_CST))
9455 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
9456 tree real0, imag0, real1, imag1;
9458 arg0 = save_expr (arg0);
9459 arg1 = save_expr (arg1);
9460 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
9461 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
9462 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
9463 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
9465 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
9466 : TRUTH_ORIF_EXPR),
9467 type,
9468 fold (build2 (code, type, real0, real1)),
9469 fold (build2 (code, type, imag0, imag1))));
9472 /* Optimize comparisons of strlen vs zero to a compare of the
9473 first character of the string vs zero. To wit,
9474 strlen(ptr) == 0 => *ptr == 0
9475 strlen(ptr) != 0 => *ptr != 0
9476 Other cases should reduce to one of these two (or a constant)
9477 due to the return value of strlen being unsigned. */
9478 if ((code == EQ_EXPR || code == NE_EXPR)
9479 && integer_zerop (arg1)
9480 && TREE_CODE (arg0) == CALL_EXPR)
9482 tree fndecl = get_callee_fndecl (arg0);
9483 tree arglist;
9485 if (fndecl
9486 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9487 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9488 && (arglist = TREE_OPERAND (arg0, 1))
9489 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9490 && ! TREE_CHAIN (arglist))
9491 return fold (build2 (code, type,
9492 build1 (INDIRECT_REF, char_type_node,
9493 TREE_VALUE (arglist)),
9494 fold_convert (char_type_node,
9495 integer_zero_node)));
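/* E.g. "strlen (s) == 0" becomes "*s == 0"; "strlen (s) > 0"
   first becomes "strlen (s) != 0" via the unsigned-minimum
   transformation above, and then "*s != 0" here.  */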
9498 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9499 into a single range test. */
9500 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9501 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9502 && TREE_CODE (arg1) == INTEGER_CST
9503 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9504 && !integer_zerop (TREE_OPERAND (arg0, 1))
9505 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9506 && !TREE_OVERFLOW (arg1))
9508 t1 = fold_div_compare (code, type, arg0, arg1);
9509 if (t1 != NULL_TREE)
9510 return t1;
9513 if ((code == EQ_EXPR || code == NE_EXPR)
9514 && !TREE_SIDE_EFFECTS (arg0)
9515 && integer_zerop (arg1)
9516 && tree_expr_nonzero_p (arg0))
9517 return constant_boolean_node (code == NE_EXPR, type);
9519 t1 = fold_relational_const (code, type, arg0, arg1);
9520 return t1 == NULL_TREE ? t : t1;
9522 case UNORDERED_EXPR:
9523 case ORDERED_EXPR:
9524 case UNLT_EXPR:
9525 case UNLE_EXPR:
9526 case UNGT_EXPR:
9527 case UNGE_EXPR:
9528 case UNEQ_EXPR:
9529 case LTGT_EXPR:
9530 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9532 t1 = fold_relational_const (code, type, arg0, arg1);
9533 if (t1 != NULL_TREE)
9534 return t1;
9537 /* If the first operand is NaN, the result is constant. */
9538 if (TREE_CODE (arg0) == REAL_CST
9539 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9540 && (code != LTGT_EXPR || ! flag_trapping_math))
9542 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9543 ? integer_zero_node
9544 : integer_one_node;
9545 return omit_one_operand (type, t1, arg1);
9548 /* If the second operand is NaN, the result is constant. */
9549 if (TREE_CODE (arg1) == REAL_CST
9550 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9551 && (code != LTGT_EXPR || ! flag_trapping_math))
9553 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9554 ? integer_zero_node
9555 : integer_one_node;
9556 return omit_one_operand (type, t1, arg0);
9559 /* Simplify unordered comparison of something with itself. */
9560 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9561 && operand_equal_p (arg0, arg1, 0))
9562 return constant_boolean_node (1, type);
9564 if (code == LTGT_EXPR
9565 && !flag_trapping_math
9566 && operand_equal_p (arg0, arg1, 0))
9567 return constant_boolean_node (0, type);
9569 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9571 tree targ0 = strip_float_extensions (arg0);
9572 tree targ1 = strip_float_extensions (arg1);
9573 tree newtype = TREE_TYPE (targ0);
9575 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9576 newtype = TREE_TYPE (targ1);
9578 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9579 return fold (build2 (code, type, fold_convert (newtype, targ0),
9580 fold_convert (newtype, targ1)));
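/* E.g. with "float f1, f2", "(double) f1 < (double) f2" is
   done as "f1 < f2": widening both operands cannot change the
   outcome of the comparison.  */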
9583 return t;
9585 case COND_EXPR:
9586 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9587 so all simple results must be passed through pedantic_non_lvalue. */
9588 if (TREE_CODE (arg0) == INTEGER_CST)
9590 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
9591 /* Only optimize constant conditions when the selected branch
9592 has the same type as the COND_EXPR. This avoids optimizing
9593 away "c ? x : throw", where the throw has a void type. */
9594 if (! VOID_TYPE_P (TREE_TYPE (tem))
9595 || VOID_TYPE_P (type))
9596 return pedantic_non_lvalue (tem);
9597 return t;
9599 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
9600 return pedantic_omit_one_operand (type, arg1, arg0);
9602 /* If we have A op B ? A : C, we may be able to convert this to a
9603 simpler expression, depending on the operation and the values
9604 of B and C. Signed zeros prevent all of these transformations,
9605 for reasons given above each one.
9607 Also try swapping the arguments and inverting the conditional. */
9608 if (COMPARISON_CLASS_P (arg0)
9609 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9610 arg1, TREE_OPERAND (arg0, 1))
9611 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9613 tem = fold_cond_expr_with_comparison (type, arg0,
9614 TREE_OPERAND (t, 1),
9615 TREE_OPERAND (t, 2));
9616 if (tem)
9617 return tem;
9620 if (COMPARISON_CLASS_P (arg0)
9621 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9622 TREE_OPERAND (t, 2),
9623 TREE_OPERAND (arg0, 1))
9624 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
9626 tem = invert_truthvalue (arg0);
9627 if (COMPARISON_CLASS_P (tem))
9629 tem = fold_cond_expr_with_comparison (type, tem,
9630 TREE_OPERAND (t, 2),
9631 TREE_OPERAND (t, 1));
9632 if (tem)
9633 return tem;
9637 /* If the second operand is simpler than the third, swap them
9638 since that produces better jump optimization results. */
9639 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
9640 TREE_OPERAND (t, 2), false))
9642 /* See if this can be inverted. If it can't, possibly because
9643 it was a floating-point inequality comparison, don't do
9644 anything. */
9645 tem = invert_truthvalue (arg0);
9647 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9648 return fold (build3 (code, type, tem,
9649 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
9652 /* Convert A ? 1 : 0 to simply A. */
9653 if (integer_onep (TREE_OPERAND (t, 1))
9654 && integer_zerop (TREE_OPERAND (t, 2))
9655 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
9656 call to fold will try to move the conversion inside
9657 a COND, which will recurse. In that case, the COND_EXPR
9658 is probably the best choice, so leave it alone. */
9659 && type == TREE_TYPE (arg0))
9660 return pedantic_non_lvalue (arg0);
9662 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
9663 over COND_EXPR in cases such as floating point comparisons. */
9664 if (integer_zerop (TREE_OPERAND (t, 1))
9665 && integer_onep (TREE_OPERAND (t, 2))
9666 && truth_value_p (TREE_CODE (arg0)))
9667 return pedantic_non_lvalue (fold_convert (type,
9668 invert_truthvalue (arg0)));
9670 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
9671 if (TREE_CODE (arg0) == LT_EXPR
9672 && integer_zerop (TREE_OPERAND (arg0, 1))
9673 && integer_zerop (TREE_OPERAND (t, 2))
9674 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
9675 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
9676 TREE_TYPE (tem), tem, arg1)));
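/* E.g. assuming 32-bit int, "x < 0 ? INT_MIN : 0" for "int x"
   becomes "x & INT_MIN", since INT_MIN is exactly the sign bit.  */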
9678 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
9679 already handled above. */
9680 if (TREE_CODE (arg0) == BIT_AND_EXPR
9681 && integer_onep (TREE_OPERAND (arg0, 1))
9682 && integer_zerop (TREE_OPERAND (t, 2))
9683 && integer_pow2p (arg1))
9685 tree tem = TREE_OPERAND (arg0, 0);
9686 STRIP_NOPS (tem);
9687 if (TREE_CODE (tem) == RSHIFT_EXPR
9688 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9689 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
9690 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
9691 return fold (build2 (BIT_AND_EXPR, type,
9692 TREE_OPERAND (tem, 0), arg1));
9695 /* A & N ? N : 0 is simply A & N if N is a power of two. This
9696 is probably obsolete because the first operand should be a
9697 truth value (that's why we have the two cases above), but let's
9698 leave it in until we can confirm this for all front-ends. */
9699 if (integer_zerop (TREE_OPERAND (t, 2))
9700 && TREE_CODE (arg0) == NE_EXPR
9701 && integer_zerop (TREE_OPERAND (arg0, 1))
9702 && integer_pow2p (arg1)
9703 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
9704 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
9705 arg1, OEP_ONLY_CONST))
9706 return pedantic_non_lvalue (fold_convert (type,
9707 TREE_OPERAND (arg0, 0)));
9709 /* Convert A ? B : 0 into A && B if A and B are truth values. */
9710 if (integer_zerop (TREE_OPERAND (t, 2))
9711 && truth_value_p (TREE_CODE (arg0))
9712 && truth_value_p (TREE_CODE (arg1)))
9713 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
9715 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
9716 if (integer_onep (TREE_OPERAND (t, 2))
9717 && truth_value_p (TREE_CODE (arg0))
9718 && truth_value_p (TREE_CODE (arg1)))
9720 /* Only perform transformation if ARG0 is easily inverted. */
9721 tem = invert_truthvalue (arg0);
9722 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9723 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
9726 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
9727 if (integer_zerop (arg1)
9728 && truth_value_p (TREE_CODE (arg0))
9729 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9731 /* Only perform transformation if ARG0 is easily inverted. */
9732 tem = invert_truthvalue (arg0);
9733 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9734 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
9735 TREE_OPERAND (t, 2)));
9738 /* Convert A ? 1 : B into A || B if A and B are truth values. */
9739 if (integer_onep (arg1)
9740 && truth_value_p (TREE_CODE (arg0))
9741 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9742 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
9743 TREE_OPERAND (t, 2)));
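/* Summarizing the four truth-value forms above: "a ? b : 0"
   becomes "a && b", "a ? b : 1" becomes "!a || b", "a ? 0 : b"
   becomes "!a && b", and "a ? 1 : b" becomes "a || b", with the
   "!a" forms used only when A inverts cleanly.  */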
9745 return t;
9747 case COMPOUND_EXPR:
9748 /* When pedantic, a compound expression can be neither an lvalue
9749 nor an integer constant expression. */
9750 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9751 return t;
9752 /* Don't let (0, 0) be a null pointer constant. */
9753 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9754 : fold_convert (type, arg1);
9755 return pedantic_non_lvalue (tem);
9757 case COMPLEX_EXPR:
9758 if (wins)
9759 return build_complex (type, arg0, arg1);
9760 return t;
9762 case REALPART_EXPR:
9763 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9764 return t;
9765 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9766 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
9767 TREE_OPERAND (arg0, 1));
9768 else if (TREE_CODE (arg0) == COMPLEX_CST)
9769 return TREE_REALPART (arg0);
9770 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9771 return fold (build2 (TREE_CODE (arg0), type,
9772 fold (build1 (REALPART_EXPR, type,
9773 TREE_OPERAND (arg0, 0))),
9774 fold (build1 (REALPART_EXPR, type,
9775 TREE_OPERAND (arg0, 1)))));
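/* E.g. in GNU C terms, "__real__ (z + w)" is rewritten as
   "__real__ z + __real__ w" (and likewise for "__imag__"
   below), letting each component fold independently.  */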
9776 return t;
9778 case IMAGPART_EXPR:
9779 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9780 return fold_convert (type, integer_zero_node);
9781 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9782 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
9783 TREE_OPERAND (arg0, 0));
9784 else if (TREE_CODE (arg0) == COMPLEX_CST)
9785 return TREE_IMAGPART (arg0);
9786 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9787 return fold (build2 (TREE_CODE (arg0), type,
9788 fold (build1 (IMAGPART_EXPR, type,
9789 TREE_OPERAND (arg0, 0))),
9790 fold (build1 (IMAGPART_EXPR, type,
9791 TREE_OPERAND (arg0, 1)))));
9792 return t;
9794 case CALL_EXPR:
9795 /* Check for a built-in function. */
9796 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
9797 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
9798 == FUNCTION_DECL)
9799 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
9801 tree tmp = fold_builtin (t, false);
9802 if (tmp)
9803 return tmp;
9805 return t;
9807 default:
9808 return t;
9809 } /* switch (code) */
9812 #ifdef ENABLE_FOLD_CHECKING
9813 #undef fold
9815 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
9816 static void fold_check_failed (tree, tree);
9817 void print_fold_checksum (tree);
9819 /* When configured with --enable-checking=fold, compute a digest of EXPR
9820 before and after the actual fold call, to verify that fold did not
9821 accidentally change the original expression. */
9823 tree
9824 fold (tree expr)
9826 tree ret;
9827 struct md5_ctx ctx;
9828 unsigned char checksum_before[16], checksum_after[16];
9829 htab_t ht;
9831 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9832 md5_init_ctx (&ctx);
9833 fold_checksum_tree (expr, &ctx, ht);
9834 md5_finish_ctx (&ctx, checksum_before);
9835 htab_empty (ht);
9837 ret = fold_1 (expr);
9839 md5_init_ctx (&ctx);
9840 fold_checksum_tree (expr, &ctx, ht);
9841 md5_finish_ctx (&ctx, checksum_after);
9842 htab_delete (ht);
9844 if (memcmp (checksum_before, checksum_after, 16))
9845 fold_check_failed (expr, ret);
9847 return ret;
9850 void
9851 print_fold_checksum (tree expr)
9853 struct md5_ctx ctx;
9854 unsigned char checksum[16], cnt;
9855 htab_t ht;
9857 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9858 md5_init_ctx (&ctx);
9859 fold_checksum_tree (expr, &ctx, ht);
9860 md5_finish_ctx (&ctx, checksum);
9861 htab_delete (ht);
9862 for (cnt = 0; cnt < 16; ++cnt)
9863 fprintf (stderr, "%02x", checksum[cnt]);
9864 putc ('\n', stderr);
9867 static void
9868 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
9870 internal_error ("fold check: original tree changed by fold");
9873 static void
9874 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
9876 void **slot;
9877 enum tree_code code;
9878 char buf[sizeof (struct tree_decl)];
9879 int i, len;
9881 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
9882 <= sizeof (struct tree_decl))
9883 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
9884 if (expr == NULL)
9885 return;
9886 slot = htab_find_slot (ht, expr, INSERT);
9887 if (*slot != NULL)
9888 return;
9889 *slot = expr;
9890 code = TREE_CODE (expr);
9891 if (TREE_CODE_CLASS (code) == tcc_declaration
9892 && DECL_ASSEMBLER_NAME_SET_P (expr))
9894 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9895 memcpy (buf, expr, tree_size (expr));
9896 expr = (tree) buf;
9897 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9899 else if (TREE_CODE_CLASS (code) == tcc_type
9900 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
9901 || TYPE_CACHED_VALUES_P (expr)))
9903 /* Allow these fields to be modified. */
9904 memcpy (buf, expr, tree_size (expr));
9905 expr = (tree) buf;
9906 TYPE_POINTER_TO (expr) = NULL;
9907 TYPE_REFERENCE_TO (expr) = NULL;
9908 TYPE_CACHED_VALUES_P (expr) = 0;
9909 TYPE_CACHED_VALUES (expr) = NULL;
9911 md5_process_bytes (expr, tree_size (expr), ctx);
9912 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
9913 if (TREE_CODE_CLASS (code) != tcc_type
9914 && TREE_CODE_CLASS (code) != tcc_declaration)
9915 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
9916 switch (TREE_CODE_CLASS (code))
9918 case tcc_constant:
9919 switch (code)
9921 case STRING_CST:
9922 md5_process_bytes (TREE_STRING_POINTER (expr),
9923 TREE_STRING_LENGTH (expr), ctx);
9924 break;
9925 case COMPLEX_CST:
9926 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
9927 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
9928 break;
9929 case VECTOR_CST:
9930 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
9931 break;
9932 default:
9933 break;
9935 break;
9936 case tcc_exceptional:
9937 switch (code)
9939 case TREE_LIST:
9940 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
9941 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
9942 break;
9943 case TREE_VEC:
9944 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
9945 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
9946 break;
9947 default:
9948 break;
9950 break;
9951 case tcc_expression:
9952 case tcc_reference:
9953 case tcc_comparison:
9954 case tcc_unary:
9955 case tcc_binary:
9956 case tcc_statement:
9957 len = TREE_CODE_LENGTH (code);
9958 for (i = 0; i < len; ++i)
9959 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
9960 break;
9961 case tcc_declaration:
9962 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
9963 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
9964 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
9965 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
9966 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
9967 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
9968 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
9969 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
9970 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
9971 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
9972 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
9973 break;
9974 case tcc_type:
9975 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9976 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9977 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9978 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9979 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9980 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
9981 if (INTEGRAL_TYPE_P (expr)
9982 || SCALAR_FLOAT_TYPE_P (expr))
9984 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9985 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9987 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9988 if (TREE_CODE (expr) == RECORD_TYPE
9989 || TREE_CODE (expr) == UNION_TYPE
9990 || TREE_CODE (expr) == QUAL_UNION_TYPE)
9991 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9992 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9993 break;
9994 default:
9995 break;
9999 #endif
10001 /* Perform constant folding and related simplification of initializer
10002 expression EXPR. This behaves identically to "fold" but ignores
10003 potential run-time traps and exceptions that fold must preserve. */
10005 tree
10006 fold_initializer (tree expr)
10008 int saved_signaling_nans = flag_signaling_nans;
10009 int saved_trapping_math = flag_trapping_math;
10010 int saved_rounding_math = flag_rounding_math;
10011 int saved_trapv = flag_trapv;
10012 tree result;
10014 flag_signaling_nans = 0;
10015 flag_trapping_math = 0;
10016 flag_rounding_math = 0;
10017 flag_trapv = 0;
10019 result = fold (expr);
10021 flag_signaling_nans = saved_signaling_nans;
10022 flag_trapping_math = saved_trapping_math;
10023 flag_rounding_math = saved_rounding_math;
10024 flag_trapv = saved_trapv;
10026 return result;
10029 /* Determine if the first argument is a multiple of the second. Return 0 if
10030 it is not, or if we cannot easily determine it to be.
10032 An example of the sort of thing we care about (at this point; this routine
10033 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10034 fold cases do now) is discovering that
10036 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10038 is a multiple of
10040 SAVE_EXPR (J * 8)
10042 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10044 This code also handles discovering that
10046 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10048 is a multiple of 8 so we don't have to worry about dealing with a
10049 possible remainder.
10051 Note that we *look* inside a SAVE_EXPR only to determine how it was
10052 calculated; it is not safe for fold to do much of anything else with the
10053 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10054 at run time. For example, the latter example above *cannot* be implemented
10055 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10056 evaluation time of the original SAVE_EXPR is not necessarily the same at
10057 the time the new expression is evaluated. The only optimization of this
10058 sort that would be valid is changing
10060 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10062 divided by 8 to
10064 SAVE_EXPR (I) * SAVE_EXPR (J)
10066 (where the same SAVE_EXPR (J) is used in the original and the
10067 transformed version). */
10069 static int
10070 multiple_of_p (tree type, tree top, tree bottom)
10072 if (operand_equal_p (top, bottom, 0))
10073 return 1;
10075 if (TREE_CODE (type) != INTEGER_TYPE)
10076 return 0;
10078 switch (TREE_CODE (top))
10080 case BIT_AND_EXPR:
10081 /* Bitwise and provides a power of two multiple. If the mask is
10082 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10083 if (!integer_pow2p (bottom))
10084 return 0;
10085 /* FALLTHRU */
10087 case MULT_EXPR:
10088 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10089 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10091 case PLUS_EXPR:
10092 case MINUS_EXPR:
10093 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10094 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10096 case LSHIFT_EXPR:
10097 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10099 tree op1, t1;
10101 op1 = TREE_OPERAND (top, 1);
10102 /* const_binop may not detect overflow correctly,
10103 so check for it explicitly here. */
10104 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10105 > TREE_INT_CST_LOW (op1)
10106 && TREE_INT_CST_HIGH (op1) == 0
10107 && 0 != (t1 = fold_convert (type,
10108 const_binop (LSHIFT_EXPR,
10109 size_one_node,
10110 op1, 0)))
10111 && ! TREE_OVERFLOW (t1))
10112 return multiple_of_p (type, t1, bottom);
10114 return 0;
10116 case NOP_EXPR:
10117 /* Can't handle conversions from non-integral or wider integral type. */
10118 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10119 || (TYPE_PRECISION (type)
10120 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10121 return 0;
10123 /* ... fall through ... */
10125 case SAVE_EXPR:
10126 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10128 case INTEGER_CST:
10129 if (TREE_CODE (bottom) != INTEGER_CST
10130 || (TYPE_UNSIGNED (type)
10131 && (tree_int_cst_sgn (top) < 0
10132 || tree_int_cst_sgn (bottom) < 0)))
10133 return 0;
10134 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10135 top, bottom, 0));
10137 default:
10138 return 0;
10142 /* Return true if `t' is known to be non-negative. */
10144 int
10145 tree_expr_nonnegative_p (tree t)
10147 switch (TREE_CODE (t))
10149 case ABS_EXPR:
10150 return 1;
10152 case INTEGER_CST:
10153 return tree_int_cst_sgn (t) >= 0;
10155 case REAL_CST:
10156 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10158 case PLUS_EXPR:
10159 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10160 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10161 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10163 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10164 both unsigned and at least 2 bits shorter than the result. */
10165 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10166 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10167 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10169 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10170 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10171 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10172 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10174 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10175 TYPE_PRECISION (inner2)) + 1;
10176 return prec < TYPE_PRECISION (TREE_TYPE (t));
10179 break;
10181 case MULT_EXPR:
10182 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10184 /* x * x for floating point x is always non-negative. */
10185 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10186 return 1;
10187 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10188 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10191 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10192 both unsigned and their combined precision is less than that of the result. */
10193 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10194 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10195 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10197 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10198 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10199 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10200 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10201 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10202 < TYPE_PRECISION (TREE_TYPE (t));
10204 return 0;
10206 case TRUNC_DIV_EXPR:
10207 case CEIL_DIV_EXPR:
10208 case FLOOR_DIV_EXPR:
10209 case ROUND_DIV_EXPR:
10210 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10211 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10213 case TRUNC_MOD_EXPR:
10214 case CEIL_MOD_EXPR:
10215 case FLOOR_MOD_EXPR:
10216 case ROUND_MOD_EXPR:
10217 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10219 case RDIV_EXPR:
10220 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10221 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10223 case BIT_AND_EXPR:
10224 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10225 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10226 case BIT_IOR_EXPR:
10227 case BIT_XOR_EXPR:
10228 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10229 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10231 case NOP_EXPR:
10233 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10234 tree outer_type = TREE_TYPE (t);
10236 if (TREE_CODE (outer_type) == REAL_TYPE)
10238 if (TREE_CODE (inner_type) == REAL_TYPE)
10239 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10240 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10242 if (TYPE_UNSIGNED (inner_type))
10243 return 1;
10244 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10247 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10249 if (TREE_CODE (inner_type) == REAL_TYPE)
10250 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10251 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10252 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10253 && TYPE_UNSIGNED (inner_type);
10256 break;
10258 case COND_EXPR:
10259 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10260 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10261 case COMPOUND_EXPR:
10262 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10263 case MIN_EXPR:
10264 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10265 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10266 case MAX_EXPR:
10267 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10268 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10269 case MODIFY_EXPR:
10270 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10271 case BIND_EXPR:
10272 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10273 case SAVE_EXPR:
10274 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10275 case NON_LVALUE_EXPR:
10276 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10277 case FLOAT_EXPR:
10278 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10280 case TARGET_EXPR:
10282 tree temp = TARGET_EXPR_SLOT (t);
10283 t = TARGET_EXPR_INITIAL (t);
10285 /* If the initializer is non-void, then it's a normal expression
10286 that will be assigned to the slot. */
10287 if (!VOID_TYPE_P (t))
10288 return tree_expr_nonnegative_p (t);
10290 /* Otherwise, the initializer sets the slot in some way. One common
10291 way is an assignment statement at the end of the initializer. */
10292 while (1)
10294 if (TREE_CODE (t) == BIND_EXPR)
10295 t = expr_last (BIND_EXPR_BODY (t));
10296 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10297 || TREE_CODE (t) == TRY_CATCH_EXPR)
10298 t = expr_last (TREE_OPERAND (t, 0));
10299 else if (TREE_CODE (t) == STATEMENT_LIST)
10300 t = expr_last (t);
10301 else
10302 break;
10304 if (TREE_CODE (t) == MODIFY_EXPR
10305 && TREE_OPERAND (t, 0) == temp)
10306 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10308 return 0;
10311 case CALL_EXPR:
10313 tree fndecl = get_callee_fndecl (t);
10314 tree arglist = TREE_OPERAND (t, 1);
10315 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10316 switch (DECL_FUNCTION_CODE (fndecl))
10318 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10319 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10320 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10321 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10323 CASE_BUILTIN_F (BUILT_IN_ACOS)
10324 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10325 CASE_BUILTIN_F (BUILT_IN_CABS)
10326 CASE_BUILTIN_F (BUILT_IN_COSH)
10327 CASE_BUILTIN_F (BUILT_IN_ERFC)
10328 CASE_BUILTIN_F (BUILT_IN_EXP)
10329 CASE_BUILTIN_F (BUILT_IN_EXP10)
10330 CASE_BUILTIN_F (BUILT_IN_EXP2)
10331 CASE_BUILTIN_F (BUILT_IN_FABS)
10332 CASE_BUILTIN_F (BUILT_IN_FDIM)
10333 CASE_BUILTIN_F (BUILT_IN_FREXP)
10334 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10335 CASE_BUILTIN_F (BUILT_IN_POW10)
10336 CASE_BUILTIN_I (BUILT_IN_FFS)
10337 CASE_BUILTIN_I (BUILT_IN_PARITY)
10338 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10339 /* Always true. */
10340 return 1;
10342 CASE_BUILTIN_F (BUILT_IN_SQRT)
10343 /* sqrt(-0.0) is -0.0. */
10344 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10345 return 1;
10346 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10348 CASE_BUILTIN_F (BUILT_IN_ASINH)
10349 CASE_BUILTIN_F (BUILT_IN_ATAN)
10350 CASE_BUILTIN_F (BUILT_IN_ATANH)
10351 CASE_BUILTIN_F (BUILT_IN_CBRT)
10352 CASE_BUILTIN_F (BUILT_IN_CEIL)
10353 CASE_BUILTIN_F (BUILT_IN_ERF)
10354 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10355 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10356 CASE_BUILTIN_F (BUILT_IN_FMOD)
10357 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10358 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10359 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10360 CASE_BUILTIN_F (BUILT_IN_LRINT)
10361 CASE_BUILTIN_F (BUILT_IN_LROUND)
10362 CASE_BUILTIN_F (BUILT_IN_MODF)
10363 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
10364 CASE_BUILTIN_F (BUILT_IN_POW)
10365 CASE_BUILTIN_F (BUILT_IN_RINT)
10366 CASE_BUILTIN_F (BUILT_IN_ROUND)
10367 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
10368 CASE_BUILTIN_F (BUILT_IN_SINH)
10369 CASE_BUILTIN_F (BUILT_IN_TANH)
10370 CASE_BUILTIN_F (BUILT_IN_TRUNC)
10371 /* True if the 1st argument is nonnegative. */
10372 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10374 CASE_BUILTIN_F (BUILT_IN_FMAX)
10375 /* True if the 1st OR 2nd arguments are nonnegative. */
10376 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10377 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10379 CASE_BUILTIN_F (BUILT_IN_FMIN)
10380 /* True if the 1st AND 2nd arguments are nonnegative. */
10381 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10382 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10384 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
10385 /* True if the 2nd argument is nonnegative. */
10386 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10388 default:
10389 break;
10390 #undef CASE_BUILTIN_F
10391 #undef CASE_BUILTIN_I
10395 /* ... fall through ... */
10397 default:
10398 if (truth_value_p (TREE_CODE (t)))
10399 /* Truth values evaluate to 0 or 1, which is nonnegative. */
10400 return 1;
10403 /* We don't know the sign of `t', so be conservative and return false. */
10404 return 0;
10407 /* Return true when T is an address and is known to be nonzero.
10408 For floating point we further ensure that T is not denormal.
10409 Similar logic is present in nonzero_address in rtlanal.h. */
10411 static bool
10412 tree_expr_nonzero_p (tree t)
10414 tree type = TREE_TYPE (t);
10416 /* Doing something useful for floating point would need more work. */
10417 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10418 return false;
10420 switch (TREE_CODE (t))
10422 case ABS_EXPR:
10423 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10424 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10426 case INTEGER_CST:
10427 /* We used to test for !integer_zerop here. This does not work correctly
10428 if TREE_CONSTANT_OVERFLOW (t). */
10429 return (TREE_INT_CST_LOW (t) != 0
10430 || TREE_INT_CST_HIGH (t) != 0);
10432 case PLUS_EXPR:
10433 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10435 /* In the presence of negative values it is hard
10436 to say anything definite. */
10437 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10438 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10439 return false;
10440 /* One of the operands must be positive and the other non-negative. */
10441 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10442 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10444 break;
10446 case MULT_EXPR:
10447 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10449 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10450 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10452 break;
10454 case NOP_EXPR:
10456 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10457 tree outer_type = TREE_TYPE (t);
10459 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
10460 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
10462 break;
10464 case ADDR_EXPR:
10466 tree base = get_base_address (TREE_OPERAND (t, 0));
10468 if (!base)
10469 return false;
10471 /* Weak declarations may link to NULL. */
10472 if (DECL_P (base))
10473 return !DECL_WEAK (base);
10475 /* Constants are never weak. */
10476 if (CONSTANT_CLASS_P (base))
10477 return true;
10479 return false;
10482 case COND_EXPR:
10483 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10484 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
10486 case MIN_EXPR:
10487 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10488 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10490 case MAX_EXPR:
10491 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
10493 /* When both operands are nonzero, then MAX must be too. */
10494 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
10495 return true;
10497 /* MAX where operand 0 is positive is positive. */
10498 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10500 /* MAX where operand 1 is positive is positive. */
10501 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10502 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10503 return true;
10504 break;
10506 case COMPOUND_EXPR:
10507 case MODIFY_EXPR:
10508 case BIND_EXPR:
10509 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
10511 case SAVE_EXPR:
10512 case NON_LVALUE_EXPR:
10513 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10515 case BIT_IOR_EXPR:
10516 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10517 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10519 default:
10520 break;
10522 return false;
10525 /* See if we are applying CODE, a relational operator, to the highest or lowest
10526 possible integer of TYPE. If so, then the result is a compile
10527 time constant. */
10529 static tree
10530 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
10531 tree *op1_p)
10533 tree op0 = *op0_p;
10534 tree op1 = *op1_p;
10535 enum tree_code code = *code_p;
10536 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
10538 if (TREE_CODE (op1) == INTEGER_CST
10539 && ! TREE_CONSTANT_OVERFLOW (op1)
10540 && width <= HOST_BITS_PER_WIDE_INT
10541 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
10542 || POINTER_TYPE_P (TREE_TYPE (op1))))
10544 unsigned HOST_WIDE_INT signed_max;
10545 unsigned HOST_WIDE_INT max, min;
10547 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
10549 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
10551 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10552 min = 0;
10554 else
10556 max = signed_max;
10557 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10560 if (TREE_INT_CST_HIGH (op1) == 0
10561 && TREE_INT_CST_LOW (op1) == max)
10562 switch (code)
10564 case GT_EXPR:
10565 return omit_one_operand (type, integer_zero_node, op0);
10567 case GE_EXPR:
10568 *code_p = EQ_EXPR;
10569 break;
10570 case LE_EXPR:
10571 return omit_one_operand (type, integer_one_node, op0);
10573 case LT_EXPR:
10574 *code_p = NE_EXPR;
10575 break;
10577 /* The GE_EXPR and LT_EXPR cases above are not normally
10578 reached because of previous transformations. */
10580 default:
10581 break;
10583 else if (TREE_INT_CST_HIGH (op1) == 0
10584 && TREE_INT_CST_LOW (op1) == max - 1)
10585 switch (code)
10587 case GT_EXPR:
10588 *code_p = EQ_EXPR;
10589 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10590 break;
10591 case LE_EXPR:
10592 *code_p = NE_EXPR;
10593 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10594 break;
10595 default:
10596 break;
10598 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10599 && TREE_INT_CST_LOW (op1) == min)
10600 switch (code)
10602 case LT_EXPR:
10603 return omit_one_operand (type, integer_zero_node, op0);
10605 case LE_EXPR:
10606 *code_p = EQ_EXPR;
10607 break;
10609 case GE_EXPR:
10610 return omit_one_operand (type, integer_one_node, op0);
10612 case GT_EXPR:
10613 *code_p = NE_EXPR;
10614 break;
10616 default:
10617 break;
10619 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10620 && TREE_INT_CST_LOW (op1) == min + 1)
10621 switch (code)
10623 case GE_EXPR:
10624 *code_p = NE_EXPR;
10625 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10626 break;
10627 case LT_EXPR:
10628 *code_p = EQ_EXPR;
10629 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10630 break;
10631 default:
10632 break;
10635 else if (TREE_INT_CST_HIGH (op1) == 0
10636 && TREE_INT_CST_LOW (op1) == signed_max
10637 && TYPE_UNSIGNED (TREE_TYPE (op1))
10638 /* signed_type does not work on pointer types. */
10639 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
10641 /* The following case also applies to X < signed_max+1
10642 and X >= signed_max+1 because previous transformations. */
10643 if (code == LE_EXPR || code == GT_EXPR)
10645 tree st0, st1, exp, retval;
10646 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
10647 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
10649 exp = build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
10650 type,
10651 fold_convert (st0, op0),
10652 fold_convert (st1, integer_zero_node));
10654 retval = fold_binary_to_constant (TREE_CODE (exp),
10655 TREE_TYPE (exp),
10656 TREE_OPERAND (exp, 0),
10657 TREE_OPERAND (exp, 1));
10659 /* If we are in gimple form, then returning EXP would create
10660 non-gimple expressions. Clearing it is safe and ensures
10661 we do not allow a non-gimple expression to escape. */
10662 if (in_gimple_form)
10663 exp = NULL;
10665 return (retval ? retval : exp);
10670 return NULL_TREE;
10674 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
10675 attempt to fold the expression to a constant without modifying TYPE,
10676 OP0 or OP1.
10678 If the expression could be simplified to a constant, then return
10679 the constant. If the expression would not be simplified to a
10680 constant, then return NULL_TREE.
10682 Note this is primarily designed to be called after gimplification
10683 of the tree structures and when at least one operand is a constant.
10684 As a result of those simplifying assumptions this routine is far
10685 simpler than the generic fold routine. */
10687 tree
10688 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
10690 int wins = 1;
10691 tree subop0;
10692 tree subop1;
10693 tree tem;
10695 /* If this is a commutative operation, and OP0 is a constant, move it
10696 to OP1 to reduce the number of tests below. */
10697 if (commutative_tree_code (code)
10698 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
10700 tem = op0;
10701 op0 = op1;
10702 op1 = tem;
10705 /* If either operand is a complex type, extract its real component. */
10706 if (TREE_CODE (op0) == COMPLEX_CST)
10707 subop0 = TREE_REALPART (op0);
10708 else
10709 subop0 = op0;
10711 if (TREE_CODE (op1) == COMPLEX_CST)
10712 subop1 = TREE_REALPART (op1);
10713 else
10714 subop1 = op1;
10716 /* Note if either argument is not a real or integer constant.
10717 With a few exceptions, simplification is limited to cases
10718 where both arguments are constants. */
10719 if ((TREE_CODE (subop0) != INTEGER_CST
10720 && TREE_CODE (subop0) != REAL_CST)
10721 || (TREE_CODE (subop1) != INTEGER_CST
10722 && TREE_CODE (subop1) != REAL_CST))
10723 wins = 0;
10725 switch (code)
10727 case PLUS_EXPR:
10728 /* (plus (address) (const_int)) is a constant. */
10729 if (TREE_CODE (op0) == PLUS_EXPR
10730 && TREE_CODE (op1) == INTEGER_CST
10731 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
10732 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
10733 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
10734 == ADDR_EXPR)))
10735 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
10737 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
10738 const_binop (PLUS_EXPR, op1,
10739 TREE_OPERAND (op0, 1), 0));
10741 case BIT_XOR_EXPR:
10743 binary:
10744 if (!wins)
10745 return NULL_TREE;
10747 /* Both arguments are constants. Simplify. */
10748 tem = const_binop (code, op0, op1, 0);
10749 if (tem != NULL_TREE)
10751 /* The return value should always have the same type as
10752 the original expression. */
10753 if (TREE_TYPE (tem) != type)
10754 tem = fold_convert (type, tem);
10756 return tem;
10758 return NULL_TREE;
10760 case MINUS_EXPR:
10761 /* Fold &x - &x. This can happen from &x.foo - &x.
10762 This is unsafe for certain floats even in non-IEEE formats.
10763 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10764 Also note that operand_equal_p is always false if an
10765 operand is volatile. */
10766 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
10767 return fold_convert (type, integer_zero_node);
10769 goto binary;
10771 case MULT_EXPR:
10772 case BIT_AND_EXPR:
10773 /* Special case multiplication or bitwise AND where one argument
10774 is zero. */
10775 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
10776 return omit_one_operand (type, op1, op0);
10777 else
10778 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
10779 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
10780 && real_zerop (op1))
10781 return omit_one_operand (type, op1, op0);
10783 goto binary;
10785 case BIT_IOR_EXPR:
10786 /* Special case when we know the result will be all ones. */
10787 if (integer_all_onesp (op1))
10788 return omit_one_operand (type, op1, op0);
10790 goto binary;
10792 case TRUNC_DIV_EXPR:
10793 case ROUND_DIV_EXPR:
10794 case FLOOR_DIV_EXPR:
10795 case CEIL_DIV_EXPR:
10796 case EXACT_DIV_EXPR:
10797 case TRUNC_MOD_EXPR:
10798 case ROUND_MOD_EXPR:
10799 case FLOOR_MOD_EXPR:
10800 case CEIL_MOD_EXPR:
10801 case RDIV_EXPR:
10802 /* Division by zero is undefined. */
10803 if (integer_zerop (op1))
10804 return NULL_TREE;
10806 if (TREE_CODE (op1) == REAL_CST
10807 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
10808 && real_zerop (op1))
10809 return NULL_TREE;
10811 goto binary;
10813 case MIN_EXPR:
10814 if (INTEGRAL_TYPE_P (type)
10815 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10816 return omit_one_operand (type, op1, op0);
10818 goto binary;
10820 case MAX_EXPR:
10821 if (INTEGRAL_TYPE_P (type)
10822 && TYPE_MAX_VALUE (type)
10823 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10824 return omit_one_operand (type, op1, op0);
10826 goto binary;
10828 case RSHIFT_EXPR:
10829 /* Optimize -1 >> x for arithmetic right shifts. */
10830 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
10831 return omit_one_operand (type, op0, op1);
10832 /* ... fall through ... */
10834 case LSHIFT_EXPR:
10835 if (integer_zerop (op0))
10836 return omit_one_operand (type, op0, op1);
10838 /* Since a negative shift count is not well-defined, don't
10839 try to compute it in the compiler. */
10840 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
10841 return NULL_TREE;
10843 goto binary;
10845 case LROTATE_EXPR:
10846 case RROTATE_EXPR:
10847 /* -1 rotated either direction by any amount is still -1. */
10848 if (integer_all_onesp (op0))
10849 return omit_one_operand (type, op0, op1);
10851 /* 0 rotated either direction by any amount is still zero. */
10852 if (integer_zerop (op0))
10853 return omit_one_operand (type, op0, op1);
10855 goto binary;
10857 case COMPLEX_EXPR:
10858 if (wins)
10859 return build_complex (type, op0, op1);
10860 return NULL_TREE;
10862 case LT_EXPR:
10863 case LE_EXPR:
10864 case GT_EXPR:
10865 case GE_EXPR:
10866 case EQ_EXPR:
10867 case NE_EXPR:
10868 /* If one arg is a real or integer constant, put it last. */
10869 if ((TREE_CODE (op0) == INTEGER_CST
10870 && TREE_CODE (op1) != INTEGER_CST)
10871 || (TREE_CODE (op0) == REAL_CST
10872 && TREE_CODE (op1) != REAL_CST))
10874 tree temp;
10876 temp = op0;
10877 op0 = op1;
10878 op1 = temp;
10879 code = swap_tree_comparison (code);
10882 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10883 This transformation affects the cases which are handled in later
10884 optimizations involving comparisons with non-negative constants. */
10885 if (TREE_CODE (op1) == INTEGER_CST
10886 && TREE_CODE (op0) != INTEGER_CST
10887 && tree_int_cst_sgn (op1) > 0)
10889 switch (code)
10891 case GE_EXPR:
10892 code = GT_EXPR;
10893 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10894 break;
10896 case LT_EXPR:
10897 code = LE_EXPR;
10898 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10899 break;
10901 default:
10902 break;
10906 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10907 if (tem)
10908 return tem;
10910 /* Fall through. */
10912 case ORDERED_EXPR:
10913 case UNORDERED_EXPR:
10914 case UNLT_EXPR:
10915 case UNLE_EXPR:
10916 case UNGT_EXPR:
10917 case UNGE_EXPR:
10918 case UNEQ_EXPR:
10919 case LTGT_EXPR:
10920 if (!wins)
10921 return NULL_TREE;
10923 return fold_relational_const (code, type, op0, op1);
10925 case RANGE_EXPR:
10926 /* This could probably be handled. */
10927 return NULL_TREE;
10929 case TRUTH_AND_EXPR:
10930 /* If second arg is constant zero, result is zero, but first arg
10931 must be evaluated. */
10932 if (integer_zerop (op1))
10933 return omit_one_operand (type, op1, op0);
10934 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10935 case will be handled here. */
10936 if (integer_zerop (op0))
10937 return omit_one_operand (type, op0, op1);
10938 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10939 return constant_boolean_node (true, type);
10940 return NULL_TREE;
10942 case TRUTH_OR_EXPR:
10943 /* If second arg is constant true, result is true, but we must
10944 evaluate first arg. */
10945 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10946 return omit_one_operand (type, op1, op0);
10947 /* Likewise for first arg, but note this only occurs here for
10948 TRUTH_OR_EXPR. */
10949 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10950 return omit_one_operand (type, op0, op1);
10951 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10952 return constant_boolean_node (false, type);
10953 return NULL_TREE;
10955 case TRUTH_XOR_EXPR:
10956 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10958 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10959 return constant_boolean_node (x, type);
10961 return NULL_TREE;
10963 default:
10964 return NULL_TREE;
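/* Editorial sketch, not part of GCC: a minimal use of
   fold_binary_to_constant, folding the constant addition 2 + 3.
   build_int_cst and integer_type_node come from the tree headers this
   file already includes; the example function name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_fold_binary_to_constant (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);

  /* Both operands are INTEGER_CSTs, so this returns the INTEGER_CST 5;
     with a non-constant operand it would return NULL_TREE.  */
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node, two, three);
}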
10968 /* Given the components of a unary expression CODE, TYPE and OP0,
10969 attempt to fold the expression to a constant without modifying
10970 TYPE or OP0.
10972 If the expression can be simplified to a constant, then return
10973 the constant. If the expression cannot be simplified to a
10974 constant, then return NULL_TREE.
10976 Note this is primarily designed to be called after gimplification
10977 of the tree structures and when op0 is a constant. As a result
10978 of those simplifying assumptions this routine is far simpler than
10979 the generic fold routine. */
10981 tree
10982 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
10984 /* Make sure we have a suitable constant argument. */
10985 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
10987 tree subop;
10989 if (TREE_CODE (op0) == COMPLEX_CST)
10990 subop = TREE_REALPART (op0);
10991 else
10992 subop = op0;
10994 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
10995 return NULL_TREE;
10998 switch (code)
11000 case NOP_EXPR:
11001 case FLOAT_EXPR:
11002 case CONVERT_EXPR:
11003 case FIX_TRUNC_EXPR:
11004 case FIX_FLOOR_EXPR:
11005 case FIX_CEIL_EXPR:
11006 return fold_convert_const (code, type, op0);
11008 case NEGATE_EXPR:
11009 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
11010 return fold_negate_const (op0, type);
11011 else
11012 return NULL_TREE;
11014 case ABS_EXPR:
11015 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
11016 return fold_abs_const (op0, type);
11017 else
11018 return NULL_TREE;
11020 case BIT_NOT_EXPR:
11021 if (TREE_CODE (op0) == INTEGER_CST)
11022 return fold_not_const (op0, type);
11023 else
11024 return NULL_TREE;
11026 case REALPART_EXPR:
11027 if (TREE_CODE (op0) == COMPLEX_CST)
11028 return TREE_REALPART (op0);
11029 else
11030 return NULL_TREE;
11032 case IMAGPART_EXPR:
11033 if (TREE_CODE (op0) == COMPLEX_CST)
11034 return TREE_IMAGPART (op0);
11035 else
11036 return NULL_TREE;
11038 case CONJ_EXPR:
11039 if (TREE_CODE (op0) == COMPLEX_CST
11040 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
11041 return build_complex (type, TREE_REALPART (op0),
11042 negate_expr (TREE_IMAGPART (op0)));
11043 return NULL_TREE;
11045 default:
11046 return NULL_TREE;
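/* Editorial sketch, not part of GCC: folding a unary negation of a
   constant with fold_unary_to_constant.  The example function name is
   hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_fold_unary_to_constant (void)
{
  tree five = build_int_cst (integer_type_node, 5);

  /* OP0 is an INTEGER_CST, so this returns the INTEGER_CST -5 via
     fold_negate_const; a non-constant operand would yield NULL_TREE.  */
  return fold_unary_to_constant (NEGATE_EXPR, integer_type_node, five);
}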
11050 /* If EXP represents referencing an element in a constant string
11051 (either via pointer arithmetic or array indexing), return the
11052 tree representing the value accessed, otherwise return NULL. */
11054 tree
11055 fold_read_from_constant_string (tree exp)
11057 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11059 tree exp1 = TREE_OPERAND (exp, 0);
11060 tree index;
11061 tree string;
11063 if (TREE_CODE (exp) == INDIRECT_REF)
11064 string = string_constant (exp1, &index);
11065 else
11067 tree low_bound = array_ref_low_bound (exp);
11068 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11070 /* Optimize the special case of a zero lower bound.
11072 We convert the low_bound to sizetype to avoid some problems
11073 with constant folding.  (E.g. suppose the lower bound is 1,
11074 and its mode is QI.  Without the conversion, (ARRAY
11075 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11076 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
11077 if (! integer_zerop (low_bound))
11078 index = size_diffop (index, fold_convert (sizetype, low_bound));
11080 string = exp1;
11083 if (string
11084 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11085 && TREE_CODE (string) == STRING_CST
11086 && TREE_CODE (index) == INTEGER_CST
11087 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11088 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11089 == MODE_INT)
11090 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11091 return fold_convert (TREE_TYPE (exp),
11092 build_int_cst (NULL_TREE,
11093 (TREE_STRING_POINTER (string)
11094 [TREE_INT_CST_LOW (index)])));
11096 return NULL;
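/* Editorial sketch, not part of GCC: reading "abc"[1] through
   fold_read_from_constant_string.  This assumes the usual way of
   giving a STRING_CST an array type; the example function name is
   hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_fold_read_from_constant_string (void)
{
  tree string = build_string (4, "abc");	/* includes the trailing NUL */
  tree index = build_int_cst (sizetype, 1);
  tree ref;

  TREE_TYPE (string) = build_array_type (char_type_node,
					 build_index_type (size_int (3)));
  ref = build4 (ARRAY_REF, char_type_node, string, index,
		NULL_TREE, NULL_TREE);

  /* The index is a constant within the string bounds, so this returns
     the INTEGER_CST 'b'.  */
  return fold_read_from_constant_string (ref);
}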
11099 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11100 an integer constant or real constant.
11102 TYPE is the type of the result. */
11104 static tree
11105 fold_negate_const (tree arg0, tree type)
11107 tree t = NULL_TREE;
11109 switch (TREE_CODE (arg0))
11111 case INTEGER_CST:
11113 unsigned HOST_WIDE_INT low;
11114 HOST_WIDE_INT high;
11115 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11116 TREE_INT_CST_HIGH (arg0),
11117 &low, &high);
11118 t = build_int_cst_wide (type, low, high);
11119 t = force_fit_type (t, 1,
11120 (overflow | TREE_OVERFLOW (arg0))
11121 && !TYPE_UNSIGNED (type),
11122 TREE_CONSTANT_OVERFLOW (arg0));
11123 break;
11126 case REAL_CST:
11127 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11128 break;
11130 default:
11131 gcc_unreachable ();
11134 return t;
11137 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11138 an integer constant or real constant.
11140 TYPE is the type of the result. */
11142 tree
11143 fold_abs_const (tree arg0, tree type)
11145 tree t = NULL_TREE;
11147 switch (TREE_CODE (arg0))
11149 case INTEGER_CST:
11150 /* If the value is unsigned, then the absolute value is
11151 the same as the ordinary value. */
11152 if (TYPE_UNSIGNED (type))
11153 t = arg0;
11154 /* Similarly, if the value is non-negative. */
11155 else if (INT_CST_LT (integer_minus_one_node, arg0))
11156 t = arg0;
11157 /* If the value is negative, then the absolute value is
11158 its negation. */
11159 else
11161 unsigned HOST_WIDE_INT low;
11162 HOST_WIDE_INT high;
11163 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11164 TREE_INT_CST_HIGH (arg0),
11165 &low, &high);
11166 t = build_int_cst_wide (type, low, high);
11167 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11168 TREE_CONSTANT_OVERFLOW (arg0));
11170 break;
11172 case REAL_CST:
11173 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11174 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11175 else
11176 t = arg0;
11177 break;
11179 default:
11180 gcc_unreachable ();
11183 return t;
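/* Editorial sketch, not part of GCC: fold_abs_const on a negative
   integer constant.  The example function name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_fold_abs_const (void)
{
  tree minus_five = build_int_cst (integer_type_node, -5);

  /* ARG0 is negative, so the result is its negation, the INTEGER_CST 5.  */
  return fold_abs_const (minus_five, integer_type_node);
}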
11186 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11187 constant. TYPE is the type of the result. */
11189 static tree
11190 fold_not_const (tree arg0, tree type)
11192 tree t = NULL_TREE;
11194 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11196 t = build_int_cst_wide (type,
11197 ~ TREE_INT_CST_LOW (arg0),
11198 ~ TREE_INT_CST_HIGH (arg0));
11199 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11200 TREE_CONSTANT_OVERFLOW (arg0));
11202 return t;
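/* Editorial sketch, not part of GCC: fold_not_const computes the
   bitwise complement, so ~5 folds to the INTEGER_CST -6 in two's
   complement.  The example function name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_fold_not_const (void)
{
  tree five = build_int_cst (integer_type_node, 5);
  return fold_not_const (five, integer_type_node);
}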
11205 /* Given CODE, a relational operator, the target type, TYPE and two
11206 constant operands OP0 and OP1, return the result of the
11207 relational operation. If the result is not a compile time
11208 constant, then return NULL_TREE. */
11210 static tree
11211 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11213 int result, invert;
11215 /* From here on, the only cases we handle are when the result is
11216 known to be a constant. */
11218 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11220 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11221 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11223 /* Handle the cases where either operand is a NaN. */
11224 if (real_isnan (c0) || real_isnan (c1))
11226 switch (code)
11228 case EQ_EXPR:
11229 case ORDERED_EXPR:
11230 result = 0;
11231 break;
11233 case NE_EXPR:
11234 case UNORDERED_EXPR:
11235 case UNLT_EXPR:
11236 case UNLE_EXPR:
11237 case UNGT_EXPR:
11238 case UNGE_EXPR:
11239 case UNEQ_EXPR:
11240 result = 1;
11241 break;
11243 case LT_EXPR:
11244 case LE_EXPR:
11245 case GT_EXPR:
11246 case GE_EXPR:
11247 case LTGT_EXPR:
11248 if (flag_trapping_math)
11249 return NULL_TREE;
11250 result = 0;
11251 break;
11253 default:
11254 gcc_unreachable ();
11257 return constant_boolean_node (result, type);
11260 return constant_boolean_node (real_compare (code, c0, c1), type);
11263 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11265 To compute GT, swap the arguments and do LT.
11266 To compute GE, do LT and invert the result.
11267 To compute LE, swap the arguments, do LT and invert the result.
11268 To compute NE, do EQ and invert the result.
11270 Therefore, the code below must handle only EQ and LT. */
11272 if (code == LE_EXPR || code == GT_EXPR)
11274 tree tem = op0;
11275 op0 = op1;
11276 op1 = tem;
11277 code = swap_tree_comparison (code);
11280 /* Note that it is safe to invert for real values here because we
11281 have already handled the one case where it matters. */
11283 invert = 0;
11284 if (code == NE_EXPR || code == GE_EXPR)
11286 invert = 1;
11287 code = invert_tree_comparison (code, false);
11290 /* Compute a result for LT or EQ if args permit;
11291 otherwise return NULL_TREE. */
11292 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11294 if (code == EQ_EXPR)
11295 result = tree_int_cst_equal (op0, op1);
11296 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11297 result = INT_CST_LT_UNSIGNED (op0, op1);
11298 else
11299 result = INT_CST_LT (op0, op1);
11301 else
11302 return NULL_TREE;
11304 if (invert)
11305 result ^= 1;
11306 return constant_boolean_node (result, type);
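/* Editorial sketch, not part of GCC: comparing two integer constants
   with fold_relational_const.  GT, GE, LE and NE are reduced to LT and
   EQ as described above.  The example function name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_fold_relational_const (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);

  /* 2 < 3, so this returns boolean_true_node.  */
  return fold_relational_const (LT_EXPR, boolean_type_node, two, three);
}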
11309 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
11310 Don't build a cleanup point expression for EXPR if it doesn't have side
11311 effects. */
11313 tree
11314 fold_build_cleanup_point_expr (tree type, tree expr)
11316 /* If the expression does not have side effects then we don't have to wrap
11317 it with a cleanup point expression. */
11318 if (!TREE_SIDE_EFFECTS (expr))
11319 return expr;
11321 /* If the expression is a return, check whether the expression inside the
11322 return, or the right-hand side of the modify expression inside the
11323 return, has side effects.  If neither does, we don't need to wrap the
11324 expression in a cleanup point expression.  Note we don't check the
11325 left-hand side of the modify because it should always be a return decl. */
11326 if (TREE_CODE (expr) == RETURN_EXPR)
11328 tree op = TREE_OPERAND (expr, 0);
11329 if (!op || !TREE_SIDE_EFFECTS (op))
11330 return expr;
11331 op = TREE_OPERAND (op, 1);
11332 if (!TREE_SIDE_EFFECTS (op))
11333 return expr;
11336 return build1 (CLEANUP_POINT_EXPR, type, expr);
11339 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11340 avoid confusing the gimplify process. */
11342 tree
11343 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11345 /* The size of the object is not relevant when talking about its address. */
11346 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11347 t = TREE_OPERAND (t, 0);
11349 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11350 if (TREE_CODE (t) == INDIRECT_REF
11351 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11353 t = TREE_OPERAND (t, 0);
11354 if (TREE_TYPE (t) != ptrtype)
11355 t = build1 (NOP_EXPR, ptrtype, t);
11357 else
11359 tree base = t;
11361 while (handled_component_p (base))
11362 base = TREE_OPERAND (base, 0);
11363 if (DECL_P (base))
11364 TREE_ADDRESSABLE (base) = 1;
11366 t = build1 (ADDR_EXPR, ptrtype, t);
11369 return t;
11372 tree
11373 build_fold_addr_expr (tree t)
11375 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
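/* Editorial sketch, not part of GCC: build_fold_addr_expr folds away a
   pre-existing indirection instead of stacking an ADDR_EXPR on top of
   an INDIRECT_REF.  P is assumed to be a pointer to int; the example
   function name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_build_fold_addr_expr (tree p)
{
  tree deref = build1 (INDIRECT_REF, integer_type_node, p);

  /* &*P folds back to P (possibly wrapped in a NOP_EXPR conversion)
     rather than producing ADDR_EXPR <INDIRECT_REF <P>>.  */
  return build_fold_addr_expr (deref);
}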
11378 /* Given a pointer value T, return a simplified version of an indirection
11379 through T, or NULL_TREE if no simplification is possible. */
11381 static tree
11382 fold_indirect_ref_1 (tree t)
11384 tree type = TREE_TYPE (TREE_TYPE (t));
11385 tree sub = t;
11386 tree subtype;
11388 STRIP_NOPS (sub);
11389 subtype = TREE_TYPE (sub);
11390 if (!POINTER_TYPE_P (subtype))
11391 return NULL_TREE;
11393 if (TREE_CODE (sub) == ADDR_EXPR)
11395 tree op = TREE_OPERAND (sub, 0);
11396 tree optype = TREE_TYPE (op);
11397 /* *&p => p */
11398 if (lang_hooks.types_compatible_p (type, optype))
11399 return op;
11400 /* *(foo *)&fooarray => fooarray[0] */
11401 else if (TREE_CODE (optype) == ARRAY_TYPE
11402 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
11404 tree type_domain = TYPE_DOMAIN (optype);
11405 tree min_val = size_zero_node;
11406 if (type_domain && TYPE_MIN_VALUE (type_domain))
11407 min_val = TYPE_MIN_VALUE (type_domain);
11408 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11412 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11413 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11414 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
11416 tree type_domain;
11417 tree min_val = size_zero_node;
11418 sub = build_fold_indirect_ref (sub);
11419 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11420 if (type_domain && TYPE_MIN_VALUE (type_domain))
11421 min_val = TYPE_MIN_VALUE (type_domain);
11422 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11425 return NULL_TREE;
11428 /* Builds an expression for an indirection through T, simplifying some
11429 cases. */
11431 tree
11432 build_fold_indirect_ref (tree t)
11434 tree sub = fold_indirect_ref_1 (t);
11436 if (sub)
11437 return sub;
11438 else
11439 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (t)), t);
11442 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11444 tree
11445 fold_indirect_ref (tree t)
11447 tree sub = fold_indirect_ref_1 (TREE_OPERAND (t, 0));
11449 if (sub)
11450 return sub;
11451 else
11452 return t;
11455 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11456 whose result is ignored. The type of the returned tree need not be
11457 the same as the original expression. */
11459 tree
11460 fold_ignored_result (tree t)
11462 if (!TREE_SIDE_EFFECTS (t))
11463 return integer_zero_node;
11465 for (;;)
11466 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11468 case tcc_unary:
11469 t = TREE_OPERAND (t, 0);
11470 break;
11472 case tcc_binary:
11473 case tcc_comparison:
11474 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11475 t = TREE_OPERAND (t, 0);
11476 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11477 t = TREE_OPERAND (t, 1);
11478 else
11479 return t;
11480 break;
11482 case tcc_expression:
11483 switch (TREE_CODE (t))
11485 case COMPOUND_EXPR:
11486 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11487 return t;
11488 t = TREE_OPERAND (t, 0);
11489 break;
11491 case COND_EXPR:
11492 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11493 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11494 return t;
11495 t = TREE_OPERAND (t, 0);
11496 break;
11498 default:
11499 return t;
11501 break;
11503 default:
11504 return t;
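/* Editorial sketch, not part of GCC: given EXP = (x = 1) + y, only the
   MODIFY_EXPR operand has side effects, so the loop above descends into
   the PLUS_EXPR and returns x = 1 alone; an EXP with no side effects at
   all is replaced by integer_zero_node up front.  */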
11508 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11509 This can only be applied to objects of a sizetype. */
11511 tree
11512 round_up (tree value, int divisor)
11514 tree div = NULL_TREE;
11516 gcc_assert (divisor > 0);
11517 if (divisor == 1)
11518 return value;
11520 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11521 have to do anything. Only do this check when VALUE is not a
11522 constant, because for a constant the check is more expensive than
11523 simply doing the rounding. */
11524 if (TREE_CODE (value) != INTEGER_CST)
11526 div = build_int_cst (TREE_TYPE (value), divisor);
11528 if (multiple_of_p (TREE_TYPE (value), value, div))
11529 return value;
11532 /* If divisor is a power of two, simplify this to bit manipulation. */
11533 if (divisor == (divisor & -divisor))
11535 tree t;
11537 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11538 value = size_binop (PLUS_EXPR, value, t);
11539 t = build_int_cst (TREE_TYPE (value), -divisor);
11540 value = size_binop (BIT_AND_EXPR, value, t);
11542 else
11544 if (!div)
11545 div = build_int_cst (TREE_TYPE (value), divisor);
11546 value = size_binop (CEIL_DIV_EXPR, value, div);
11547 value = size_binop (MULT_EXPR, value, div);
11550 return value;
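/* Editorial sketch, not part of GCC: round_up on a sizetype constant.
   With a power-of-two divisor the bit trick above computes
   (VALUE + DIVISOR - 1) & -DIVISOR, e.g. (37 + 7) & -8 == 40.  The
   example function name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_round_up (void)
{
  /* Returns the sizetype constant 40.  */
  return round_up (size_int (37), 8);
}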
11553 /* Likewise, but round down. */
11555 tree
11556 round_down (tree value, int divisor)
11558 tree div = NULL_TREE;
11560 gcc_assert (divisor > 0);
11561 if (divisor == 1)
11562 return value;
11564 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11565 have to do anything. Only do this check when VALUE is not a
11566 constant, because for a constant the check is more expensive than
11567 simply doing the rounding. */
11568 if (TREE_CODE (value) != INTEGER_CST)
11570 div = build_int_cst (TREE_TYPE (value), divisor);
11572 if (multiple_of_p (TREE_TYPE (value), value, div))
11573 return value;
11576 /* If divisor is a power of two, simplify this to bit manipulation. */
11577 if (divisor == (divisor & -divisor))
11579 tree t;
11581 t = build_int_cst (TREE_TYPE (value), -divisor);
11582 value = size_binop (BIT_AND_EXPR, value, t);
11584 else
11586 if (!div)
11587 div = build_int_cst (TREE_TYPE (value), divisor);
11588 value = size_binop (FLOOR_DIV_EXPR, value, div);
11589 value = size_binop (MULT_EXPR, value, div);
11592 return value;
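/* Editorial sketch, not part of GCC: the round_down analogue of the
   example above, computing 37 & -8 == 32.  The example function name is
   hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_round_down (void)
{
  /* Returns the sizetype constant 32.  */
  return round_down (size_int (37), 8);
}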
11595 /* Returns a pointer to the base of the object addressed by EXP and
11596 extracts the information about the offset of the access, storing it
11597 in PBITPOS and POFFSET. */
11599 static tree
11600 split_address_to_core_and_offset (tree exp,
11601 HOST_WIDE_INT *pbitpos, tree *poffset)
11603 tree core;
11604 enum machine_mode mode;
11605 int unsignedp, volatilep;
11606 HOST_WIDE_INT bitsize;
11608 if (TREE_CODE (exp) == ADDR_EXPR)
11610 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11611 poffset, &mode, &unsignedp, &volatilep,
11612 false);
11614 if (TREE_CODE (core) == INDIRECT_REF)
11615 core = TREE_OPERAND (core, 0);
11617 else
11619 core = exp;
11620 *pbitpos = 0;
11621 *poffset = NULL_TREE;
11624 return core;
11627 /* Returns true if addresses of E1 and E2 differ by a constant, false
11628 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11630 bool
11631 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11633 tree core1, core2;
11634 HOST_WIDE_INT bitpos1, bitpos2;
11635 tree toffset1, toffset2, tdiff, type;
11637 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11638 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11640 if (bitpos1 % BITS_PER_UNIT != 0
11641 || bitpos2 % BITS_PER_UNIT != 0
11642 || !operand_equal_p (core1, core2, 0))
11643 return false;
11645 if (toffset1 && toffset2)
11647 type = TREE_TYPE (toffset1);
11648 if (type != TREE_TYPE (toffset2))
11649 toffset2 = fold_convert (type, toffset2);
11651 tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
11652 if (!host_integerp (tdiff, 0))
11653 return false;
11655 *diff = tree_low_cst (tdiff, 0);
11657 else if (toffset1 || toffset2)
11659 /* If only one of the offsets is non-constant, the difference cannot
11660 be a constant. */
11661 return false;
11663 else
11664 *diff = 0;
11666 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11667 return true;
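/* Editorial sketch, not part of GCC: for E1 = &a[3] and E2 = &a[1] with
   4-byte int elements, both addresses share the core `a' and have
   constant bit positions 96 and 32, so *DIFF is set to
   (96 - 32) / BITS_PER_UNIT == 8 and the function returns true.  If
   either address had a variable offset the other lacked, it would
   return false.  */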
11670 /* Simplify the floating point expression EXP when the sign of the
11671 result is not significant. Return NULL_TREE if no simplification
11672 is possible. */
11674 tree
11675 fold_strip_sign_ops (tree exp)
11677 tree arg0, arg1;
11679 switch (TREE_CODE (exp))
11681 case ABS_EXPR:
11682 case NEGATE_EXPR:
11683 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11684 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11686 case MULT_EXPR:
11687 case RDIV_EXPR:
11688 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11689 return NULL_TREE;
11690 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11691 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11692 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11693 return fold (build2 (TREE_CODE (exp), TREE_TYPE (exp),
11694 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11695 arg1 ? arg1 : TREE_OPERAND (exp, 1)));
11696 break;
11698 default:
11699 break;
11701 return NULL_TREE;
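/* Editorial sketch, not part of GCC: stripping a sign operation from a
   multiplication whose result sign is irrelevant.  X and Y are assumed
   to be side-effect-free trees of type double; the example function
   name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_fold_strip_sign_ops (tree x, tree y)
{
  tree exp = build2 (MULT_EXPR, double_type_node,
		     build1 (NEGATE_EXPR, double_type_node, x), y);

  /* Unless sign-dependent rounding must be honored, the NEGATE_EXPR is
     stripped and X * Y is returned; NULL_TREE means nothing could be
     stripped.  */
  return fold_strip_sign_ops (exp);
}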