/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (tree, enum tree_code,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_relational_hi_lo (enum tree_code *, const tree,
                                   tree *, tree *);
static bool tree_expr_nonzero_p (tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

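/* Illustrative example (not from the original sources): with 8-bit
   two's complement values, 100 + 100 wraps to -56.  Both operands
   have sign bit 0 while the sum has sign bit 1, so ~(a ^ b) has the
   sign bit set, (a ^ sum) has the sign bit set, and their AND is
   negative: OVERFLOW_SUM_SIGN reports the overflow.  When a and b
   have opposite signs, ~(a ^ b) clears the sign bit, and indeed no
   two's complement addition of oppositely signed values can
   overflow.  */
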
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

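/* Illustrative example (not from the original sources): if
   HOST_BITS_PER_WIDE_INT is 32, BASE is 0x10000 and the value
   0x12345678 splits into LOWPART 0x5678 and HIGHPART 0x1234, since
   0x5678 + 0x1234 * 0x10000 == 0x12345678.  Keeping every digit
   below BASE guarantees that a digit product plus two carries,
   at most 0xFFFE0001 + 2 * 0xFFFF, still fits in one unsigned
   HOST_WIDE_INT.  */
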
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}

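/* Example round trip (illustrative only, assuming 32-bit
   HOST_WIDE_INT):

     HOST_WIDE_INT w[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;

     encode (w, 0x12345678, 0xABCD);  => w = { 0x5678, 0x1234, 0xABCD, 0 }
     decode (w, &lo, &hi);            => lo == 0x12345678, hi == 0xABCD

   decode inverts encode exactly as long as every word stays below
   BASE, which the arithmetic routines below maintain.  */
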
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if,
        CONST_OVERFLOWED is nonzero,
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT) 1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}

/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed
   (see OVERFLOW_SUM_SIGN above).
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}

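/* The carry computation above relies on unsigned wraparound: L is
   computed modulo 2**HOST_BITS_PER_WIDE_INT, so L < L1 holds exactly
   when L1 + L2 wrapped, and (l < l1) is the carry out of the low
   word.  For example (illustrative, with 32-bit words),
   0xFFFFFFFF + 2 yields l == 1 < l1, and the carry of 1 propagates
   into the high word.  */
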
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}

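/* Sketch of the overflow test above (illustrative): the full product
   of two doublewords occupies 8 half-words, and prod[4..7], corrected
   for the signs of the operands, is the high doubleword TOPLOW /
   TOPHIGH.  A signed product fits in one doubleword only when that
   high part is a pure sign extension of the low part: all zero bits
   when *HV is nonnegative, all one bits when *HV is negative.  The
   final return statement checks exactly that.  */
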
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1; */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}

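/* Worked example of the rounding modes (illustrative): dividing -7
   by 2 gives a truncated quotient of -3 with remainder -1.
   TRUNC_DIV_EXPR keeps -3.  FLOOR_DIV_EXPR adjusts to -4 because the
   quotient is negative and the remainder nonzero.  CEIL_DIV_EXPR
   keeps -3, since it only adjusts positive quotients.
   ROUND_DIV_EXPR compares 2 * |rem| == 2 against |den| == 2 and,
   since 2 * |rem| >= |den|, rounds away from zero to -4.  The
   matching *_MOD_EXPR results follow from rem = num - quo * den,
   recomputed from the adjusted quotient at the end.  */
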
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

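/* Example (illustrative): in a 32-bit signed type, the only value
   whose negation overflows is INT_MIN == -2147483648, the value
   whose low PREC bits are exactly 1 << (PREC - 1).  The final
   comparison returns false precisely for that bit pattern and true
   for every other signed constant.  */
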
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 0)));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 1)));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                           TREE_OPERAND (t, 1),
                                           TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               TREE_OPERAND (t, 0),
                                               negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               negate_expr (tem),
                                               TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

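/* Example decomposition (illustrative): for IN = x + 3 with
   CODE == PLUS_EXPR, *LITP becomes 3, *CONP stays null, and x is
   returned as the variable part.  For IN = x - 3, the subtracted
   literal is stored in *MINUS_LITP instead and x is again returned;
   associate_trees below can then rebuild an equivalent expression
   from the pieces.  */
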
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold (build2 (code, type, fold_convert (type, t1),
                       fold_convert (type, t2)));
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}

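/* Usage sketch (illustrative, hypothetical caller):

     tree five = build_int_cst (integer_type_node, 5);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = int_const_binop (PLUS_EXPR, five, three, 0);

   SUM is then the INTEGER_CST 8 of the same type, with overflow
   flags propagated from the operands; passing NOTRUNC == 1 would
   skip the final force_fit_type truncation and propagate the flags
   by hand, as the notrunc branch above shows.  */
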
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build2 (code, type, arg0, arg1));
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}

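/* Example (illustrative): with sizetype operands ARG0 == 4 and
   ARG1 == 12, the result is the ssizetype constant -8.  Since
   ARG0 < ARG1, the code computes 12 - 4 in the unsigned type, where
   it cannot overflow, converts the result to ssizetype, and
   subtracts it from zero there.  Doing the subtraction directly in
   the unsigned type would instead wrap to a huge positive value.  */
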
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

1880 /* Convert expression ARG to type TYPE. Used by the middle-end for
1881 simple conversions in preference to calling the front-end's convert. */
1883 tree
1884 fold_convert (tree type, tree arg)
1886 tree orig = TREE_TYPE (arg);
1887 tree tem;
1889 if (type == orig)
1890 return arg;
1892 if (TREE_CODE (arg) == ERROR_MARK
1893 || TREE_CODE (type) == ERROR_MARK
1894 || TREE_CODE (orig) == ERROR_MARK)
1895 return error_mark_node;
1897 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1898 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1899 TYPE_MAIN_VARIANT (orig)))
1900 return fold (build1 (NOP_EXPR, type, arg));
1902 switch (TREE_CODE (type))
1904 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1905 case POINTER_TYPE: case REFERENCE_TYPE:
1906 case OFFSET_TYPE:
1907 if (TREE_CODE (arg) == INTEGER_CST)
1909 tem = fold_convert_const (NOP_EXPR, type, arg);
1910 if (tem != NULL_TREE)
1911 return tem;
1913 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1914 || TREE_CODE (orig) == OFFSET_TYPE)
1915 return fold (build1 (NOP_EXPR, type, arg));
1916 if (TREE_CODE (orig) == COMPLEX_TYPE)
1918 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1919 return fold_convert (type, tem);
1921 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1922 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1923 return fold (build1 (NOP_EXPR, type, arg));
1925 case REAL_TYPE:
1926 if (TREE_CODE (arg) == INTEGER_CST)
1928 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1929 if (tem != NULL_TREE)
1930 return tem;
1932 else if (TREE_CODE (arg) == REAL_CST)
1934 tem = fold_convert_const (NOP_EXPR, type, arg);
1935 if (tem != NULL_TREE)
1936 return tem;
1939 switch (TREE_CODE (orig))
1941 case INTEGER_TYPE: case CHAR_TYPE:
1942 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1943 case POINTER_TYPE: case REFERENCE_TYPE:
1944 return fold (build1 (FLOAT_EXPR, type, arg));
1946 case REAL_TYPE:
1947 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1948 type, arg));
1950 case COMPLEX_TYPE:
1951 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1952 return fold_convert (type, tem);
1954 default:
1955 gcc_unreachable ();
1958 case COMPLEX_TYPE:
1959 switch (TREE_CODE (orig))
1961 case INTEGER_TYPE: case CHAR_TYPE:
1962 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1963 case POINTER_TYPE: case REFERENCE_TYPE:
1964 case REAL_TYPE:
1965 return build2 (COMPLEX_EXPR, type,
1966 fold_convert (TREE_TYPE (type), arg),
1967 fold_convert (TREE_TYPE (type), integer_zero_node));
1968 case COMPLEX_TYPE:
1970 tree rpart, ipart;
1972 if (TREE_CODE (arg) == COMPLEX_EXPR)
1974 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1975 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1976 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1979 arg = save_expr (arg);
1980 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1981 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1982 rpart = fold_convert (TREE_TYPE (type), rpart);
1983 ipart = fold_convert (TREE_TYPE (type), ipart);
1984 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1987 default:
1988 gcc_unreachable ();
1991 case VECTOR_TYPE:
1992 if (integer_zerop (arg))
1993 return build_zero_vector (type);
1994 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1995 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1996 || TREE_CODE (orig) == VECTOR_TYPE);
1997 return fold (build1 (NOP_EXPR, type, arg));
1999 case VOID_TYPE:
2000 return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
2002 default:
2003 gcc_unreachable ();
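/* A worked example of the cases above (illustrative): converting the
   `int' constant 1 to `_Complex double' takes the COMPLEX_TYPE arm and
   produces COMPLEX_EXPR <1.0, 0.0>, each part being fold_convert'ed to
   the component type `double'.  */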
2007 /* Return an expr equal to X but certainly not valid as an lvalue. */
2009 tree
2010 non_lvalue (tree x)
2012 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2013 us. */
2014 if (in_gimple_form)
2015 return x;
2017 /* We only need to wrap lvalue tree codes. */
2018 switch (TREE_CODE (x))
2020 case VAR_DECL:
2021 case PARM_DECL:
2022 case RESULT_DECL:
2023 case LABEL_DECL:
2024 case FUNCTION_DECL:
2025 case SSA_NAME:
2027 case COMPONENT_REF:
2028 case INDIRECT_REF:
2029 case ALIGN_INDIRECT_REF:
2030 case MISALIGNED_INDIRECT_REF:
2031 case ARRAY_REF:
2032 case ARRAY_RANGE_REF:
2033 case BIT_FIELD_REF:
2034 case OBJ_TYPE_REF:
2036 case REALPART_EXPR:
2037 case IMAGPART_EXPR:
2038 case PREINCREMENT_EXPR:
2039 case PREDECREMENT_EXPR:
2040 case SAVE_EXPR:
2041 case TRY_CATCH_EXPR:
2042 case WITH_CLEANUP_EXPR:
2043 case COMPOUND_EXPR:
2044 case MODIFY_EXPR:
2045 case TARGET_EXPR:
2046 case COND_EXPR:
2047 case BIND_EXPR:
2048 case MIN_EXPR:
2049 case MAX_EXPR:
2050 break;
2052 default:
2053 /* Assume the worst for front-end tree codes. */
2054 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2055 break;
2056 return x;
2058 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
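/* For example, for a PARM_DECL `a' this returns NON_LVALUE_EXPR <a>,
   so the folded tree can no longer be assigned through, whereas an
   expression like `a + b' (PLUS_EXPR is not in the list above) is
   returned unchanged.  */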
2061 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2062 Zero means allow extended lvalues. */
2064 int pedantic_lvalues;
2066 /* When pedantic, return an expr equal to X but certainly not valid as a
2067 pedantic lvalue. Otherwise, return X. */
2069 static tree
2070 pedantic_non_lvalue (tree x)
2072 if (pedantic_lvalues)
2073 return non_lvalue (x);
2074 else
2075 return x;
2078 /* Given a tree comparison code, return the code that is the logical inverse
2079 of the given code. It is not safe to do this for floating-point
2080 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a HONOR_NANS
2081 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2083 static enum tree_code
2084 invert_tree_comparison (enum tree_code code, bool honor_nans)
2086 if (honor_nans && flag_trapping_math)
2087 return ERROR_MARK;
2089 switch (code)
2091 case EQ_EXPR:
2092 return NE_EXPR;
2093 case NE_EXPR:
2094 return EQ_EXPR;
2095 case GT_EXPR:
2096 return honor_nans ? UNLE_EXPR : LE_EXPR;
2097 case GE_EXPR:
2098 return honor_nans ? UNLT_EXPR : LT_EXPR;
2099 case LT_EXPR:
2100 return honor_nans ? UNGE_EXPR : GE_EXPR;
2101 case LE_EXPR:
2102 return honor_nans ? UNGT_EXPR : GT_EXPR;
2103 case LTGT_EXPR:
2104 return UNEQ_EXPR;
2105 case UNEQ_EXPR:
2106 return LTGT_EXPR;
2107 case UNGT_EXPR:
2108 return LE_EXPR;
2109 case UNGE_EXPR:
2110 return LT_EXPR;
2111 case UNLT_EXPR:
2112 return GE_EXPR;
2113 case UNLE_EXPR:
2114 return GT_EXPR;
2115 case ORDERED_EXPR:
2116 return UNORDERED_EXPR;
2117 case UNORDERED_EXPR:
2118 return ORDERED_EXPR;
2119 default:
2120 gcc_unreachable ();
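/* Examples from the table above: when NaNs are honored the inverse of
   LT_EXPR is UNGE_EXPR, which is also true for unordered operands;
   without NaNs it is plain GE_EXPR.  With both NaNs and
   -ftrapping-math in effect no safe inverse exists, so ERROR_MARK
   tells the caller to keep a TRUTH_NOT_EXPR instead.  */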
2124 /* Similar, but return the comparison that results if the operands are
2125 swapped. This is safe for floating-point. */
2127 enum tree_code
2128 swap_tree_comparison (enum tree_code code)
2130 switch (code)
2132 case EQ_EXPR:
2133 case NE_EXPR:
2134 return code;
2135 case GT_EXPR:
2136 return LT_EXPR;
2137 case GE_EXPR:
2138 return LE_EXPR;
2139 case LT_EXPR:
2140 return GT_EXPR;
2141 case LE_EXPR:
2142 return GE_EXPR;
2143 default:
2144 gcc_unreachable ();
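/* E.g. `a < b' and `b > a' test the same thing, so LT_EXPR maps to
   GT_EXPR, while EQ_EXPR and NE_EXPR are symmetric in their operands
   and map to themselves.  */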
2149 /* Convert a comparison tree code from an enum tree_code representation
2150 into a compcode bit-based encoding. This function is the inverse of
2151 compcode_to_comparison. */
2153 static enum comparison_code
2154 comparison_to_compcode (enum tree_code code)
2156 switch (code)
2158 case LT_EXPR:
2159 return COMPCODE_LT;
2160 case EQ_EXPR:
2161 return COMPCODE_EQ;
2162 case LE_EXPR:
2163 return COMPCODE_LE;
2164 case GT_EXPR:
2165 return COMPCODE_GT;
2166 case NE_EXPR:
2167 return COMPCODE_NE;
2168 case GE_EXPR:
2169 return COMPCODE_GE;
2170 case ORDERED_EXPR:
2171 return COMPCODE_ORD;
2172 case UNORDERED_EXPR:
2173 return COMPCODE_UNORD;
2174 case UNLT_EXPR:
2175 return COMPCODE_UNLT;
2176 case UNEQ_EXPR:
2177 return COMPCODE_UNEQ;
2178 case UNLE_EXPR:
2179 return COMPCODE_UNLE;
2180 case UNGT_EXPR:
2181 return COMPCODE_UNGT;
2182 case LTGT_EXPR:
2183 return COMPCODE_LTGT;
2184 case UNGE_EXPR:
2185 return COMPCODE_UNGE;
2186 default:
2187 gcc_unreachable ();
2191 /* Convert a compcode bit-based encoding of a comparison operator back
2192 to GCC's enum tree_code representation. This function is the
2193 inverse of comparison_to_compcode. */
2195 static enum tree_code
2196 compcode_to_comparison (enum comparison_code code)
2198 switch (code)
2200 case COMPCODE_LT:
2201 return LT_EXPR;
2202 case COMPCODE_EQ:
2203 return EQ_EXPR;
2204 case COMPCODE_LE:
2205 return LE_EXPR;
2206 case COMPCODE_GT:
2207 return GT_EXPR;
2208 case COMPCODE_NE:
2209 return NE_EXPR;
2210 case COMPCODE_GE:
2211 return GE_EXPR;
2212 case COMPCODE_ORD:
2213 return ORDERED_EXPR;
2214 case COMPCODE_UNORD:
2215 return UNORDERED_EXPR;
2216 case COMPCODE_UNLT:
2217 return UNLT_EXPR;
2218 case COMPCODE_UNEQ:
2219 return UNEQ_EXPR;
2220 case COMPCODE_UNLE:
2221 return UNLE_EXPR;
2222 case COMPCODE_UNGT:
2223 return UNGT_EXPR;
2224 case COMPCODE_LTGT:
2225 return LTGT_EXPR;
2226 case COMPCODE_UNGE:
2227 return UNGE_EXPR;
2228 default:
2229 gcc_unreachable ();
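/* The encoding makes combining comparisons a bitwise operation; for
   example COMPCODE_LT | COMPCODE_EQ is 1 | 2 == 3 == COMPCODE_LE, and
   COMPCODE_LT & COMPCODE_GT is 1 & 4 == 0 == COMPCODE_FALSE.  This is
   what lets combine_comparisons below simply AND or OR the codes.  */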
2233 /* Return a tree for the comparison which is the combination of
2234 doing the AND or OR (depending on CODE) of the two operations LCODE
2235 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2236 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2237 if this makes the transformation invalid. */
2239 tree
2240 combine_comparisons (enum tree_code code, enum tree_code lcode,
2241 enum tree_code rcode, tree truth_type,
2242 tree ll_arg, tree lr_arg)
2244 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2245 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2246 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2247 enum comparison_code compcode;
2249 switch (code)
2251 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2252 compcode = lcompcode & rcompcode;
2253 break;
2255 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2256 compcode = lcompcode | rcompcode;
2257 break;
2259 default:
2260 return NULL_TREE;
2263 if (!honor_nans)
2265 /* Eliminate unordered comparisons, as well as LTGT and ORD
2266 which are not used unless the mode has NaNs. */
2267 compcode &= ~COMPCODE_UNORD;
2268 if (compcode == COMPCODE_LTGT)
2269 compcode = COMPCODE_NE;
2270 else if (compcode == COMPCODE_ORD)
2271 compcode = COMPCODE_TRUE;
2273 else if (flag_trapping_math)
2275 /* Check that the original operation and the optimized ones will trap
2276 under the same condition. */
2277 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2278 && (lcompcode != COMPCODE_EQ)
2279 && (lcompcode != COMPCODE_ORD);
2280 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2281 && (rcompcode != COMPCODE_EQ)
2282 && (rcompcode != COMPCODE_ORD);
2283 bool trap = (compcode & COMPCODE_UNORD) == 0
2284 && (compcode != COMPCODE_EQ)
2285 && (compcode != COMPCODE_ORD);
2287 /* In a short-circuited boolean expression the LHS might be
2288 such that the RHS, if evaluated, will never trap. For
2289 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2290 if neither x nor y is NaN. (This is a mixed blessing: for
2291 example, the expression above will never trap, hence
2292 optimizing it to x < y would be invalid). */
2293 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2294 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2295 rtrap = false;
2297 /* If the comparison was short-circuited, and only the RHS
2298 trapped, we may now generate a spurious trap. */
2299 if (rtrap && !ltrap
2300 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2301 return NULL_TREE;
2303 /* If we changed the conditions that cause a trap, we lose. */
2304 if ((ltrap || rtrap) != trap)
2305 return NULL_TREE;
2308 if (compcode == COMPCODE_TRUE)
2309 return constant_boolean_node (true, truth_type);
2310 else if (compcode == COMPCODE_FALSE)
2311 return constant_boolean_node (false, truth_type);
2312 else
2313 return fold (build2 (compcode_to_comparison (compcode),
2314 truth_type, ll_arg, lr_arg));
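/* A worked example (illustrative): for integer operands, combining
   `x < y' and `x == y' with TRUTH_AND_EXPR gives
   COMPCODE_LT & COMPCODE_EQ == COMPCODE_FALSE, hence constant false,
   while TRUTH_OR_EXPR gives COMPCODE_LE and the result folds to
   `x <= y'.  */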
2317 /* Return nonzero if CODE is a tree code that represents a truth value. */
2319 static int
2320 truth_value_p (enum tree_code code)
2322 return (TREE_CODE_CLASS (code) == tcc_comparison
2323 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2324 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2325 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2328 /* Return nonzero if two operands (typically of the same tree node)
2329 are necessarily equal. If either argument has side-effects this
2330 function returns zero. FLAGS modifies behavior as follows:
2332 If OEP_ONLY_CONST is set, only return nonzero for constants.
2333 This function tests whether the operands are indistinguishable;
2334 it does not test whether they are equal using C's == operation.
2335 The distinction is important for IEEE floating point, because
2336 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2337 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2339 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2340 even though it may hold multiple values during a function.
2341 This is because a GCC tree node guarantees that nothing else is
2342 executed between the evaluation of its "operands" (which may often
2343 be evaluated in arbitrary order). Hence if the operands themselves
2344 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2345 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2346 unset means assuming isochronic (or instantaneous) tree equivalence.
2347 Unless comparing arbitrary expression trees, such as from different
2348 statements, this flag can usually be left unset.
2350 If OEP_PURE_SAME is set, then pure functions with identical arguments
2351 are considered the same. It is used when the caller has other ways
2352 to ensure that global memory is unchanged in between. */
2355 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2357 /* If either is ERROR_MARK, they aren't equal. */
2358 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2359 return 0;
2361 /* If both types don't have the same signedness, then we can't consider
2362 them equal. We must check this before the STRIP_NOPS calls
2363 because they may change the signedness of the arguments. */
2364 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2365 return 0;
2367 STRIP_NOPS (arg0);
2368 STRIP_NOPS (arg1);
2370 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2371 /* This is needed for conversions and for COMPONENT_REF.
2372 Might as well play it safe and always test this. */
2373 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2374 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2375 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2376 return 0;
2378 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2379 We don't care about side effects in that case because the SAVE_EXPR
2380 takes care of that for us. In all other cases, two expressions are
2381 equal if they have no side effects. If we have two identical
2382 expressions with side effects that should be treated the same due
2383 to the only side effects being identical SAVE_EXPR's, that will
2384 be detected in the recursive calls below. */
2385 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2386 && (TREE_CODE (arg0) == SAVE_EXPR
2387 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2388 return 1;
2390 /* Next handle constant cases, those for which we can return 1 even
2391 if ONLY_CONST is set. */
2392 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2393 switch (TREE_CODE (arg0))
2395 case INTEGER_CST:
2396 return (! TREE_CONSTANT_OVERFLOW (arg0)
2397 && ! TREE_CONSTANT_OVERFLOW (arg1)
2398 && tree_int_cst_equal (arg0, arg1));
2400 case REAL_CST:
2401 return (! TREE_CONSTANT_OVERFLOW (arg0)
2402 && ! TREE_CONSTANT_OVERFLOW (arg1)
2403 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2404 TREE_REAL_CST (arg1)));
2406 case VECTOR_CST:
2408 tree v1, v2;
2410 if (TREE_CONSTANT_OVERFLOW (arg0)
2411 || TREE_CONSTANT_OVERFLOW (arg1))
2412 return 0;
2414 v1 = TREE_VECTOR_CST_ELTS (arg0);
2415 v2 = TREE_VECTOR_CST_ELTS (arg1);
2416 while (v1 && v2)
2418 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2419 flags))
2420 return 0;
2421 v1 = TREE_CHAIN (v1);
2422 v2 = TREE_CHAIN (v2);
2425 return 1;
2428 case COMPLEX_CST:
2429 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2430 flags)
2431 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2432 flags));
2434 case STRING_CST:
2435 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2436 && ! memcmp (TREE_STRING_POINTER (arg0),
2437 TREE_STRING_POINTER (arg1),
2438 TREE_STRING_LENGTH (arg0)));
2440 case ADDR_EXPR:
2441 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2443 default:
2444 break;
2447 if (flags & OEP_ONLY_CONST)
2448 return 0;
2450 /* Define macros to test an operand from arg0 and arg1 for equality and a
2451 variant that allows null and views null as being different from any
2452 non-null value. In the latter case, if either is null, they both
2453 must be; otherwise, do the normal comparison. */
2454 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2455 TREE_OPERAND (arg1, N), flags)
2457 #define OP_SAME_WITH_NULL(N) \
2458 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2459 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2461 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2463 case tcc_unary:
2464 /* Two conversions are equal only if signedness and modes match. */
2465 switch (TREE_CODE (arg0))
2467 case NOP_EXPR:
2468 case CONVERT_EXPR:
2469 case FIX_CEIL_EXPR:
2470 case FIX_TRUNC_EXPR:
2471 case FIX_FLOOR_EXPR:
2472 case FIX_ROUND_EXPR:
2473 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2474 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2475 return 0;
2476 break;
2477 default:
2478 break;
2481 return OP_SAME (0);
2484 case tcc_comparison:
2485 case tcc_binary:
2486 if (OP_SAME (0) && OP_SAME (1))
2487 return 1;
2489 /* For commutative ops, allow the other order. */
2490 return (commutative_tree_code (TREE_CODE (arg0))
2491 && operand_equal_p (TREE_OPERAND (arg0, 0),
2492 TREE_OPERAND (arg1, 1), flags)
2493 && operand_equal_p (TREE_OPERAND (arg0, 1),
2494 TREE_OPERAND (arg1, 0), flags));
2496 case tcc_reference:
2497 /* If either of the pointer (or reference) expressions we are
2498 dereferencing contains a side effect, these cannot be equal. */
2499 if (TREE_SIDE_EFFECTS (arg0)
2500 || TREE_SIDE_EFFECTS (arg1))
2501 return 0;
2503 switch (TREE_CODE (arg0))
2505 case INDIRECT_REF:
2506 case ALIGN_INDIRECT_REF:
2507 case MISALIGNED_INDIRECT_REF:
2508 case REALPART_EXPR:
2509 case IMAGPART_EXPR:
2510 return OP_SAME (0);
2512 case ARRAY_REF:
2513 case ARRAY_RANGE_REF:
2514 /* Operands 2 and 3 may be null. */
2515 return (OP_SAME (0)
2516 && OP_SAME (1)
2517 && OP_SAME_WITH_NULL (2)
2518 && OP_SAME_WITH_NULL (3));
2520 case COMPONENT_REF:
2521 /* Handle operand 2 the same as for ARRAY_REF. */
2522 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2524 case BIT_FIELD_REF:
2525 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2527 default:
2528 return 0;
2531 case tcc_expression:
2532 switch (TREE_CODE (arg0))
2534 case ADDR_EXPR:
2535 case TRUTH_NOT_EXPR:
2536 return OP_SAME (0);
2538 case TRUTH_ANDIF_EXPR:
2539 case TRUTH_ORIF_EXPR:
2540 return OP_SAME (0) && OP_SAME (1);
2542 case TRUTH_AND_EXPR:
2543 case TRUTH_OR_EXPR:
2544 case TRUTH_XOR_EXPR:
2545 if (OP_SAME (0) && OP_SAME (1))
2546 return 1;
2548 /* Otherwise take into account this is a commutative operation. */
2549 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2550 TREE_OPERAND (arg1, 1), flags)
2551 && operand_equal_p (TREE_OPERAND (arg0, 1),
2552 TREE_OPERAND (arg1, 0), flags));
2554 case CALL_EXPR:
2555 /* If the CALL_EXPRs call different functions, then they
2556 clearly cannot be equal. */
2557 if (!OP_SAME (0))
2558 return 0;
2561 unsigned int cef = call_expr_flags (arg0);
2562 if (flags & OEP_PURE_SAME)
2563 cef &= ECF_CONST | ECF_PURE;
2564 else
2565 cef &= ECF_CONST;
2566 if (!cef)
2567 return 0;
2570 /* Now see if all the arguments are the same. operand_equal_p
2571 does not handle TREE_LIST, so we walk the operands here
2572 feeding them to operand_equal_p. */
2573 arg0 = TREE_OPERAND (arg0, 1);
2574 arg1 = TREE_OPERAND (arg1, 1);
2575 while (arg0 && arg1)
2577 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2578 flags))
2579 return 0;
2581 arg0 = TREE_CHAIN (arg0);
2582 arg1 = TREE_CHAIN (arg1);
2585 /* If we get here and both argument lists are exhausted
2586 then the CALL_EXPRs are equal. */
2587 return ! (arg0 || arg1);
2589 default:
2590 return 0;
2593 case tcc_declaration:
2594 /* Consider __builtin_sqrt equal to sqrt. */
2595 return (TREE_CODE (arg0) == FUNCTION_DECL
2596 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2597 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2598 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2600 default:
2601 return 0;
2604 #undef OP_SAME
2605 #undef OP_SAME_WITH_NULL
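/* Two examples of the distinction documented above: the REAL_CSTs
   -0.0 and 0.0 are not operand_equal_p even though they compare equal
   with `==', since REAL_VALUES_IDENTICAL tells them apart, while two
   identical NaN constants are operand_equal_p although NaN != NaN at
   run time.  */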
2608 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2609 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2611 When in doubt, return 0. */
2613 static int
2614 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2616 int unsignedp1, unsignedpo;
2617 tree primarg0, primarg1, primother;
2618 unsigned int correct_width;
2620 if (operand_equal_p (arg0, arg1, 0))
2621 return 1;
2623 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2624 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2625 return 0;
2627 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2628 and see if the inner values are the same. This removes any
2629 signedness comparison, which doesn't matter here. */
2630 primarg0 = arg0, primarg1 = arg1;
2631 STRIP_NOPS (primarg0);
2632 STRIP_NOPS (primarg1);
2633 if (operand_equal_p (primarg0, primarg1, 0))
2634 return 1;
2636 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2637 actual comparison operand, ARG0.
2639 First throw away any conversions to wider types
2640 already present in the operands. */
2642 primarg1 = get_narrower (arg1, &unsignedp1);
2643 primother = get_narrower (other, &unsignedpo);
2645 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2646 if (unsignedp1 == unsignedpo
2647 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2648 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2650 tree type = TREE_TYPE (arg0);
2652 /* Make sure shorter operand is extended the right way
2653 to match the longer operand. */
2654 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2655 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2657 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2658 return 1;
2661 return 0;
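/* A rough sketch of the intended use (illustrative): if a signed char
   `sc' was compared against the `int' constant 10 and shorten_compare
   narrowed `(int) sc == 10' down to a comparison on `sc' itself, then
   ARG0 is the narrowed operand, ARG1 the original one and OTHER the
   constant, and the two operands are reported equal for comparison
   purposes.  */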
2664 /* See if ARG is an expression that is either a comparison or is performing
2665 arithmetic on comparisons. The comparisons must only be comparing
2666 two different values, which will be stored in *CVAL1 and *CVAL2; if
2667 they are nonzero it means that some operands have already been found.
2668 No variables may be used anywhere else in the expression except in the
2669 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2670 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2672 If this is true, return 1. Otherwise, return zero. */
2674 static int
2675 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2677 enum tree_code code = TREE_CODE (arg);
2678 enum tree_code_class class = TREE_CODE_CLASS (code);
2680 /* We can handle some of the tcc_expression cases here. */
2681 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2682 class = tcc_unary;
2683 else if (class == tcc_expression
2684 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2685 || code == COMPOUND_EXPR))
2686 class = tcc_binary;
2688 else if (class == tcc_expression && code == SAVE_EXPR
2689 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2691 /* If we've already found a CVAL1 or CVAL2, this expression is
2692 too complex to handle. */
2693 if (*cval1 || *cval2)
2694 return 0;
2696 class = tcc_unary;
2697 *save_p = 1;
2700 switch (class)
2702 case tcc_unary:
2703 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2705 case tcc_binary:
2706 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2707 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2708 cval1, cval2, save_p));
2710 case tcc_constant:
2711 return 1;
2713 case tcc_expression:
2714 if (code == COND_EXPR)
2715 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2716 cval1, cval2, save_p)
2717 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2718 cval1, cval2, save_p)
2719 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2720 cval1, cval2, save_p));
2721 return 0;
2723 case tcc_comparison:
2724 /* First see if we can handle the first operand, then the second. For
2725 the second operand, we know *CVAL1 can't be zero. It must be that
2726 one side of the comparison is each of the values; test for the
2727 case where this isn't true by failing if the two operands
2728 are the same. */
2730 if (operand_equal_p (TREE_OPERAND (arg, 0),
2731 TREE_OPERAND (arg, 1), 0))
2732 return 0;
2734 if (*cval1 == 0)
2735 *cval1 = TREE_OPERAND (arg, 0);
2736 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2738 else if (*cval2 == 0)
2739 *cval2 = TREE_OPERAND (arg, 0);
2740 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2742 else
2743 return 0;
2745 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2747 else if (*cval2 == 0)
2748 *cval2 = TREE_OPERAND (arg, 1);
2749 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2751 else
2752 return 0;
2754 return 1;
2756 default:
2757 return 0;
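/* For example, for ARG `x < y || x == y' this returns 1 with
   *CVAL1 == x and *CVAL2 == y, since every comparison involves only
   those two values; `x < y || a == b' fails because a third value
   appears.  */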
2761 /* ARG is a tree that is known to contain just arithmetic operations and
2762 comparisons. Evaluate the operations in the tree substituting NEW0 for
2763 any occurrence of OLD0 as an operand of a comparison and likewise for
2764 NEW1 and OLD1. */
2766 static tree
2767 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2769 tree type = TREE_TYPE (arg);
2770 enum tree_code code = TREE_CODE (arg);
2771 enum tree_code_class class = TREE_CODE_CLASS (code);
2773 /* We can handle some of the tcc_expression cases here. */
2774 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2775 class = tcc_unary;
2776 else if (class == tcc_expression
2777 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2778 class = tcc_binary;
2780 switch (class)
2782 case tcc_unary:
2783 return fold (build1 (code, type,
2784 eval_subst (TREE_OPERAND (arg, 0),
2785 old0, new0, old1, new1)));
2787 case tcc_binary:
2788 return fold (build2 (code, type,
2789 eval_subst (TREE_OPERAND (arg, 0),
2790 old0, new0, old1, new1),
2791 eval_subst (TREE_OPERAND (arg, 1),
2792 old0, new0, old1, new1)));
2794 case tcc_expression:
2795 switch (code)
2797 case SAVE_EXPR:
2798 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2800 case COMPOUND_EXPR:
2801 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2803 case COND_EXPR:
2804 return fold (build3 (code, type,
2805 eval_subst (TREE_OPERAND (arg, 0),
2806 old0, new0, old1, new1),
2807 eval_subst (TREE_OPERAND (arg, 1),
2808 old0, new0, old1, new1),
2809 eval_subst (TREE_OPERAND (arg, 2),
2810 old0, new0, old1, new1)));
2811 default:
2812 break;
2814 /* Fall through - ??? */
2816 case tcc_comparison:
2818 tree arg0 = TREE_OPERAND (arg, 0);
2819 tree arg1 = TREE_OPERAND (arg, 1);
2821 /* We need to check both for exact equality and tree equality. The
2822 former will be true if the operand has a side-effect. In that
2823 case, we know the operand occurred exactly once. */
2825 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2826 arg0 = new0;
2827 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2828 arg0 = new1;
2830 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2831 arg1 = new0;
2832 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2833 arg1 = new1;
2835 return fold (build2 (code, type, arg0, arg1));
2838 default:
2839 return arg;
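/* For example, eval_subst on `x < y ? x == y : x > y' with OLD0 = x,
   NEW0 = 0, OLD1 = y and NEW1 = 1 rebuilds the tree as
   `0 < 1 ? 0 == 1 : 0 > 1', which the fold calls above reduce to a
   constant.  */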
2843 /* Return a tree for the case when the result of an expression is RESULT
2844 converted to TYPE and OMITTED was previously an operand of the expression
2845 but is now not needed (e.g., we folded OMITTED * 0).
2847 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2848 the conversion of RESULT to TYPE. */
2850 tree
2851 omit_one_operand (tree type, tree result, tree omitted)
2853 tree t = fold_convert (type, result);
2855 if (TREE_SIDE_EFFECTS (omitted))
2856 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2858 return non_lvalue (t);
2861 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2863 static tree
2864 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2866 tree t = fold_convert (type, result);
2868 if (TREE_SIDE_EFFECTS (omitted))
2869 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2871 return pedantic_non_lvalue (t);
2874 /* Return a tree for the case when the result of an expression is RESULT
2875 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2876 of the expression but are now not needed.
2878 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2879 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2880 evaluated before OMITTED2. Otherwise, if neither has side effects,
2881 just do the conversion of RESULT to TYPE. */
2883 tree
2884 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2886 tree t = fold_convert (type, result);
2888 if (TREE_SIDE_EFFECTS (omitted2))
2889 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2890 if (TREE_SIDE_EFFECTS (omitted1))
2891 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2893 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
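/* The shape of the result when both operands have side effects is
     COMPOUND_EXPR <OMITTED1, COMPOUND_EXPR <OMITTED2, RESULT>>
   so OMITTED1 is indeed evaluated first; when neither has side
   effects only the converted RESULT remains, wrapped by non_lvalue.  */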
2897 /* Return a simplified tree node for the truth-negation of ARG. This
2898 never alters ARG itself. We assume that ARG is an operation that
2899 returns a truth value (0 or 1).
2901 FIXME: one would think we would fold the result, but it causes
2902 problems with the dominator optimizer. */
2903 tree
2904 invert_truthvalue (tree arg)
2906 tree type = TREE_TYPE (arg);
2907 enum tree_code code = TREE_CODE (arg);
2909 if (code == ERROR_MARK)
2910 return arg;
2912 /* If this is a comparison, we can simply invert it, except for
2913 floating-point non-equality comparisons, in which case we just
2914 enclose a TRUTH_NOT_EXPR around what we have. */
2916 if (TREE_CODE_CLASS (code) == tcc_comparison)
2918 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2919 if (FLOAT_TYPE_P (op_type)
2920 && flag_trapping_math
2921 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2922 && code != NE_EXPR && code != EQ_EXPR)
2923 return build1 (TRUTH_NOT_EXPR, type, arg);
2924 else
2926 code = invert_tree_comparison (code,
2927 HONOR_NANS (TYPE_MODE (op_type)));
2928 if (code == ERROR_MARK)
2929 return build1 (TRUTH_NOT_EXPR, type, arg);
2930 else
2931 return build2 (code, type,
2932 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2936 switch (code)
2938 case INTEGER_CST:
2939 return constant_boolean_node (integer_zerop (arg), type);
2941 case TRUTH_AND_EXPR:
2942 return build2 (TRUTH_OR_EXPR, type,
2943 invert_truthvalue (TREE_OPERAND (arg, 0)),
2944 invert_truthvalue (TREE_OPERAND (arg, 1)));
2946 case TRUTH_OR_EXPR:
2947 return build2 (TRUTH_AND_EXPR, type,
2948 invert_truthvalue (TREE_OPERAND (arg, 0)),
2949 invert_truthvalue (TREE_OPERAND (arg, 1)));
2951 case TRUTH_XOR_EXPR:
2952 /* Here we can invert either operand. We invert the first operand
2953 unless the second operand is a TRUTH_NOT_EXPR in which case our
2954 result is the XOR of the first operand with the inside of the
2955 negation of the second operand. */
2957 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2958 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2959 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2960 else
2961 return build2 (TRUTH_XOR_EXPR, type,
2962 invert_truthvalue (TREE_OPERAND (arg, 0)),
2963 TREE_OPERAND (arg, 1));
2965 case TRUTH_ANDIF_EXPR:
2966 return build2 (TRUTH_ORIF_EXPR, type,
2967 invert_truthvalue (TREE_OPERAND (arg, 0)),
2968 invert_truthvalue (TREE_OPERAND (arg, 1)));
2970 case TRUTH_ORIF_EXPR:
2971 return build2 (TRUTH_ANDIF_EXPR, type,
2972 invert_truthvalue (TREE_OPERAND (arg, 0)),
2973 invert_truthvalue (TREE_OPERAND (arg, 1)));
2975 case TRUTH_NOT_EXPR:
2976 return TREE_OPERAND (arg, 0);
2978 case COND_EXPR:
2979 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2980 invert_truthvalue (TREE_OPERAND (arg, 1)),
2981 invert_truthvalue (TREE_OPERAND (arg, 2)));
2983 case COMPOUND_EXPR:
2984 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2985 invert_truthvalue (TREE_OPERAND (arg, 1)));
2987 case NON_LVALUE_EXPR:
2988 return invert_truthvalue (TREE_OPERAND (arg, 0));
2990 case NOP_EXPR:
2991 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2992 break;
2994 case CONVERT_EXPR:
2995 case FLOAT_EXPR:
2996 return build1 (TREE_CODE (arg), type,
2997 invert_truthvalue (TREE_OPERAND (arg, 0)));
2999 case BIT_AND_EXPR:
3000 if (!integer_onep (TREE_OPERAND (arg, 1)))
3001 break;
3002 return build2 (EQ_EXPR, type, arg,
3003 fold_convert (type, integer_zero_node));
3005 case SAVE_EXPR:
3006 return build1 (TRUTH_NOT_EXPR, type, arg);
3008 case CLEANUP_POINT_EXPR:
3009 return build1 (CLEANUP_POINT_EXPR, type,
3010 invert_truthvalue (TREE_OPERAND (arg, 0)));
3012 default:
3013 break;
3015 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3016 return build1 (TRUTH_NOT_EXPR, type, arg);
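/* Examples (illustrative): `a && b' inverts to `!a || !b' through the
   TRUTH_AND_EXPR case, recursing into both operands, while under
   -ftrapping-math a floating-point `x < y' stays wrapped in
   TRUTH_NOT_EXPR rather than becoming `x >= y', since the rewrite
   could change whether an exception is raised for NaN operands.  */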
3019 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3020 operands are another bit-wise operation with a common input. If so,
3021 distribute the bit operations to save an operation and possibly two if
3022 constants are involved. For example, convert
3023 (A | B) & (A | C) into A | (B & C)
3024 Further simplification will occur if B and C are constants.
3026 If this optimization cannot be done, 0 will be returned. */
3028 static tree
3029 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3031 tree common;
3032 tree left, right;
3034 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3035 || TREE_CODE (arg0) == code
3036 || (TREE_CODE (arg0) != BIT_AND_EXPR
3037 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3038 return 0;
3040 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3042 common = TREE_OPERAND (arg0, 0);
3043 left = TREE_OPERAND (arg0, 1);
3044 right = TREE_OPERAND (arg1, 1);
3046 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3048 common = TREE_OPERAND (arg0, 0);
3049 left = TREE_OPERAND (arg0, 1);
3050 right = TREE_OPERAND (arg1, 0);
3052 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3054 common = TREE_OPERAND (arg0, 1);
3055 left = TREE_OPERAND (arg0, 0);
3056 right = TREE_OPERAND (arg1, 1);
3058 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3060 common = TREE_OPERAND (arg0, 1);
3061 left = TREE_OPERAND (arg0, 0);
3062 right = TREE_OPERAND (arg1, 0);
3064 else
3065 return 0;
3067 return fold (build2 (TREE_CODE (arg0), type, common,
3068 fold (build2 (code, type, left, right))));
3071 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3072 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3074 static tree
3075 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3076 int unsignedp)
3078 tree result;
3080 if (bitpos == 0)
3082 tree size = TYPE_SIZE (TREE_TYPE (inner));
3083 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3084 || POINTER_TYPE_P (TREE_TYPE (inner)))
3085 && host_integerp (size, 0)
3086 && tree_low_cst (size, 0) == bitsize)
3087 return fold_convert (type, inner);
3090 result = build3 (BIT_FIELD_REF, type, inner,
3091 size_int (bitsize), bitsize_int (bitpos));
3093 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3095 return result;
3098 /* Optimize a bit-field compare.
3100 There are two cases: First is a compare against a constant and the
3101 second is a comparison of two items where the fields are at the same
3102 bit position relative to the start of a chunk (byte, halfword, word)
3103 large enough to contain it. In these cases we can avoid the shift
3104 implicit in bitfield extractions.
3106 For constants, we emit a compare of the shifted constant with the
3107 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3108 compared. For two fields at the same position, we do the ANDs with the
3109 similar mask and compare the result of the ANDs.
3111 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3112 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3113 are the left and right operands of the comparison, respectively.
3115 If the optimization described above can be done, we return the resulting
3116 tree. Otherwise we return zero. */
3118 static tree
3119 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3120 tree lhs, tree rhs)
3122 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3123 tree type = TREE_TYPE (lhs);
3124 tree signed_type, unsigned_type;
3125 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3126 enum machine_mode lmode, rmode, nmode;
3127 int lunsignedp, runsignedp;
3128 int lvolatilep = 0, rvolatilep = 0;
3129 tree linner, rinner = NULL_TREE;
3130 tree mask;
3131 tree offset;
3133 /* Get all the information about the extractions being done. If the bit size
3134 is the same as the size of the underlying object, we aren't doing an
3135 extraction at all and so can do nothing. We also don't want to
3136 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3137 then will no longer be able to replace it. */
3138 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3139 &lunsignedp, &lvolatilep, false);
3140 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3141 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3142 return 0;
3144 if (!const_p)
3146 /* If this is not a constant, we can only do something if bit positions,
3147 sizes, and signedness are the same. */
3148 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3149 &runsignedp, &rvolatilep, false);
3151 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3152 || lunsignedp != runsignedp || offset != 0
3153 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3154 return 0;
3157 /* See if we can find a mode to refer to this field. We should be able to,
3158 but fail if we can't. */
3159 nmode = get_best_mode (lbitsize, lbitpos,
3160 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3161 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3162 TYPE_ALIGN (TREE_TYPE (rinner))),
3163 word_mode, lvolatilep || rvolatilep);
3164 if (nmode == VOIDmode)
3165 return 0;
3167 /* Set signed and unsigned types of the precision of this mode for the
3168 shifts below. */
3169 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3170 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3172 /* Compute the bit position and size for the new reference and our offset
3173 within it. If the new reference is the same size as the original, we
3174 won't optimize anything, so return zero. */
3175 nbitsize = GET_MODE_BITSIZE (nmode);
3176 nbitpos = lbitpos & ~ (nbitsize - 1);
3177 lbitpos -= nbitpos;
3178 if (nbitsize == lbitsize)
3179 return 0;
3181 if (BYTES_BIG_ENDIAN)
3182 lbitpos = nbitsize - lbitsize - lbitpos;
3184 /* Make the mask to be used against the extracted field. */
3185 mask = build_int_cst (unsigned_type, -1);
3186 mask = force_fit_type (mask, 0, false, false);
3187 mask = fold_convert (unsigned_type, mask);
3188 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3189 mask = const_binop (RSHIFT_EXPR, mask,
3190 size_int (nbitsize - lbitsize - lbitpos), 0);
3192 if (! const_p)
3193 /* If not comparing with constant, just rework the comparison
3194 and return. */
3195 return build2 (code, compare_type,
3196 build2 (BIT_AND_EXPR, unsigned_type,
3197 make_bit_field_ref (linner, unsigned_type,
3198 nbitsize, nbitpos, 1),
3199 mask),
3200 build2 (BIT_AND_EXPR, unsigned_type,
3201 make_bit_field_ref (rinner, unsigned_type,
3202 nbitsize, nbitpos, 1),
3203 mask));
3205 /* Otherwise, we are handling the constant case. See if the constant is too
3206 big for the field. Warn and return a tree for 0 (false) if so. We do
3207 this not only for its own sake, but to avoid having to test for this
3208 error case below. If we didn't, we might generate wrong code.
3210 For unsigned fields, the constant shifted right by the field length should
3211 be all zero. For signed fields, the high-order bits should agree with
3212 the sign bit. */
3214 if (lunsignedp)
3216 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3217 fold_convert (unsigned_type, rhs),
3218 size_int (lbitsize), 0)))
3220 warning ("comparison is always %d due to width of bit-field",
3221 code == NE_EXPR);
3222 return constant_boolean_node (code == NE_EXPR, compare_type);
3225 else
3227 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3228 size_int (lbitsize - 1), 0);
3229 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3231 warning ("comparison is always %d due to width of bit-field",
3232 code == NE_EXPR);
3233 return constant_boolean_node (code == NE_EXPR, compare_type);
3237 /* Single-bit compares should always be against zero. */
3238 if (lbitsize == 1 && ! integer_zerop (rhs))
3240 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3241 rhs = fold_convert (type, integer_zero_node);
3244 /* Make a new bitfield reference, shift the constant over the
3245 appropriate number of bits and mask it with the computed mask
3246 (in case this was a signed field). If we changed it, make a new one. */
3247 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3248 if (lvolatilep)
3250 TREE_SIDE_EFFECTS (lhs) = 1;
3251 TREE_THIS_VOLATILE (lhs) = 1;
3254 rhs = fold (const_binop (BIT_AND_EXPR,
3255 const_binop (LSHIFT_EXPR,
3256 fold_convert (unsigned_type, rhs),
3257 size_int (lbitpos), 0),
3258 mask, 0));
3260 return build2 (code, compare_type,
3261 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3262 rhs);
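/* A worked example, in outline (illustrative; it ignores endianness
   and layout details): for `struct { unsigned f : 3; } s' with the
   field at bit 0 of a 32-bit word, `s.f == 5' becomes roughly
   `(w & 7) == 5' where `w' is a word-sized BIT_FIELD_REF of `s',
   avoiding the shift of a plain bit-field read.  A constant such as 9
   that cannot fit in the three bits instead triggers the warning
   above and folds to a constant answer.  */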
3265 /* Subroutine for fold_truthop: decode a field reference.
3267 If EXP is a comparison reference, we return the innermost reference.
3269 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3270 set to the starting bit number.
3272 If the innermost field can be completely contained in a mode-sized
3273 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3275 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3276 otherwise it is not changed.
3278 *PUNSIGNEDP is set to the signedness of the field.
3280 *PMASK is set to the mask used. This is either contained in a
3281 BIT_AND_EXPR or derived from the width of the field.
3283 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3285 Return 0 if this is not a component reference or is one that we can't
3286 do anything with. */
3288 static tree
3289 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3290 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3291 int *punsignedp, int *pvolatilep,
3292 tree *pmask, tree *pand_mask)
3294 tree outer_type = 0;
3295 tree and_mask = 0;
3296 tree mask, inner, offset;
3297 tree unsigned_type;
3298 unsigned int precision;
3300 /* All the optimizations using this function assume integer fields.
3301 There are problems with FP fields since the type_for_size call
3302 below can fail for, e.g., XFmode. */
3303 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3304 return 0;
3306 /* We are interested in the bare arrangement of bits, so strip everything
3307 that doesn't affect the machine mode. However, record the type of the
3308 outermost expression if it may matter below. */
3309 if (TREE_CODE (exp) == NOP_EXPR
3310 || TREE_CODE (exp) == CONVERT_EXPR
3311 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3312 outer_type = TREE_TYPE (exp);
3313 STRIP_NOPS (exp);
3315 if (TREE_CODE (exp) == BIT_AND_EXPR)
3317 and_mask = TREE_OPERAND (exp, 1);
3318 exp = TREE_OPERAND (exp, 0);
3319 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3320 if (TREE_CODE (and_mask) != INTEGER_CST)
3321 return 0;
3324 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3325 punsignedp, pvolatilep, false);
3326 if ((inner == exp && and_mask == 0)
3327 || *pbitsize < 0 || offset != 0
3328 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3329 return 0;
3331 /* If the number of bits in the reference is the same as the bitsize of
3332 the outer type, then the outer type gives the signedness. Otherwise
3333 (in case of a small bitfield) the signedness is unchanged. */
3334 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3335 *punsignedp = TYPE_UNSIGNED (outer_type);
3337 /* Compute the mask to access the bitfield. */
3338 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3339 precision = TYPE_PRECISION (unsigned_type);
3341 mask = build_int_cst (unsigned_type, -1);
3342 mask = force_fit_type (mask, 0, false, false);
3344 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3345 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3347 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3348 if (and_mask != 0)
3349 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3350 fold_convert (unsigned_type, and_mask), mask));
3352 *pmask = mask;
3353 *pand_mask = and_mask;
3354 return inner;
3357 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3358 bit positions. */
3360 static int
3361 all_ones_mask_p (tree mask, int size)
3363 tree type = TREE_TYPE (mask);
3364 unsigned int precision = TYPE_PRECISION (type);
3365 tree tmask;
3367 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3368 tmask = force_fit_type (tmask, 0, false, false);
3370 return
3371 tree_int_cst_equal (mask,
3372 const_binop (RSHIFT_EXPR,
3373 const_binop (LSHIFT_EXPR, tmask,
3374 size_int (precision - size),
3376 size_int (precision - size), 0));
3379 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3380 represents the sign bit of EXP's type. If EXP represents a sign
3381 or zero extension, also test VAL against the unextended type.
3382 The return value is the (sub)expression whose sign bit is VAL,
3383 or NULL_TREE otherwise. */
3385 static tree
3386 sign_bit_p (tree exp, tree val)
3388 unsigned HOST_WIDE_INT mask_lo, lo;
3389 HOST_WIDE_INT mask_hi, hi;
3390 int width;
3391 tree t;
3393 /* Tree EXP must have an integral type. */
3394 t = TREE_TYPE (exp);
3395 if (! INTEGRAL_TYPE_P (t))
3396 return NULL_TREE;
3398 /* Tree VAL must be an integer constant. */
3399 if (TREE_CODE (val) != INTEGER_CST
3400 || TREE_CONSTANT_OVERFLOW (val))
3401 return NULL_TREE;
3403 width = TYPE_PRECISION (t);
3404 if (width > HOST_BITS_PER_WIDE_INT)
3406 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3407 lo = 0;
3409 mask_hi = ((unsigned HOST_WIDE_INT) -1
3410 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3411 mask_lo = -1;
3413 else
3415 hi = 0;
3416 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3418 mask_hi = 0;
3419 mask_lo = ((unsigned HOST_WIDE_INT) -1
3420 >> (HOST_BITS_PER_WIDE_INT - width));
3423 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3424 treat VAL as if it were unsigned. */
3425 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3426 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3427 return exp;
3429 /* Handle extension from a narrower type. */
3430 if (TREE_CODE (exp) == NOP_EXPR
3431 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3432 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3434 return NULL_TREE;
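/* For example, with 32-bit `int', sign_bit_p (x, <0x80000000>) returns
   x itself, and if EXP is `(int) c' for a signed char `c', the
   narrower constant 0x80 is recognized through the NOP_EXPR case and
   the inner `c' is returned.  */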
3437 /* Subroutine for fold_truthop: determine if an operand is simple enough
3438 to be evaluated unconditionally. */
3440 static int
3441 simple_operand_p (tree exp)
3443 /* Strip any conversions that don't change the machine mode. */
3444 STRIP_NOPS (exp);
3446 return (CONSTANT_CLASS_P (exp)
3447 || TREE_CODE (exp) == SSA_NAME
3448 || (DECL_P (exp)
3449 && ! TREE_ADDRESSABLE (exp)
3450 && ! TREE_THIS_VOLATILE (exp)
3451 && ! DECL_NONLOCAL (exp)
3452 /* Don't regard global variables as simple. They may be
3453 allocated in ways unknown to the compiler (shared memory,
3454 #pragma weak, etc). */
3455 && ! TREE_PUBLIC (exp)
3456 && ! DECL_EXTERNAL (exp)
3457 /* Loading a static variable is unduly expensive, but global
3458 registers aren't expensive. */
3459 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3462 /* The following functions are subroutines to fold_range_test and allow it to
3463 try to change a logical combination of comparisons into a range test.
3465 For example, both
3466 X == 2 || X == 3 || X == 4 || X == 5
3467 and
3468 X >= 2 && X <= 5
3469 are converted to
3470 (unsigned) (X - 2) <= 3
3472 We describe each set of comparisons as being either inside or outside
3473 a range, using a variable named like IN_P, and then describe the
3474 range with a lower and upper bound. If one of the bounds is omitted,
3475 it represents either the highest or lowest value of the type.
3477 In the comments below, we represent a range by two numbers in brackets
3478 preceded by a "+" to designate being inside that range, or a "-" to
3479 designate being outside that range, so the condition can be inverted by
3480 flipping the prefix. An omitted bound is represented by a "-". For
3481 example, "- [-, 10]" means being outside the range starting at the lowest
3482 possible value and ending at 10, in other words, being greater than 10.
3483 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3484 always false.
3486 We set up things so that the missing bounds are handled in a consistent
3487 manner so neither a missing bound nor "true" and "false" need to be
3488 handled using a special case. */
3490 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3491 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3492 and UPPER1_P are nonzero if the respective argument is an upper bound
3493 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3494 must be specified for a comparison. ARG1 will be converted to ARG0's
3495 type if both are specified. */
3497 static tree
3498 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3499 tree arg1, int upper1_p)
3501 tree tem;
3502 int result;
3503 int sgn0, sgn1;
3505 /* If neither arg represents infinity, do the normal operation.
3506 Else, if not a comparison, return infinity. Else handle the special
3507 comparison rules. Note that most of the cases below won't occur, but
3508 are handled for consistency. */
3510 if (arg0 != 0 && arg1 != 0)
3512 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3513 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3514 STRIP_NOPS (tem);
3515 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3518 if (TREE_CODE_CLASS (code) != tcc_comparison)
3519 return 0;
3521 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3522 for neither. In real mathematics, we cannot assume open-ended ranges are
3523 the same. But this is computer arithmetic, where numbers are finite.
3524 We can therefore model a missing bound as a value Z beyond every
3525 representable number: -Z for a lower bound, +Z for an upper one. This permits
3526 us to treat unbounded ranges as equal. */
3527 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3528 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3529 switch (code)
3531 case EQ_EXPR:
3532 result = sgn0 == sgn1;
3533 break;
3534 case NE_EXPR:
3535 result = sgn0 != sgn1;
3536 break;
3537 case LT_EXPR:
3538 result = sgn0 < sgn1;
3539 break;
3540 case LE_EXPR:
3541 result = sgn0 <= sgn1;
3542 break;
3543 case GT_EXPR:
3544 result = sgn0 > sgn1;
3545 break;
3546 case GE_EXPR:
3547 result = sgn0 >= sgn1;
3548 break;
3549 default:
3550 gcc_unreachable ();
3553 return constant_boolean_node (result, type);
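/* For example, if ARG0 is a missing lower bound (so SGN0 is -1) and
   ARG1 is an ordinary constant (SGN1 is 0), LT_EXPR yields true: the
   absent bound behaves as -Z, below every representable number, per
   the convention above.  */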
3556 /* Given EXP, a logical expression, set the range it is testing into
3557 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3558 actually being tested. *PLOW and *PHIGH will be made of the same type
3559 as the returned expression. If EXP is not a comparison, we will most
3560 likely not be returning a useful value and range. */
3562 static tree
3563 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3565 enum tree_code code;
3566 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3567 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3568 int in_p, n_in_p;
3569 tree low, high, n_low, n_high;
3571 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3572 and see if we can refine the range. Some of the cases below may not
3573 happen, but it doesn't seem worth worrying about this. We "continue"
3574 the outer loop when we've changed something; otherwise we "break"
3575 the switch, which will "break" the while. */
3577 in_p = 0;
3578 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3580 while (1)
3582 code = TREE_CODE (exp);
3583 exp_type = TREE_TYPE (exp);
3585 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3587 if (TREE_CODE_LENGTH (code) > 0)
3588 arg0 = TREE_OPERAND (exp, 0);
3589 if (TREE_CODE_CLASS (code) == tcc_comparison
3590 || TREE_CODE_CLASS (code) == tcc_unary
3591 || TREE_CODE_CLASS (code) == tcc_binary)
3592 arg0_type = TREE_TYPE (arg0);
3593 if (TREE_CODE_CLASS (code) == tcc_binary
3594 || TREE_CODE_CLASS (code) == tcc_comparison
3595 || (TREE_CODE_CLASS (code) == tcc_expression
3596 && TREE_CODE_LENGTH (code) > 1))
3597 arg1 = TREE_OPERAND (exp, 1);
3600 switch (code)
3602 case TRUTH_NOT_EXPR:
3603 in_p = ! in_p, exp = arg0;
3604 continue;
3606 case EQ_EXPR: case NE_EXPR:
3607 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3608 /* We can only do something if the range is testing for zero
3609 and if the second operand is an integer constant. Note that
3610 saying something is "in" the range we make is done by
3611 complementing IN_P, since it is set in the initial case of
3612 being not equal to zero; "out" is leaving it alone. */
3613 if (low == 0 || high == 0
3614 || ! integer_zerop (low) || ! integer_zerop (high)
3615 || TREE_CODE (arg1) != INTEGER_CST)
3616 break;
3618 switch (code)
3620 case NE_EXPR: /* - [c, c] */
3621 low = high = arg1;
3622 break;
3623 case EQ_EXPR: /* + [c, c] */
3624 in_p = ! in_p, low = high = arg1;
3625 break;
3626 case GT_EXPR: /* - [-, c] */
3627 low = 0, high = arg1;
3628 break;
3629 case GE_EXPR: /* + [c, -] */
3630 in_p = ! in_p, low = arg1, high = 0;
3631 break;
3632 case LT_EXPR: /* - [c, -] */
3633 low = arg1, high = 0;
3634 break;
3635 case LE_EXPR: /* + [-, c] */
3636 in_p = ! in_p, low = 0, high = arg1;
3637 break;
3638 default:
3639 gcc_unreachable ();
3642 /* If this is an unsigned comparison, we also know that EXP is
3643 greater than or equal to zero. We base the range tests we make
3644 on that fact, so we record it here so we can parse existing
3645 range tests. We test arg0_type since often the return type
3646 of, e.g. EQ_EXPR, is boolean. */
3647 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3649 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3650 in_p, low, high, 1,
3651 fold_convert (arg0_type, integer_zero_node),
3652 NULL_TREE))
3653 break;
3655 in_p = n_in_p, low = n_low, high = n_high;
3657 /* If the high bound is missing, but we have a nonzero low
3658 bound, reverse the range so it goes from zero to the low bound
3659 minus 1. */
3660 if (high == 0 && low && ! integer_zerop (low))
3662 in_p = ! in_p;
3663 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3664 integer_one_node, 0);
3665 low = fold_convert (arg0_type, integer_zero_node);
3669 exp = arg0;
3670 continue;
3672 case NEGATE_EXPR:
3673 /* (-x) IN [a,b] -> x in [-b, -a] */
3674 n_low = range_binop (MINUS_EXPR, exp_type,
3675 fold_convert (exp_type, integer_zero_node),
3676 0, high, 1);
3677 n_high = range_binop (MINUS_EXPR, exp_type,
3678 fold_convert (exp_type, integer_zero_node),
3679 0, low, 0);
3680 low = n_low, high = n_high;
3681 exp = arg0;
3682 continue;
3684 case BIT_NOT_EXPR:
3685 /* ~ X -> -X - 1 */
3686 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3687 fold_convert (exp_type, integer_one_node));
3688 continue;
3690 case PLUS_EXPR: case MINUS_EXPR:
3691 if (TREE_CODE (arg1) != INTEGER_CST)
3692 break;
3694 /* If EXP is signed, any overflow in the computation is undefined,
3695 so we don't worry about it so long as our computations on
3696 the bounds don't overflow. For unsigned, overflow is defined
3697 and this is exactly the right thing. */
3698 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3699 arg0_type, low, 0, arg1, 0);
3700 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3701 arg0_type, high, 1, arg1, 0);
3702 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3703 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3704 break;
3706 /* Check for an unsigned range which has wrapped around the maximum
3707 value thus making n_high < n_low, and normalize it. */
3708 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3710 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3711 integer_one_node, 0);
3712 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3713 integer_one_node, 0);
3715 /* If the range is of the form +/- [ x+1, x ], we won't
3716 be able to normalize it. But then, it represents the
3717 whole range or the empty set, so make it
3718 +/- [ -, - ]. */
3719 if (tree_int_cst_equal (n_low, low)
3720 && tree_int_cst_equal (n_high, high))
3721 low = high = 0;
3722 else
3723 in_p = ! in_p;
3725 else
3726 low = n_low, high = n_high;
3728 exp = arg0;
3729 continue;
3731 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3732 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3733 break;
3735 if (! INTEGRAL_TYPE_P (arg0_type)
3736 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3737 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3738 break;
3740 n_low = low, n_high = high;
3742 if (n_low != 0)
3743 n_low = fold_convert (arg0_type, n_low);
3745 if (n_high != 0)
3746 n_high = fold_convert (arg0_type, n_high);
3749 /* If we're converting arg0 from an unsigned type to exp,
3750 a signed type, we will be doing the comparison as unsigned.
3751 The tests above have already verified that LOW and HIGH
3752 are both positive.
3754 So we have to ensure that we will handle large unsigned
3755 values the same way that the current signed bounds treat
3756 negative values. */
3758 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3760 tree high_positive;
3761 tree equiv_type = lang_hooks.types.type_for_mode
3762 (TYPE_MODE (arg0_type), 1);
3764 /* A range without an upper bound is, naturally, unbounded.
3765 Since convert would have cropped a very large value, use
3766 the max value for the destination type. */
3767 high_positive
3768 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3769 : TYPE_MAX_VALUE (arg0_type);
3771 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3772 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3773 fold_convert (arg0_type,
3774 high_positive),
3775 fold_convert (arg0_type,
3776 integer_one_node)));
3778 /* If the low bound is specified, "and" the range with the
3779 range for which the original unsigned value will be
3780 positive. */
3781 if (low != 0)
3783 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3784 1, n_low, n_high, 1,
3785 fold_convert (arg0_type,
3786 integer_zero_node),
3787 high_positive))
3788 break;
3790 in_p = (n_in_p == in_p);
3792 else
3794 /* Otherwise, "or" the range with the range of the input
3795 that will be interpreted as negative. */
3796 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3797 0, n_low, n_high, 1,
3798 fold_convert (arg0_type,
3799 integer_zero_node),
3800 high_positive))
3801 break;
3803 in_p = (in_p != n_in_p);
3807 exp = arg0;
3808 low = n_low, high = n_high;
3809 continue;
3811 default:
3812 break;
3815 break;
3818 /* If EXP is a constant, we can evaluate whether this is true or false. */
3819 if (TREE_CODE (exp) == INTEGER_CST)
3821 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3822 exp, 0, low, 0))
3823 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3824 exp, 1, high, 1)));
3825 low = high = 0;
3826 exp = 0;
3829 *pin_p = in_p, *plow = low, *phigh = high;
3830 return exp;
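/* Editorial worked example (not from the original sources): for the
   test -x > 5 with signed x, the loop above starts from the range
   -x in [6, +INF]; the NEGATE_EXPR case rewrites it to x in [-INF, -6],
   and the PLUS_EXPR case likewise shifts constant offsets out of EXP,
   so x + 2 > 5 becomes x in [4, +INF].  */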
3833 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3834 type, TYPE, return an expression to test if EXP is in (or out of, depending
3835 on IN_P) the range. Return 0 if the test couldn't be created. */
3837 static tree
3838 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3840 tree etype = TREE_TYPE (exp);
3841 tree value;
3843 if (! in_p)
3845 value = build_range_check (type, exp, 1, low, high);
3846 if (value != 0)
3847 return invert_truthvalue (value);
3849 return 0;
3852 if (low == 0 && high == 0)
3853 return fold_convert (type, integer_one_node);
3855 if (low == 0)
3856 return fold (build2 (LE_EXPR, type, exp, high));
3858 if (high == 0)
3859 return fold (build2 (GE_EXPR, type, exp, low));
3861 if (operand_equal_p (low, high, 0))
3862 return fold (build2 (EQ_EXPR, type, exp, low));
3864 if (integer_zerop (low))
3866 if (! TYPE_UNSIGNED (etype))
3868 etype = lang_hooks.types.unsigned_type (etype);
3869 high = fold_convert (etype, high);
3870 exp = fold_convert (etype, exp);
3872 return build_range_check (type, exp, 1, 0, high);
3875 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3876 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3878 unsigned HOST_WIDE_INT lo;
3879 HOST_WIDE_INT hi;
3880 int prec;
3882 prec = TYPE_PRECISION (etype);
3883 if (prec <= HOST_BITS_PER_WIDE_INT)
3885 hi = 0;
3886 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3888 else
3890 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3891 lo = (unsigned HOST_WIDE_INT) -1;
3894 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3896 if (TYPE_UNSIGNED (etype))
3898 etype = lang_hooks.types.signed_type (etype);
3899 exp = fold_convert (etype, exp);
3901 return fold (build2 (GT_EXPR, type, exp,
3902 fold_convert (etype, integer_zero_node)));
3906 value = const_binop (MINUS_EXPR, high, low, 0);
3907 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3909 tree utype, minv, maxv;
3911 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3912 for the type in question, as we rely on this here. */
3913 switch (TREE_CODE (etype))
3915 case INTEGER_TYPE:
3916 case ENUMERAL_TYPE:
3917 case CHAR_TYPE:
3918 utype = lang_hooks.types.unsigned_type (etype);
3919 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3920 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3921 integer_one_node, 1);
3922 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3923 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3924 minv, 1, maxv, 1)))
3926 etype = utype;
3927 high = fold_convert (etype, high);
3928 low = fold_convert (etype, low);
3929 exp = fold_convert (etype, exp);
3930 value = const_binop (MINUS_EXPR, high, low, 0);
3932 break;
3933 default:
3934 break;
3938 if (value != 0 && ! TREE_OVERFLOW (value))
3939 return build_range_check (type,
3940 fold (build2 (MINUS_EXPR, etype, exp, low)),
3941 1, fold_convert (etype, integer_zero_node),
3942 value);
3944 return 0;
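/* Editorial sketch, not part of GCC: a source-level picture of the
   transformation above, assuming 32-bit int.  Both hypothetical
   helpers compute the same predicate; build_range_check rewrites the
   first form into the second:

     int in_range_naive (int x)  { return x >= 10 && x <= 20; }
     int in_range_folded (int x) { return (unsigned int) x - 10 <= 10; }

   Values below LOW wrap past the top of the unsigned range, so a
   single unsigned comparison tests both bounds.  */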
3947 /* Given two ranges, see if we can merge them into one. Return 1 if we
3948 can, 0 if we can't. Set the output range into the specified parameters. */
3950 static int
3951 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3952 tree high0, int in1_p, tree low1, tree high1)
3954 int no_overlap;
3955 int subset;
3956 int temp;
3957 tree tem;
3958 int in_p;
3959 tree low, high;
3960 int lowequal = ((low0 == 0 && low1 == 0)
3961 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3962 low0, 0, low1, 0)));
3963 int highequal = ((high0 == 0 && high1 == 0)
3964 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3965 high0, 1, high1, 1)));
3967 /* Make range 0 be the range that starts first, or ends last if they
3968 start at the same value. Swap them if range 0 does not. */
3969 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3970 low0, 0, low1, 0))
3971 || (lowequal
3972 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3973 high1, 1, high0, 1))))
3975 temp = in0_p, in0_p = in1_p, in1_p = temp;
3976 tem = low0, low0 = low1, low1 = tem;
3977 tem = high0, high0 = high1, high1 = tem;
3980 /* Now flag two cases, whether the ranges are disjoint or whether the
3981 second range is totally subsumed in the first. Note that the tests
3982 below are simplified by the ones above. */
3983 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3984 high0, 1, low1, 0));
3985 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3986 high1, 1, high0, 1));
3988 /* We now have four cases, depending on whether we are including or
3989 excluding the two ranges. */
3990 if (in0_p && in1_p)
3992 /* If they don't overlap, the result is false. If the second range
3993 is a subset it is the result. Otherwise, the range is from the start
3994 of the second to the end of the first. */
3995 if (no_overlap)
3996 in_p = 0, low = high = 0;
3997 else if (subset)
3998 in_p = 1, low = low1, high = high1;
3999 else
4000 in_p = 1, low = low1, high = high0;
4003 else if (in0_p && ! in1_p)
4005 /* If they don't overlap, the result is the first range. If they are
4006 equal, the result is false. If the second range is a subset of the
4007 first, and the ranges begin at the same place, we go from just after
4008 the end of the first range to the end of the second. If the second
4009 range is not a subset of the first, or if it is a subset and both
4010 ranges end at the same place, the range starts at the start of the
4011 first range and ends just before the second range.
4012 Otherwise, we can't describe this as a single range. */
4013 if (no_overlap)
4014 in_p = 1, low = low0, high = high0;
4015 else if (lowequal && highequal)
4016 in_p = 0, low = high = 0;
4017 else if (subset && lowequal)
4019 in_p = 1, high = high0;
4020 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4021 integer_one_node, 0);
4023 else if (! subset || highequal)
4025 in_p = 1, low = low0;
4026 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4027 integer_one_node, 0);
4029 else
4030 return 0;
4033 else if (! in0_p && in1_p)
4035 /* If they don't overlap, the result is the second range. If the second
4036 is a subset of the first, the result is false. Otherwise,
4037 the range starts just after the first range and ends at the
4038 end of the second. */
4039 if (no_overlap)
4040 in_p = 1, low = low1, high = high1;
4041 else if (subset || highequal)
4042 in_p = 0, low = high = 0;
4043 else
4045 in_p = 1, high = high1;
4046 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4047 integer_one_node, 0);
4051 else
4053 /* The case where we are excluding both ranges. Here the complex case
4054 is if they don't overlap. In that case, the only time we have a
4055 range is if they are adjacent. If the second is a subset of the
4056 first, the result is the first. Otherwise, the range to exclude
4057 starts at the beginning of the first range and ends at the end of the
4058 second. */
4059 if (no_overlap)
4061 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4062 range_binop (PLUS_EXPR, NULL_TREE,
4063 high0, 1,
4064 integer_one_node, 1),
4065 1, low1, 0)))
4066 in_p = 0, low = low0, high = high1;
4067 else
4069 /* Canonicalize - [min, x] into - [-, x]. */
4070 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4071 switch (TREE_CODE (TREE_TYPE (low0)))
4073 case ENUMERAL_TYPE:
4074 if (TYPE_PRECISION (TREE_TYPE (low0))
4075 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4076 break;
4077 /* FALLTHROUGH */
4078 case INTEGER_TYPE:
4079 case CHAR_TYPE:
4080 if (tree_int_cst_equal (low0,
4081 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4082 low0 = 0;
4083 break;
4084 case POINTER_TYPE:
4085 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4086 && integer_zerop (low0))
4087 low0 = 0;
4088 break;
4089 default:
4090 break;
4093 /* Canonicalize - [x, max] into - [x, -]. */
4094 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4095 switch (TREE_CODE (TREE_TYPE (high1)))
4097 case ENUMERAL_TYPE:
4098 if (TYPE_PRECISION (TREE_TYPE (high1))
4099 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4100 break;
4101 /* FALLTHROUGH */
4102 case INTEGER_TYPE:
4103 case CHAR_TYPE:
4104 if (tree_int_cst_equal (high1,
4105 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4106 high1 = 0;
4107 break;
4108 case POINTER_TYPE:
4109 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4110 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4111 high1, 1,
4112 integer_one_node, 1)))
4113 high1 = 0;
4114 break;
4115 default:
4116 break;
4119 /* The ranges might also be adjacent across the maximum and
4120 minimum values of the given type. For
4121 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4122 return + [x + 1, y - 1]. */
4123 if (low0 == 0 && high1 == 0)
4125 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4126 integer_one_node, 1);
4127 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4128 integer_one_node, 0);
4129 if (low == 0 || high == 0)
4130 return 0;
4132 in_p = 1;
4134 else
4135 return 0;
4138 else if (subset)
4139 in_p = 0, low = low0, high = high0;
4140 else
4141 in_p = 0, low = low0, high = high1;
4144 *pin_p = in_p, *plow = low, *phigh = high;
4145 return 1;
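/* Editorial example (not from the original sources): for the classic
   test ch >= '0' && ch <= '9' the two input ranges are + [48, -] and
   + [-, 57].  They overlap and neither is a subset of the other, so
   the in0_p && in1_p case above produces the single range + [48, 57],
   which build_range_check can then test with one comparison.  */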
4149 /* Subroutine of fold, looking inside expressions of the form
4150 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4151 of the COND_EXPR. This function is also used to optimize
4152 A op B ? C : A by reversing the comparison first.
4154 Return a folded expression whose code is not a COND_EXPR
4155 anymore, or NULL_TREE if no folding opportunity is found. */
4157 static tree
4158 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4160 enum tree_code comp_code = TREE_CODE (arg0);
4161 tree arg00 = TREE_OPERAND (arg0, 0);
4162 tree arg01 = TREE_OPERAND (arg0, 1);
4163 tree arg1_type = TREE_TYPE (arg1);
4164 tree tem;
4166 STRIP_NOPS (arg1);
4167 STRIP_NOPS (arg2);
4169 /* If we have A op 0 ? A : -A, consider applying the following
4170 transformations:
4172 A == 0? A : -A same as -A
4173 A != 0? A : -A same as A
4174 A >= 0? A : -A same as abs (A)
4175 A > 0? A : -A same as abs (A)
4176 A <= 0? A : -A same as -abs (A)
4177 A < 0? A : -A same as -abs (A)
4179 None of these transformations work for modes with signed
4180 zeros. If A is +/-0, the first two transformations will
4181 change the sign of the result (from +0 to -0, or vice
4182 versa). The last four will fix the sign of the result,
4183 even though the original expressions could be positive or
4184 negative, depending on the sign of A.
4186 Note that all these transformations are correct if A is
4187 NaN, since the two alternatives (A and -A) are also NaNs. */
4188 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4189 ? real_zerop (arg01)
4190 : integer_zerop (arg01))
4191 && TREE_CODE (arg2) == NEGATE_EXPR
4192 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4193 switch (comp_code)
4195 case EQ_EXPR:
4196 case UNEQ_EXPR:
4197 tem = fold_convert (arg1_type, arg1);
4198 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4199 case NE_EXPR:
4200 case LTGT_EXPR:
4201 return pedantic_non_lvalue (fold_convert (type, arg1));
4202 case UNGE_EXPR:
4203 case UNGT_EXPR:
4204 if (flag_trapping_math)
4205 break;
4206 /* Fall through. */
4207 case GE_EXPR:
4208 case GT_EXPR:
4209 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4210 arg1 = fold_convert (lang_hooks.types.signed_type
4211 (TREE_TYPE (arg1)), arg1);
4212 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4213 return pedantic_non_lvalue (fold_convert (type, tem));
4214 case UNLE_EXPR:
4215 case UNLT_EXPR:
4216 if (flag_trapping_math)
4217 break;
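/* Fall through.  */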
4218 case LE_EXPR:
4219 case LT_EXPR:
4220 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4221 arg1 = fold_convert (lang_hooks.types.signed_type
4222 (TREE_TYPE (arg1)), arg1);
4223 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4224 return negate_expr (fold_convert (type, tem));
4225 default:
4226 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4227 break;
4230 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4231 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4232 both transformations are correct when A is NaN: A != 0
4233 is then true, and A == 0 is false. */
4235 if (integer_zerop (arg01) && integer_zerop (arg2))
4237 if (comp_code == NE_EXPR)
4238 return pedantic_non_lvalue (fold_convert (type, arg1));
4239 else if (comp_code == EQ_EXPR)
4240 return fold_convert (type, integer_zero_node);
4243 /* Try some transformations of A op B ? A : B.
4245 A == B? A : B same as B
4246 A != B? A : B same as A
4247 A >= B? A : B same as max (A, B)
4248 A > B? A : B same as max (B, A)
4249 A <= B? A : B same as min (A, B)
4250 A < B? A : B same as min (B, A)
4252 As above, these transformations don't work in the presence
4253 of signed zeros. For example, if A and B are zeros of
4254 opposite sign, the first two transformations will change
4255 the sign of the result. In the last four, the original
4256 expressions give different results for (A=+0, B=-0) and
4257 (A=-0, B=+0), but the transformed expressions do not.
4259 The first two transformations are correct if either A or B
4260 is a NaN. In the first transformation, the condition will
4261 be false, and B will indeed be chosen. In the case of the
4262 second transformation, the condition A != B will be true,
4263 and A will be chosen.
4265 The conversions to max() and min() are not correct if B is
4266 a number and A is not. The conditions in the original
4267 expressions will be false, so all four give B. The min()
4268 and max() versions would give a NaN instead. */
4269 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4271 tree comp_op0 = arg00;
4272 tree comp_op1 = arg01;
4273 tree comp_type = TREE_TYPE (comp_op0);
4275 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4276 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4278 comp_type = type;
4279 comp_op0 = arg1;
4280 comp_op1 = arg2;
4283 switch (comp_code)
4285 case EQ_EXPR:
4286 return pedantic_non_lvalue (fold_convert (type, arg2));
4287 case NE_EXPR:
4288 return pedantic_non_lvalue (fold_convert (type, arg1));
4289 case LE_EXPR:
4290 case LT_EXPR:
4291 case UNLE_EXPR:
4292 case UNLT_EXPR:
4293 /* In C++ a ?: expression can be an lvalue, so put the
4294 operand which will be used if they are equal first
4295 so that we can convert this back to the
4296 corresponding COND_EXPR. */
4297 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4299 comp_op0 = fold_convert (comp_type, comp_op0);
4300 comp_op1 = fold_convert (comp_type, comp_op1);
4301 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4302 ? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
4303 : fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
4304 return pedantic_non_lvalue (fold_convert (type, tem));
4306 break;
4307 case GE_EXPR:
4308 case GT_EXPR:
4309 case UNGE_EXPR:
4310 case UNGT_EXPR:
4311 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4313 comp_op0 = fold_convert (comp_type, comp_op0);
4314 comp_op1 = fold_convert (comp_type, comp_op1);
4315 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4316 ? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
4317 : fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
4318 return pedantic_non_lvalue (fold_convert (type, tem));
4320 break;
4321 case UNEQ_EXPR:
4322 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4323 return pedantic_non_lvalue (fold_convert (type, arg2));
4324 break;
4325 case LTGT_EXPR:
4326 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4327 return pedantic_non_lvalue (fold_convert (type, arg1));
4328 break;
4329 default:
4330 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4331 break;
4335 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4336 we might still be able to simplify this. For example,
4337 if C1 is one less or one more than C2, this might have started
4338 out as a MIN or MAX and been transformed by this function.
4339 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4341 if (INTEGRAL_TYPE_P (type)
4342 && TREE_CODE (arg01) == INTEGER_CST
4343 && TREE_CODE (arg2) == INTEGER_CST)
4344 switch (comp_code)
4346 case EQ_EXPR:
4347 /* We can replace A with C1 in this case. */
4348 arg1 = fold_convert (type, arg01);
4349 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4351 case LT_EXPR:
4352 /* If C1 is C2 + 1, this is min(A, C2). */
4353 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4354 OEP_ONLY_CONST)
4355 && operand_equal_p (arg01,
4356 const_binop (PLUS_EXPR, arg2,
4357 integer_one_node, 0),
4358 OEP_ONLY_CONST))
4359 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4360 type, arg1, arg2)));
4361 break;
4363 case LE_EXPR:
4364 /* If C1 is C2 - 1, this is min(A, C2). */
4365 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4366 OEP_ONLY_CONST)
4367 && operand_equal_p (arg01,
4368 const_binop (MINUS_EXPR, arg2,
4369 integer_one_node, 0),
4370 OEP_ONLY_CONST))
4371 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4372 type, arg1, arg2)));
4373 break;
4375 case GT_EXPR:
4376 /* If C1 is C2 - 1, this is max(A, C2). */
4377 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4378 OEP_ONLY_CONST)
4379 && operand_equal_p (arg01,
4380 const_binop (MINUS_EXPR, arg2,
4381 integer_one_node, 0),
4382 OEP_ONLY_CONST))
4383 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4384 type, arg1, arg2)));
4385 break;
4387 case GE_EXPR:
4388 /* If C1 is C2 + 1, this is max(A, C2). */
4389 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4390 OEP_ONLY_CONST)
4391 && operand_equal_p (arg01,
4392 const_binop (PLUS_EXPR, arg2,
4393 integer_one_node, 0),
4394 OEP_ONLY_CONST))
4395 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4396 type, arg1, arg2)));
4397 break;
4398 case NE_EXPR:
4399 break;
4400 default:
4401 gcc_unreachable ();
4404 return NULL_TREE;
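/* Editorial sketch (the function below is hypothetical, not part of
   GCC): with integer operands the tables above mean that

     int smaller (int a, int b) { return a < b ? a : b; }

   folds to MIN_EXPR (a, b), and a > 5 ? a : 5 likewise becomes
   MAX_EXPR (a, 5).  The NaN and signed-zero caveats discussed above
   arise only for floating-point operands.  */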
4409 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4410 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4411 #endif
4413 /* EXP is some logical combination of boolean tests. See if we can
4414 merge it into some range test. Return the new tree if so. */
4416 static tree
4417 fold_range_test (tree exp)
4419 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4420 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4421 int in0_p, in1_p, in_p;
4422 tree low0, low1, low, high0, high1, high;
4423 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4424 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4425 tree tem;
4427 /* If this is an OR operation, invert both sides; we will invert
4428 again at the end. */
4429 if (or_op)
4430 in0_p = ! in0_p, in1_p = ! in1_p;
4432 /* If both expressions are the same, if we can merge the ranges, and we
4433 can build the range test, return it or it inverted. If one of the
4434 ranges is always true or always false, consider it to be the same
4435 expression as the other. */
4436 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4437 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4438 in1_p, low1, high1)
4439 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4440 lhs != 0 ? lhs
4441 : rhs != 0 ? rhs : integer_zero_node,
4442 in_p, low, high))))
4443 return or_op ? invert_truthvalue (tem) : tem;
4445 /* On machines where the branch cost is expensive, if this is a
4446 short-circuited branch and the underlying object on both sides
4447 is the same, make a non-short-circuit operation. */
4448 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4449 && lhs != 0 && rhs != 0
4450 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4451 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4452 && operand_equal_p (lhs, rhs, 0))
4454 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4455 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4456 which cases we can't do this. */
4457 if (simple_operand_p (lhs))
4458 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4459 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4460 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4461 TREE_OPERAND (exp, 1));
4463 else if (lang_hooks.decls.global_bindings_p () == 0
4464 && ! CONTAINS_PLACEHOLDER_P (lhs))
4466 tree common = save_expr (lhs);
4468 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4469 or_op ? ! in0_p : in0_p,
4470 low0, high0))
4471 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4472 or_op ? ! in1_p : in1_p,
4473 low1, high1))))
4474 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4475 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4476 TREE_TYPE (exp), lhs, rhs);
4480 return 0;
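/* Editorial example (not from the original sources): for
   x != 0 && x != 1, make_range produces the excluded ranges - [0, 0]
   and - [1, 1]; merge_ranges combines the two adjacent exclusions
   into - [0, 1], and build_range_check then emits the single test
   (unsigned) x > 1.  */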
4483 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4484 bit value. Arrange things so the extra bits will be set to zero if and
4485 only if C is sign-extended to its full width. If MASK is nonzero,
4486 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4488 static tree
4489 unextend (tree c, int p, int unsignedp, tree mask)
4491 tree type = TREE_TYPE (c);
4492 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4493 tree temp;
4495 if (p == modesize || unsignedp)
4496 return c;
4498 /* We work by getting just the sign bit into the low-order bit, then
4499 into the high-order bit, then sign-extend. We then XOR that value
4500 with C. */
4501 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4502 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4504 /* We must use a signed type in order to get an arithmetic right shift.
4505 However, we must also avoid introducing accidental overflows, so that
4506 a subsequent call to integer_zerop will work. Hence we must
4507 do the type conversion here. At this point, the constant is either
4508 zero or one, and the conversion to a signed type can never overflow.
4509 We could get an overflow if this conversion is done anywhere else. */
4510 if (TYPE_UNSIGNED (type))
4511 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4513 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4514 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4515 if (mask != 0)
4516 temp = const_binop (BIT_AND_EXPR, temp,
4517 fold_convert (TREE_TYPE (c), mask), 0);
4518 /* If necessary, convert the type back to match the type of C. */
4519 if (TYPE_UNSIGNED (type))
4520 temp = fold_convert (type, temp);
4522 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
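/* Editorial worked example with hypothetical values P = 4 in an 8-bit
   mode: the two shifts above smear the sign bit of C (bit 3) across
   bits 4..7, so TEMP is 0xF0 when that bit is set and 0 otherwise.
   XORing with C then clears the extra bits exactly when C was already
   sign-extended: 0xFC ^ 0xF0 == 0x0C, while a non-extended 0x0C
   becomes 0xFC.  */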
4525 /* Find ways of folding logical expressions of LHS and RHS:
4526 Try to merge two comparisons to the same innermost item.
4527 Look for range tests like "ch >= '0' && ch <= '9'".
4528 Look for combinations of simple terms on machines with expensive branches
4529 and evaluate the RHS unconditionally.
4531 For example, if we have p->a == 2 && p->b == 4 and we can make an
4532 object large enough to span both A and B, we can do this with a comparison
4533 against the object ANDed with a mask.
4535 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4536 operations to do this with one comparison.
4538 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4539 function and the one above.
4541 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4542 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4544 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
4545 two operands.
4547 We return the simplified tree or 0 if no optimization is possible. */
4549 static tree
4550 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4552 /* If this is the "or" of two comparisons, we can do something if
4553 the comparisons are NE_EXPR. If this is the "and", we can do something
4554 if the comparisons are EQ_EXPR. I.e.,
4555 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4557 WANTED_CODE is this operation code. For single bit fields, we can
4558 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4559 comparison for one-bit fields. */
4561 enum tree_code wanted_code;
4562 enum tree_code lcode, rcode;
4563 tree ll_arg, lr_arg, rl_arg, rr_arg;
4564 tree ll_inner, lr_inner, rl_inner, rr_inner;
4565 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4566 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4567 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4568 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4569 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4570 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4571 enum machine_mode lnmode, rnmode;
4572 tree ll_mask, lr_mask, rl_mask, rr_mask;
4573 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4574 tree l_const, r_const;
4575 tree lntype, rntype, result;
4576 int first_bit, end_bit;
4577 int volatilep;
4579 /* Start by getting the comparison codes. Fail if anything is volatile.
4580 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4581 it were surrounded with a NE_EXPR. */
4583 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4584 return 0;
4586 lcode = TREE_CODE (lhs);
4587 rcode = TREE_CODE (rhs);
4589 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4591 lhs = build2 (NE_EXPR, truth_type, lhs,
4592 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4593 lcode = NE_EXPR;
4596 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4598 rhs = build2 (NE_EXPR, truth_type, rhs,
4599 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4600 rcode = NE_EXPR;
4603 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4604 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4605 return 0;
4607 ll_arg = TREE_OPERAND (lhs, 0);
4608 lr_arg = TREE_OPERAND (lhs, 1);
4609 rl_arg = TREE_OPERAND (rhs, 0);
4610 rr_arg = TREE_OPERAND (rhs, 1);
4612 /* Simplify (x<y) || (x==y) into (x<=y) and related optimizations. */
4613 if (simple_operand_p (ll_arg)
4614 && simple_operand_p (lr_arg))
4616 tree result;
4617 if (operand_equal_p (ll_arg, rl_arg, 0)
4618 && operand_equal_p (lr_arg, rr_arg, 0))
4620 result = combine_comparisons (code, lcode, rcode,
4621 truth_type, ll_arg, lr_arg);
4622 if (result)
4623 return result;
4625 else if (operand_equal_p (ll_arg, rr_arg, 0)
4626 && operand_equal_p (lr_arg, rl_arg, 0))
4628 result = combine_comparisons (code, lcode,
4629 swap_tree_comparison (rcode),
4630 truth_type, ll_arg, lr_arg);
4631 if (result)
4632 return result;
4636 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4637 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4639 /* If the RHS can be evaluated unconditionally and its operands are
4640 simple, it wins to evaluate the RHS unconditionally on machines
4641 with expensive branches. In this case, this isn't a comparison
4642 that can be merged. Avoid doing this if the RHS is a floating-point
4643 comparison since those can trap. */
4645 if (BRANCH_COST >= 2
4646 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4647 && simple_operand_p (rl_arg)
4648 && simple_operand_p (rr_arg))
4650 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4651 if (code == TRUTH_OR_EXPR
4652 && lcode == NE_EXPR && integer_zerop (lr_arg)
4653 && rcode == NE_EXPR && integer_zerop (rr_arg)
4654 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4655 return build2 (NE_EXPR, truth_type,
4656 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4657 ll_arg, rl_arg),
4658 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4660 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4661 if (code == TRUTH_AND_EXPR
4662 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4663 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4664 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4665 return build2 (EQ_EXPR, truth_type,
4666 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4667 ll_arg, rl_arg),
4668 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4670 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4671 return build2 (code, truth_type, lhs, rhs);
4674 /* See if the comparisons can be merged. Then get all the parameters for
4675 each side. */
4677 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4678 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4679 return 0;
4681 volatilep = 0;
4682 ll_inner = decode_field_reference (ll_arg,
4683 &ll_bitsize, &ll_bitpos, &ll_mode,
4684 &ll_unsignedp, &volatilep, &ll_mask,
4685 &ll_and_mask);
4686 lr_inner = decode_field_reference (lr_arg,
4687 &lr_bitsize, &lr_bitpos, &lr_mode,
4688 &lr_unsignedp, &volatilep, &lr_mask,
4689 &lr_and_mask);
4690 rl_inner = decode_field_reference (rl_arg,
4691 &rl_bitsize, &rl_bitpos, &rl_mode,
4692 &rl_unsignedp, &volatilep, &rl_mask,
4693 &rl_and_mask);
4694 rr_inner = decode_field_reference (rr_arg,
4695 &rr_bitsize, &rr_bitpos, &rr_mode,
4696 &rr_unsignedp, &volatilep, &rr_mask,
4697 &rr_and_mask);
4699 /* The inner operation on the lhs of each comparison must be the
4700 same if we are to be able to do anything.
4701 Then see if we have constants. If not, the same must be true for
4702 the rhs's. */
4703 if (volatilep || ll_inner == 0 || rl_inner == 0
4704 || ! operand_equal_p (ll_inner, rl_inner, 0))
4705 return 0;
4707 if (TREE_CODE (lr_arg) == INTEGER_CST
4708 && TREE_CODE (rr_arg) == INTEGER_CST)
4709 l_const = lr_arg, r_const = rr_arg;
4710 else if (lr_inner == 0 || rr_inner == 0
4711 || ! operand_equal_p (lr_inner, rr_inner, 0))
4712 return 0;
4713 else
4714 l_const = r_const = 0;
4716 /* If either comparison code is not correct for our logical operation,
4717 fail. However, we can convert a one-bit comparison against zero into
4718 the opposite comparison against that bit being set in the field. */
4720 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4721 if (lcode != wanted_code)
4723 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4725 /* Make the left operand unsigned, since we are only interested
4726 in the value of one bit. Otherwise we are doing the wrong
4727 thing below. */
4728 ll_unsignedp = 1;
4729 l_const = ll_mask;
4731 else
4732 return 0;
4735 /* This is analogous to the code for l_const above. */
4736 if (rcode != wanted_code)
4738 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4740 rl_unsignedp = 1;
4741 r_const = rl_mask;
4743 else
4744 return 0;
4747 /* After this point all optimizations will generate bit-field
4748 references, which we might not want. */
4749 if (! lang_hooks.can_use_bit_fields_p ())
4750 return 0;
4752 /* See if we can find a mode that contains both fields being compared on
4753 the left. If we can't, fail. Otherwise, update all constants and masks
4754 to be relative to a field of that size. */
4755 first_bit = MIN (ll_bitpos, rl_bitpos);
4756 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4757 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4758 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4759 volatilep);
4760 if (lnmode == VOIDmode)
4761 return 0;
4763 lnbitsize = GET_MODE_BITSIZE (lnmode);
4764 lnbitpos = first_bit & ~ (lnbitsize - 1);
4765 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4766 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4768 if (BYTES_BIG_ENDIAN)
4770 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4771 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4774 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4775 size_int (xll_bitpos), 0);
4776 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4777 size_int (xrl_bitpos), 0);
4779 if (l_const)
4781 l_const = fold_convert (lntype, l_const);
4782 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4783 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4784 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4785 fold (build1 (BIT_NOT_EXPR,
4786 lntype, ll_mask)),
4787 0)))
4789 warning ("comparison is always %d", wanted_code == NE_EXPR);
4791 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4794 if (r_const)
4796 r_const = fold_convert (lntype, r_const);
4797 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4798 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4799 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4800 fold (build1 (BIT_NOT_EXPR,
4801 lntype, rl_mask)),
4802 0)))
4804 warning ("comparison is always %d", wanted_code == NE_EXPR);
4806 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4810 /* If the right sides are not constant, do the same for them. Also,
4811 disallow this optimization if a size or signedness mismatch occurs
4812 between the left and right sides. */
4813 if (l_const == 0)
4815 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4816 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4817 /* Make sure the two fields on the right
4818 correspond to the left without being swapped. */
4819 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4820 return 0;
4822 first_bit = MIN (lr_bitpos, rr_bitpos);
4823 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4824 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4825 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4826 volatilep);
4827 if (rnmode == VOIDmode)
4828 return 0;
4830 rnbitsize = GET_MODE_BITSIZE (rnmode);
4831 rnbitpos = first_bit & ~ (rnbitsize - 1);
4832 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4833 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4835 if (BYTES_BIG_ENDIAN)
4837 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4838 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4841 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4842 size_int (xlr_bitpos), 0);
4843 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4844 size_int (xrr_bitpos), 0);
4846 /* Make a mask that corresponds to both fields being compared.
4847 Do this for both items being compared. If the operands are the
4848 same size and the bits being compared are in the same position
4849 then we can do this by masking both and comparing the masked
4850 results. */
4851 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4852 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4853 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4855 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4856 ll_unsignedp || rl_unsignedp);
4857 if (! all_ones_mask_p (ll_mask, lnbitsize))
4858 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4860 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4861 lr_unsignedp || rr_unsignedp);
4862 if (! all_ones_mask_p (lr_mask, rnbitsize))
4863 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4865 return build2 (wanted_code, truth_type, lhs, rhs);
4868 /* There is still another way we can do something: If both pairs of
4869 fields being compared are adjacent, we may be able to make a wider
4870 field containing them both.
4872 Note that we still must mask the lhs/rhs expressions. Furthermore,
4873 the mask must be shifted to account for the shift done by
4874 make_bit_field_ref. */
4875 if ((ll_bitsize + ll_bitpos == rl_bitpos
4876 && lr_bitsize + lr_bitpos == rr_bitpos)
4877 || (ll_bitpos == rl_bitpos + rl_bitsize
4878 && lr_bitpos == rr_bitpos + rr_bitsize))
4880 tree type;
4882 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4883 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4884 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4885 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4887 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4888 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4889 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4890 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4892 /* Convert to the smaller type before masking out unwanted bits. */
4893 type = lntype;
4894 if (lntype != rntype)
4896 if (lnbitsize > rnbitsize)
4898 lhs = fold_convert (rntype, lhs);
4899 ll_mask = fold_convert (rntype, ll_mask);
4900 type = rntype;
4902 else if (lnbitsize < rnbitsize)
4904 rhs = fold_convert (lntype, rhs);
4905 lr_mask = fold_convert (lntype, lr_mask);
4906 type = lntype;
4910 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4911 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4913 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4914 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4916 return build2 (wanted_code, truth_type, lhs, rhs);
4919 return 0;
4922 /* Handle the case of comparisons with constants. If there is something in
4923 common between the masks, those bits of the constants must be the same.
4924 If not, the condition is always false. Test for this to avoid generating
4925 incorrect code below. */
4926 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4927 if (! integer_zerop (result)
4928 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4929 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4931 if (wanted_code == NE_EXPR)
4933 warning ("%<or%> of unmatched not-equal tests is always 1");
4934 return constant_boolean_node (true, truth_type);
4936 else
4938 warning ("%<and%> of mutually exclusive equal-tests is always 0");
4939 return constant_boolean_node (false, truth_type);
4943 /* Construct the expression we will return. First get the component
4944 reference we will make. Unless the mask is all ones the width of
4945 that field, perform the mask operation. Then compare with the
4946 merged constant. */
4947 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4948 ll_unsignedp || rl_unsignedp);
4950 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4951 if (! all_ones_mask_p (ll_mask, lnbitsize))
4952 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4954 return build2 (wanted_code, truth_type, result,
4955 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
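/* Editorial sketch of the payoff (the struct is hypothetical):

     struct s { unsigned int a : 8; unsigned int b : 8; };

   Given a struct s *p, the test p->a == 2 && p->b == 4 can be folded
   by the code above into one load of the 16 bits spanning both
   fields, masked and compared against the merged constant, instead
   of two separate bit-field extractions and comparisons.  */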
4958 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4959 constant. */
4961 static tree
4962 optimize_minmax_comparison (tree t)
4964 tree type = TREE_TYPE (t);
4965 tree arg0 = TREE_OPERAND (t, 0);
4966 enum tree_code op_code;
4967 tree comp_const = TREE_OPERAND (t, 1);
4968 tree minmax_const;
4969 int consts_equal, consts_lt;
4970 tree inner;
4972 STRIP_SIGN_NOPS (arg0);
4974 op_code = TREE_CODE (arg0);
4975 minmax_const = TREE_OPERAND (arg0, 1);
4976 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4977 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4978 inner = TREE_OPERAND (arg0, 0);
4980 /* If something does not permit us to optimize, return the original tree. */
4981 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4982 || TREE_CODE (comp_const) != INTEGER_CST
4983 || TREE_CONSTANT_OVERFLOW (comp_const)
4984 || TREE_CODE (minmax_const) != INTEGER_CST
4985 || TREE_CONSTANT_OVERFLOW (minmax_const))
4986 return t;
4988 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4989 and GT_EXPR, doing the rest with recursive calls using logical
4990 simplifications. */
4991 switch (TREE_CODE (t))
4993 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4994 return
4995 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4997 case GE_EXPR:
4998 return
4999 fold (build2 (TRUTH_ORIF_EXPR, type,
5000 optimize_minmax_comparison
5001 (build2 (EQ_EXPR, type, arg0, comp_const)),
5002 optimize_minmax_comparison
5003 (build2 (GT_EXPR, type, arg0, comp_const))));
5005 case EQ_EXPR:
5006 if (op_code == MAX_EXPR && consts_equal)
5007 /* MAX (X, 0) == 0 -> X <= 0 */
5008 return fold (build2 (LE_EXPR, type, inner, comp_const));
5010 else if (op_code == MAX_EXPR && consts_lt)
5011 /* MAX (X, 0) == 5 -> X == 5 */
5012 return fold (build2 (EQ_EXPR, type, inner, comp_const));
5014 else if (op_code == MAX_EXPR)
5015 /* MAX (X, 0) == -1 -> false */
5016 return omit_one_operand (type, integer_zero_node, inner);
5018 else if (consts_equal)
5019 /* MIN (X, 0) == 0 -> X >= 0 */
5020 return fold (build2 (GE_EXPR, type, inner, comp_const));
5022 else if (consts_lt)
5023 /* MIN (X, 0) == 5 -> false */
5024 return omit_one_operand (type, integer_zero_node, inner);
5026 else
5027 /* MIN (X, 0) == -1 -> X == -1 */
5028 return fold (build2 (EQ_EXPR, type, inner, comp_const));
5030 case GT_EXPR:
5031 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5032 /* MAX (X, 0) > 0 -> X > 0
5033 MAX (X, 0) > 5 -> X > 5 */
5034 return fold (build2 (GT_EXPR, type, inner, comp_const));
5036 else if (op_code == MAX_EXPR)
5037 /* MAX (X, 0) > -1 -> true */
5038 return omit_one_operand (type, integer_one_node, inner);
5040 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5041 /* MIN (X, 0) > 0 -> false
5042 MIN (X, 0) > 5 -> false */
5043 return omit_one_operand (type, integer_zero_node, inner);
5045 else
5046 /* MIN (X, 0) > -1 -> X > -1 */
5047 return fold (build2 (GT_EXPR, type, inner, comp_const));
5049 default:
5050 return t;
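/* Editorial worked example (not from the original sources): for
   MAX (x, 0) >= 1 the GE_EXPR case above expands to
   MAX (x, 0) == 1 || MAX (x, 0) > 1, and the recursive calls reduce
   those to x == 1 || x > 1, which the comparison-merging code can
   then turn into x >= 1.  */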
5054 /* T is an integer expression that is being multiplied or divided by, or
5055 reduced modulo, a constant C (CODE says which operation and what kind
5056 of divide or modulus). See if we can eliminate that operation by folding it with
5057 other operations already in T. WIDE_TYPE, if non-null, is a type that
5058 should be used for the computation if wider than our type.
5060 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5061 (X * 2) + (Y * 4). We must, however, be assured that either the original
5062 expression would not overflow or that overflow is undefined for the type
5063 in the language in question.
5065 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5066 the machine has a multiply-accumulate insn or that this is part of an
5067 addressing calculation.
5069 If we return a non-null expression, it is an equivalent form of the
5070 original computation, but need not be in the original type. */
5072 static tree
5073 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5075 /* To avoid exponential search depth, refuse to allow recursion past
5076 three levels. Beyond that (1) it's highly unlikely that we'll find
5077 something interesting and (2) we've probably processed it before
5078 when we built the inner expression. */
5080 static int depth;
5081 tree ret;
5083 if (depth > 3)
5084 return NULL;
5086 depth++;
5087 ret = extract_muldiv_1 (t, c, code, wide_type);
5088 depth--;
5090 return ret;
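/* A note on the wrapper above: the static DEPTH counter is a plain
   recursion limiter, not a cache.  It assumes fold is never entered
   recursively from another thread, which holds for this
   single-threaded part of the compiler.  */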
5093 static tree
5094 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5096 tree type = TREE_TYPE (t);
5097 enum tree_code tcode = TREE_CODE (t);
5098 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5099 > GET_MODE_SIZE (TYPE_MODE (type)))
5100 ? wide_type : type);
5101 tree t1, t2;
5102 int same_p = tcode == code;
5103 tree op0 = NULL_TREE, op1 = NULL_TREE;
5105 /* Don't deal with constants of zero here; they confuse the code below. */
5106 if (integer_zerop (c))
5107 return NULL_TREE;
5109 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5110 op0 = TREE_OPERAND (t, 0);
5112 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5113 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5115 /* Note that we need not handle conditional operations here since fold
5116 already handles those cases. So just do arithmetic here. */
5117 switch (tcode)
5119 case INTEGER_CST:
5120 /* For a constant, we can always simplify if we are a multiply
5121 or (for divide and modulus) if it is a multiple of our constant. */
5122 if (code == MULT_EXPR
5123 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5124 return const_binop (code, fold_convert (ctype, t),
5125 fold_convert (ctype, c), 0);
5126 break;
5128 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5129 /* If op0 is an expression ... */
5130 if ((COMPARISON_CLASS_P (op0)
5131 || UNARY_CLASS_P (op0)
5132 || BINARY_CLASS_P (op0)
5133 || EXPRESSION_CLASS_P (op0))
5134 /* ... and is unsigned, and its type is smaller than ctype,
5135 then we cannot pass through as widening. */
5136 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5137 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5138 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5139 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5140 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5141 /* ... or this is a truncation (t is narrower than op0),
5142 then we cannot pass through this narrowing. */
5143 || (GET_MODE_SIZE (TYPE_MODE (type))
5144 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5145 /* ... or signedness changes for division or modulus,
5146 then we cannot pass through this conversion. */
5147 || (code != MULT_EXPR
5148 && (TYPE_UNSIGNED (ctype)
5149 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5150 break;
5152 /* Pass the constant down and see if we can make a simplification. If
5153 we can, replace this expression with the inner simplification for
5154 possible later conversion to our or some other type. */
5155 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5156 && TREE_CODE (t2) == INTEGER_CST
5157 && ! TREE_CONSTANT_OVERFLOW (t2)
5158 && (0 != (t1 = extract_muldiv (op0, t2, code,
5159 code == MULT_EXPR
5160 ? ctype : NULL_TREE))))
5161 return t1;
5162 break;
5164 case ABS_EXPR:
5165 /* If widening the type changes it from signed to unsigned, then we
5166 must avoid building ABS_EXPR itself as unsigned. */
5167 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5169 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5170 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5172 t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
5173 return fold_convert (ctype, t1);
5175 break;
5177 /* FALLTHROUGH */
5178 case NEGATE_EXPR:
5179 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5180 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5181 break;
5183 case MIN_EXPR: case MAX_EXPR:
5184 /* If widening the type changes the signedness, then we can't perform
5185 this optimization as that changes the result. */
5186 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5187 break;
5189 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5190 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5191 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5193 if (tree_int_cst_sgn (c) < 0)
5194 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5196 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5197 fold_convert (ctype, t2)));
5199 break;
5201 case LSHIFT_EXPR: case RSHIFT_EXPR:
5202 /* If the second operand is constant, this is a multiplication
5203 or floor division by a power of two, so we can treat it that
5204 way unless the multiplier or divisor overflows. Signed
5205 left-shift overflow is implementation-defined rather than
5206 undefined in C90, so do not convert signed left shift into
5207 multiplication. */
5208 if (TREE_CODE (op1) == INTEGER_CST
5209 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5210 /* const_binop may not detect overflow correctly,
5211 so check for it explicitly here. */
5212 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5213 && TREE_INT_CST_HIGH (op1) == 0
5214 && 0 != (t1 = fold_convert (ctype,
5215 const_binop (LSHIFT_EXPR,
5216 size_one_node,
5217 op1, 0)))
5218 && ! TREE_OVERFLOW (t1))
5219 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5220 ? MULT_EXPR : FLOOR_DIV_EXPR,
5221 ctype, fold_convert (ctype, op0), t1),
5222 c, code, wide_type);
5223 break;
5225 case PLUS_EXPR: case MINUS_EXPR:
5226 /* See if we can eliminate the operation on both sides. If we can, we
5227 can return a new PLUS or MINUS. If we can't, the only remaining
5228 cases where we can do anything are if the second operand is a
5229 constant. */
5230 t1 = extract_muldiv (op0, c, code, wide_type);
5231 t2 = extract_muldiv (op1, c, code, wide_type);
5232 if (t1 != 0 && t2 != 0
5233 && (code == MULT_EXPR
5234 /* If not multiplication, we can only do this if both operands
5235 are divisible by c. */
5236 || (multiple_of_p (ctype, op0, c)
5237 && multiple_of_p (ctype, op1, c))))
5238 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5239 fold_convert (ctype, t2)));
5241 /* If this was a subtraction, negate OP1 and set it to be an addition.
5242 This simplifies the logic below. */
5243 if (tcode == MINUS_EXPR)
5244 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5246 if (TREE_CODE (op1) != INTEGER_CST)
5247 break;
5249 /* If either OP1 or C is negative, this optimization is not safe for
5250 some of the division and remainder types while for others we need
5251 to change the code. */
5252 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5254 if (code == CEIL_DIV_EXPR)
5255 code = FLOOR_DIV_EXPR;
5256 else if (code == FLOOR_DIV_EXPR)
5257 code = CEIL_DIV_EXPR;
5258 else if (code != MULT_EXPR
5259 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5260 break;
5263 /* If it's a multiply or a division/modulus operation of a multiple
5264 of our constant, do the operation and verify it doesn't overflow. */
5265 if (code == MULT_EXPR
5266 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5268 op1 = const_binop (code, fold_convert (ctype, op1),
5269 fold_convert (ctype, c), 0);
5270 /* We allow the constant to overflow with wrapping semantics. */
5271 if (op1 == 0
5272 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5273 break;
5275 else
5276 break;
5278 /* If we have an unsigned type that is not a sizetype, we cannot widen
5279 the operation since it will change the result if the original
5280 computation overflowed. */
5281 if (TYPE_UNSIGNED (ctype)
5282 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5283 && ctype != type)
5284 break;
5286 /* If we were able to eliminate our operation from the first side,
5287 apply our operation to the second side and reform the PLUS. */
5288 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5289 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5291 /* The last case is if we are a multiply. In that case, we can
5292 apply the distributive law to commute the multiply and addition
5293 if the multiplication of the constants doesn't overflow. */
5294 if (code == MULT_EXPR)
5295 return fold (build2 (tcode, ctype,
5296 fold (build2 (code, ctype,
5297 fold_convert (ctype, op0),
5298 fold_convert (ctype, c))),
5299 op1));
5301 break;
5303 case MULT_EXPR:
5304 /* We have a special case here if we are doing something like
5305 (C * 8) % 4 since we know that's zero. */
5306 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5307 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5308 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5309 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5310 return omit_one_operand (type, integer_zero_node, op0);
5312 /* ... fall through ... */
5314 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5315 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5316 /* If we can extract our operation from the LHS, do so and return a
5317 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5318 do something only if the second operand is a constant. */
5319 if (same_p
5320 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5321 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5322 fold_convert (ctype, op1)));
5323 else if (tcode == MULT_EXPR && code == MULT_EXPR
5324 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5325 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5326 fold_convert (ctype, t1)));
5327 else if (TREE_CODE (op1) != INTEGER_CST)
5328 return 0;
5330 /* If these are the same operation types, we can associate them
5331 assuming no overflow. */
5332 if (tcode == code
5333 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5334 fold_convert (ctype, c), 0))
5335 && ! TREE_OVERFLOW (t1))
5336 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5338 /* If these operations "cancel" each other, we have the main
5339 optimizations of this pass, which occur when either constant is a
5340 multiple of the other, in which case we replace this with an
5341 operation of either CODE or TCODE.
5343 If we have an unsigned type that is not a sizetype, we cannot do
5344 this since it will change the result if the original computation
5345 overflowed. */
5346 if ((! TYPE_UNSIGNED (ctype)
5347 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5348 && ! flag_wrapv
5349 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5350 || (tcode == MULT_EXPR
5351 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5352 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5354 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5355 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5356 fold_convert (ctype,
5357 const_binop (TRUNC_DIV_EXPR,
5358 op1, c, 0))));
5359 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5360 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5361 fold_convert (ctype,
5362 const_binop (TRUNC_DIV_EXPR,
5363 c, op1, 0))));
5365 break;
5367 default:
5368 break;
5371 return 0;
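/* Editorial examples of the cancellation cases above (hypothetical
   expressions): dividing x * 8 + y * 16 by 4 distributes through the
   PLUS_EXPR to give x * 2 + y * 4, and (x * 6) / 3 hits the
   tcode == MULT_EXPR case to become x * 2, in both cases only when
   overflow is undefined or cannot occur.  */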
5374 /* Return a node which has the indicated constant VALUE (either 0 or
5375 1), and is of the indicated TYPE. */
5377 tree
5378 constant_boolean_node (int value, tree type)
5380 if (type == integer_type_node)
5381 return value ? integer_one_node : integer_zero_node;
5382 else if (type == boolean_type_node)
5383 return value ? boolean_true_node : boolean_false_node;
5384 else
5385 return build_int_cst (type, value);
5389 /* Return true if EXPR looks like an ARRAY_REF and set BASE and
5390 OFFSET to the appropriate trees. If there is no offset,
5391 OFFSET is set to NULL_TREE. */
5393 static bool
5394 extract_array_ref (tree expr, tree *base, tree *offset)
5396 /* We have to be careful when stripping nops, since with a different
5397 base type the meaning of the offset can change. */
5398 tree inner_expr = expr;
5399 STRIP_NOPS (inner_expr);
5400 /* One canonical form is a PLUS_EXPR with the first
5401 argument being an ADDR_EXPR with a possible NOP_EXPR
5402 attached. */
5403 if (TREE_CODE (expr) == PLUS_EXPR)
5405 tree op0 = TREE_OPERAND (expr, 0);
5406 STRIP_NOPS (op0);
5407 if (TREE_CODE (op0) == ADDR_EXPR)
5409 *base = TREE_OPERAND (expr, 0);
5410 *offset = TREE_OPERAND (expr, 1);
5411 return true;
5414 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5415 which we transform into an ADDR_EXPR with appropriate
5416 offset. For other arguments to the ADDR_EXPR we assume
5417 zero offset and as such do not care about the ADDR_EXPR
5418 type and strip possible nops from it. */
5419 else if (TREE_CODE (inner_expr) == ADDR_EXPR)
5421 tree op0 = TREE_OPERAND (inner_expr, 0);
5422 if (TREE_CODE (op0) == ARRAY_REF)
5424 *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
5425 *offset = TREE_OPERAND (op0, 1);
5427 else
5429 *base = inner_expr;
5430 *offset = NULL_TREE;
5432 return true;
5435 return false;
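/* Editorial sketch (hypothetical C fragments): the two shapes
   accepted above are

     &a[0] + i   -- a PLUS_EXPR whose first operand is an ADDR_EXPR,
     &a[i]       -- an ADDR_EXPR of an ARRAY_REF,

   yielding BASE = &a[0] (resp. &a) and OFFSET = i, with OFFSET set
   to NULL_TREE when no offset is present.  */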
5439 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5440 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5441 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5442 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5443 COND is the first argument to CODE; otherwise (as in the example
5444 given here), it is the second argument. TYPE is the type of the
5445 original expression. Return NULL_TREE if no simplification is
5446 possible. */
5448 static tree
5449 fold_binary_op_with_conditional_arg (tree t, enum tree_code code, tree cond,
5450 tree arg, int cond_first_p)
5452 const tree type = TREE_TYPE (t);
5453 tree cond_type = cond_first_p ? TREE_TYPE (TREE_OPERAND (t, 0))
5454 : TREE_TYPE (TREE_OPERAND (t, 1));
5455 tree arg_type = cond_first_p ? TREE_TYPE (TREE_OPERAND (t, 1))
5456 : TREE_TYPE (TREE_OPERAND (t, 0));
5457 tree test, true_value, false_value;
5458 tree lhs = NULL_TREE;
5459 tree rhs = NULL_TREE;
5461 /* This transformation is only worthwhile if we don't have to wrap
5462 arg in a SAVE_EXPR, and the operation can be simplified on at least
5463 one of the branches once it's pushed inside the COND_EXPR. */
5464 if (!TREE_CONSTANT (arg))
5465 return NULL_TREE;
5467 if (TREE_CODE (cond) == COND_EXPR)
5469 test = TREE_OPERAND (cond, 0);
5470 true_value = TREE_OPERAND (cond, 1);
5471 false_value = TREE_OPERAND (cond, 2);
5472 /* If this operand throws an exception, then it does not make
5473 sense to try to perform a logical or arithmetic operation
5474 involving it. */
5475 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5476 lhs = true_value;
5477 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5478 rhs = false_value;
5480 else
5482 tree testtype = TREE_TYPE (cond);
5483 test = cond;
5484 true_value = constant_boolean_node (true, testtype);
5485 false_value = constant_boolean_node (false, testtype);
5488 arg = fold_convert (arg_type, arg);
5489 if (lhs == 0)
5491 true_value = fold_convert (cond_type, true_value);
5492 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5493 : build2 (code, type, arg, true_value));
5495 if (rhs == 0)
5497 false_value = fold_convert (cond_type, false_value);
5498 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5499 : build2 (code, type, arg, false_value));
5502 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5503 return fold_convert (type, test);
5507 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5509 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5510 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5511 ADDEND is the same as X.
5513 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5514 and finite. The problematic cases are when X is zero, and its mode
5515 has signed zeros. In the case of rounding towards -infinity,
5516 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5517 modes, X + 0 is not the same as X because -0 + 0 is 0. */
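/* For example, with signed zeros honored and default rounding,
`x - 0.0' can be folded to `x' (only rounding towards -infinity
could make 0.0 - 0.0 yield -0.0), but `x + 0.0' cannot, because
(-0.0) + 0.0 is +0.0 and would lose the sign of a zero x.  */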
5519 static bool
5520 fold_real_zero_addition_p (tree type, tree addend, int negate)
5522 if (!real_zerop (addend))
5523 return false;
5525 /* Don't allow the fold with -fsignaling-nans. */
5526 if (HONOR_SNANS (TYPE_MODE (type)))
5527 return false;
5529 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5530 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5531 return true;
5533 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5534 if (TREE_CODE (addend) == REAL_CST
5535 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5536 negate = !negate;
5538 /* The mode has signed zeros, and we have to honor their sign.
5539 In this situation, there is only one case we can return true for.
5540 X - 0 is the same as X unless rounding towards -infinity is
5541 supported. */
5542 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5545 /* Subroutine of fold() that checks comparisons of built-in math
5546 functions against real constants.
5548 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5549 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5550 is the type of the result and ARG0 and ARG1 are the operands of the
5551 comparison. ARG1 must be a TREE_REAL_CST.
5553 The function returns the constant folded tree if a simplification
5554 can be made, and NULL_TREE otherwise. */
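/* For example, `sqrt (x) > 2.0' becomes `x > 4.0', and, when NaNs
are honored, `sqrt (x) > -1.0' becomes `x >= 0.0'.  */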
5556 static tree
5557 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5558 tree type, tree arg0, tree arg1)
5560 REAL_VALUE_TYPE c;
5562 if (BUILTIN_SQRT_P (fcode))
5564 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5565 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5567 c = TREE_REAL_CST (arg1);
5568 if (REAL_VALUE_NEGATIVE (c))
5570 /* sqrt(x) < y is always false, if y is negative. */
5571 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5572 return omit_one_operand (type, integer_zero_node, arg);
5574 /* sqrt(x) > y is always true, if y is negative and we
5575 don't care about NaNs, i.e. negative values of x. */
5576 if (code == NE_EXPR || !HONOR_NANS (mode))
5577 return omit_one_operand (type, integer_one_node, arg);
5579 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5580 return fold (build2 (GE_EXPR, type, arg,
5581 build_real (TREE_TYPE (arg), dconst0)));
5583 else if (code == GT_EXPR || code == GE_EXPR)
5585 REAL_VALUE_TYPE c2;
5587 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5588 real_convert (&c2, mode, &c2);
5590 if (REAL_VALUE_ISINF (c2))
5592 /* sqrt(x) > y is x == +Inf, when y is very large. */
5593 if (HONOR_INFINITIES (mode))
5594 return fold (build2 (EQ_EXPR, type, arg,
5595 build_real (TREE_TYPE (arg), c2)));
5597 /* sqrt(x) > y is always false, when y is very large
5598 and we don't care about infinities. */
5599 return omit_one_operand (type, integer_zero_node, arg);
5602 /* sqrt(x) > c is the same as x > c*c. */
5603 return fold (build2 (code, type, arg,
5604 build_real (TREE_TYPE (arg), c2)));
5606 else if (code == LT_EXPR || code == LE_EXPR)
5608 REAL_VALUE_TYPE c2;
5610 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5611 real_convert (&c2, mode, &c2);
5613 if (REAL_VALUE_ISINF (c2))
5615 /* sqrt(x) < y is always true, when y is a very large
5616 value and we don't care about NaNs or Infinities. */
5617 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5618 return omit_one_operand (type, integer_one_node, arg);
5620 /* sqrt(x) < y is x != +Inf when y is very large and we
5621 don't care about NaNs. */
5622 if (! HONOR_NANS (mode))
5623 return fold (build2 (NE_EXPR, type, arg,
5624 build_real (TREE_TYPE (arg), c2)));
5626 /* sqrt(x) < y is x >= 0 when y is very large and we
5627 don't care about Infinities. */
5628 if (! HONOR_INFINITIES (mode))
5629 return fold (build2 (GE_EXPR, type, arg,
5630 build_real (TREE_TYPE (arg), dconst0)));
5632 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5633 if (lang_hooks.decls.global_bindings_p () != 0
5634 || CONTAINS_PLACEHOLDER_P (arg))
5635 return NULL_TREE;
5637 arg = save_expr (arg);
5638 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5639 fold (build2 (GE_EXPR, type, arg,
5640 build_real (TREE_TYPE (arg),
5641 dconst0))),
5642 fold (build2 (NE_EXPR, type, arg,
5643 build_real (TREE_TYPE (arg),
5644 c2)))));
5647 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5648 if (! HONOR_NANS (mode))
5649 return fold (build2 (code, type, arg,
5650 build_real (TREE_TYPE (arg), c2)));
5652 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5653 if (lang_hooks.decls.global_bindings_p () == 0
5654 && ! CONTAINS_PLACEHOLDER_P (arg))
5656 arg = save_expr (arg);
5657 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5658 fold (build2 (GE_EXPR, type, arg,
5659 build_real (TREE_TYPE (arg),
5660 dconst0))),
5661 fold (build2 (code, type, arg,
5662 build_real (TREE_TYPE (arg),
5663 c2)))));
5668 return NULL_TREE;
5671 /* Subroutine of fold() that optimizes comparisons against Infinities,
5672 either +Inf or -Inf.
5674 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5675 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5676 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5678 The function returns the constant folded tree if a simplification
5679 can be made, and NULL_TREE otherwise. */
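/* For example, for a double x, `x < +Inf' becomes `x <= DBL_MAX'
and `x >= +Inf' becomes `x > DBL_MAX'.  */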
5681 static tree
5682 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5684 enum machine_mode mode;
5685 REAL_VALUE_TYPE max;
5686 tree temp;
5687 bool neg;
5689 mode = TYPE_MODE (TREE_TYPE (arg0));
5691 /* For negative infinity swap the sense of the comparison. */
5692 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5693 if (neg)
5694 code = swap_tree_comparison (code);
5696 switch (code)
5698 case GT_EXPR:
5699 /* x > +Inf is always false, if we ignore sNaNs. */
5700 if (HONOR_SNANS (mode))
5701 return NULL_TREE;
5702 return omit_one_operand (type, integer_zero_node, arg0);
5704 case LE_EXPR:
5705 /* x <= +Inf is always true, if we don't care about NaNs. */
5706 if (! HONOR_NANS (mode))
5707 return omit_one_operand (type, integer_one_node, arg0);
5709 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5710 if (lang_hooks.decls.global_bindings_p () == 0
5711 && ! CONTAINS_PLACEHOLDER_P (arg0))
5713 arg0 = save_expr (arg0);
5714 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5716 break;
5718 case EQ_EXPR:
5719 case GE_EXPR:
5720 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5721 real_maxval (&max, neg, mode);
5722 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5723 arg0, build_real (TREE_TYPE (arg0), max)));
5725 case LT_EXPR:
5726 /* x < +Inf is always equal to x <= DBL_MAX. */
5727 real_maxval (&max, neg, mode);
5728 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5729 arg0, build_real (TREE_TYPE (arg0), max)));
5731 case NE_EXPR:
5732 /* x != +Inf is always equal to !(x > DBL_MAX). */
5733 real_maxval (&max, neg, mode);
5734 if (! HONOR_NANS (mode))
5735 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5736 arg0, build_real (TREE_TYPE (arg0), max)));
5738 /* The transformation below creates non-gimple code and thus is
5739 not appropriate if we are in gimple form. */
5740 if (in_gimple_form)
5741 return NULL_TREE;
5743 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5744 arg0, build_real (TREE_TYPE (arg0), max)));
5745 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5747 default:
5748 break;
5751 return NULL_TREE;
5754 /* Subroutine of fold() that optimizes comparisons of a division by
5755 a nonzero integer constant against an integer constant, i.e.
5756 X/C1 op C2.
5758 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5759 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5760 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5762 The function returns the constant folded tree if a simplification
5763 can be made, and NULL_TREE otherwise. */
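/* For example, for an unsigned X, `X / 4 == 3' becomes the range
check `12 <= X && X <= 15', since exactly the values 12..15
divide to 3.  */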
5765 static tree
5766 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5768 tree prod, tmp, hi, lo;
5769 tree arg00 = TREE_OPERAND (arg0, 0);
5770 tree arg01 = TREE_OPERAND (arg0, 1);
5771 unsigned HOST_WIDE_INT lpart;
5772 HOST_WIDE_INT hpart;
5773 int overflow;
5775 /* We have to do this the hard way to detect unsigned overflow.
5776 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5777 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5778 TREE_INT_CST_HIGH (arg01),
5779 TREE_INT_CST_LOW (arg1),
5780 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5781 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5782 prod = force_fit_type (prod, -1, overflow, false);
5784 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5786 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5787 lo = prod;
5789 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5790 overflow = add_double (TREE_INT_CST_LOW (prod),
5791 TREE_INT_CST_HIGH (prod),
5792 TREE_INT_CST_LOW (tmp),
5793 TREE_INT_CST_HIGH (tmp),
5794 &lpart, &hpart);
5795 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5796 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5797 TREE_CONSTANT_OVERFLOW (prod));
5799 else if (tree_int_cst_sgn (arg01) >= 0)
5801 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5802 switch (tree_int_cst_sgn (arg1))
5804 case -1:
5805 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5806 hi = prod;
5807 break;
5809 case 0:
5810 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5811 hi = tmp;
5812 break;
5814 case 1:
5815 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5816 lo = prod;
5817 break;
5819 default:
5820 gcc_unreachable ();
5823 else
5825 /* A negative divisor reverses the relational operators. */
5826 code = swap_tree_comparison (code);
5828 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5829 switch (tree_int_cst_sgn (arg1))
5831 case -1:
5832 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5833 lo = prod;
5834 break;
5836 case 0:
5837 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5838 lo = tmp;
5839 break;
5841 case 1:
5842 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5843 hi = prod;
5844 break;
5846 default:
5847 gcc_unreachable ();
5851 switch (code)
5853 case EQ_EXPR:
5854 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5855 return omit_one_operand (type, integer_zero_node, arg00);
5856 if (TREE_OVERFLOW (hi))
5857 return fold (build2 (GE_EXPR, type, arg00, lo));
5858 if (TREE_OVERFLOW (lo))
5859 return fold (build2 (LE_EXPR, type, arg00, hi));
5860 return build_range_check (type, arg00, 1, lo, hi);
5862 case NE_EXPR:
5863 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5864 return omit_one_operand (type, integer_one_node, arg00);
5865 if (TREE_OVERFLOW (hi))
5866 return fold (build2 (LT_EXPR, type, arg00, lo));
5867 if (TREE_OVERFLOW (lo))
5868 return fold (build2 (GT_EXPR, type, arg00, hi));
5869 return build_range_check (type, arg00, 0, lo, hi);
5871 case LT_EXPR:
5872 if (TREE_OVERFLOW (lo))
5873 return omit_one_operand (type, integer_zero_node, arg00);
5874 return fold (build2 (LT_EXPR, type, arg00, lo));
5876 case LE_EXPR:
5877 if (TREE_OVERFLOW (hi))
5878 return omit_one_operand (type, integer_one_node, arg00);
5879 return fold (build2 (LE_EXPR, type, arg00, hi));
5881 case GT_EXPR:
5882 if (TREE_OVERFLOW (hi))
5883 return omit_one_operand (type, integer_zero_node, arg00);
5884 return fold (build2 (GT_EXPR, type, arg00, hi));
5886 case GE_EXPR:
5887 if (TREE_OVERFLOW (lo))
5888 return omit_one_operand (type, integer_one_node, arg00);
5889 return fold (build2 (GE_EXPR, type, arg00, lo));
5891 default:
5892 break;
5895 return NULL_TREE;
5899 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5900 equality/inequality test, then return a simplified form of
5901 the test using shifts and logical operations. Otherwise return
5902 NULL. TYPE is the desired result type. */
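/* For example, `(A & 8) != 0' becomes `((unsigned) A >> 3) & 1'
(up to conversions), and `(A & 0x80000000) != 0' becomes `A < 0'
for a 32-bit signed A, since 0x80000000 is A's sign bit.  */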
5904 tree
5905 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5906 tree result_type)
5908 /* If this is testing a single bit, we can optimize the test. */
5909 if ((code == NE_EXPR || code == EQ_EXPR)
5910 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5911 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5913 tree inner = TREE_OPERAND (arg0, 0);
5914 tree type = TREE_TYPE (arg0);
5915 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5916 enum machine_mode operand_mode = TYPE_MODE (type);
5917 int ops_unsigned;
5918 tree signed_type, unsigned_type, intermediate_type;
5919 tree arg00;
5921 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5922 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5923 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5924 if (arg00 != NULL_TREE
5925 /* This is only a win if casting to a signed type is cheap,
5926 i.e. when arg00's type is not a partial mode. */
5927 && TYPE_PRECISION (TREE_TYPE (arg00))
5928 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5930 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5931 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5932 result_type, fold_convert (stype, arg00),
5933 fold_convert (stype, integer_zero_node)));
5936 /* Otherwise we have (A & C) != 0 where C is a single bit,
5937 convert that into ((A >> C2) & 1), where C2 = log2(C).
5938 Similarly for (A & C) == 0. */
5940 /* If INNER is a right shift of a constant and it plus BITNUM does
5941 not overflow, adjust BITNUM and INNER. */
5942 if (TREE_CODE (inner) == RSHIFT_EXPR
5943 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5944 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5945 && bitnum < TYPE_PRECISION (type)
5946 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5947 bitnum - TYPE_PRECISION (type)))
5949 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5950 inner = TREE_OPERAND (inner, 0);
5953 /* If we are going to be able to omit the AND below, we must do our
5954 operations as unsigned. If we must use the AND, we have a choice.
5955 Normally unsigned is faster, but for some machines signed is. */
5956 #ifdef LOAD_EXTEND_OP
5957 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
5958 && !flag_syntax_only) ? 0 : 1;
5959 #else
5960 ops_unsigned = 1;
5961 #endif
5963 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5964 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5965 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5966 inner = fold_convert (intermediate_type, inner);
5968 if (bitnum != 0)
5969 inner = build2 (RSHIFT_EXPR, intermediate_type,
5970 inner, size_int (bitnum));
5972 if (code == EQ_EXPR)
5973 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5974 inner, integer_one_node));
5976 /* Put the AND last so it can combine with more things. */
5977 inner = build2 (BIT_AND_EXPR, intermediate_type,
5978 inner, integer_one_node);
5980 /* Make sure to return the proper type. */
5981 inner = fold_convert (result_type, inner);
5983 return inner;
5985 return NULL_TREE;
5988 /* Check whether we are allowed to reorder operands ARG0 and ARG1,
5989 such that the evaluation of ARG1 occurs before ARG0. */
5991 static bool
5992 reorder_operands_p (tree arg0, tree arg1)
5994 if (! flag_evaluation_order)
5995 return true;
5996 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5997 return true;
5998 return ! TREE_SIDE_EFFECTS (arg0)
5999 && ! TREE_SIDE_EFFECTS (arg1);
6002 /* Test whether it is preferable to swap two operands, ARG0 and
6003 ARG1, for example because ARG0 is an integer constant and ARG1
6004 isn't. If REORDER is true, only recommend swapping if we can
6005 evaluate the operands in reverse order. */
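/* For example, fold uses this to canonicalize `1 + x' into `x + 1',
keeping the constant as the second operand of a commutative code.  */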
6007 bool
6008 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6010 STRIP_SIGN_NOPS (arg0);
6011 STRIP_SIGN_NOPS (arg1);
6013 if (TREE_CODE (arg1) == INTEGER_CST)
6014 return 0;
6015 if (TREE_CODE (arg0) == INTEGER_CST)
6016 return 1;
6018 if (TREE_CODE (arg1) == REAL_CST)
6019 return 0;
6020 if (TREE_CODE (arg0) == REAL_CST)
6021 return 1;
6023 if (TREE_CODE (arg1) == COMPLEX_CST)
6024 return 0;
6025 if (TREE_CODE (arg0) == COMPLEX_CST)
6026 return 1;
6028 if (TREE_CONSTANT (arg1))
6029 return 0;
6030 if (TREE_CONSTANT (arg0))
6031 return 1;
6033 if (optimize_size)
6034 return 0;
6036 if (reorder && flag_evaluation_order
6037 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6038 return 0;
6040 if (DECL_P (arg1))
6041 return 0;
6042 if (DECL_P (arg0))
6043 return 1;
6045 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6046 for commutative and comparison operators. Ensuring a canonical
6047 form allows the optimizers to find additional redundancies without
6048 having to explicitly check for both orderings. */
6049 if (TREE_CODE (arg0) == SSA_NAME
6050 && TREE_CODE (arg1) == SSA_NAME
6051 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6052 return 1;
6054 return 0;
6057 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6058 ARG0 is extended to a wider type. */
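/* For example, for a signed char c, `(int) c == 1000' is known to
be false, and `(int) c < 100' can be done as a comparison in the
narrower type once 100 is shown to fit in signed char.  */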
6060 static tree
6061 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6063 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6064 tree arg1_unw;
6065 tree shorter_type, outer_type;
6066 tree min, max;
6067 bool above, below;
6069 if (arg0_unw == arg0)
6070 return NULL_TREE;
6071 shorter_type = TREE_TYPE (arg0_unw);
6073 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6074 return NULL_TREE;
6076 arg1_unw = get_unwidened (arg1, shorter_type);
6077 if (!arg1_unw)
6078 return NULL_TREE;
6080 /* If possible, express the comparison in the shorter mode. */
6081 if ((code == EQ_EXPR || code == NE_EXPR
6082 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6083 && (TREE_TYPE (arg1_unw) == shorter_type
6084 || (TREE_CODE (arg1_unw) == INTEGER_CST
6085 && TREE_CODE (shorter_type) == INTEGER_TYPE
6086 && int_fits_type_p (arg1_unw, shorter_type))))
6087 return fold (build (code, type, arg0_unw,
6088 fold_convert (shorter_type, arg1_unw)));
6090 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6091 return NULL_TREE;
6093 /* If we are comparing with an integer that does not fit into the range
6094 of the shorter type, the result is known. */
6095 outer_type = TREE_TYPE (arg1_unw);
6096 min = lower_bound_in_type (outer_type, shorter_type);
6097 max = upper_bound_in_type (outer_type, shorter_type);
6099 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6100 max, arg1_unw));
6101 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6102 arg1_unw, min));
6104 switch (code)
6106 case EQ_EXPR:
6107 if (above || below)
6108 return omit_one_operand (type, integer_zero_node, arg0);
6109 break;
6111 case NE_EXPR:
6112 if (above || below)
6113 return omit_one_operand (type, integer_one_node, arg0);
6114 break;
6116 case LT_EXPR:
6117 case LE_EXPR:
6118 if (above)
6119 return omit_one_operand (type, integer_one_node, arg0);
6120 else if (below)
6121 return omit_one_operand (type, integer_zero_node, arg0);
6123 case GT_EXPR:
6124 case GE_EXPR:
6125 if (above)
6126 return omit_one_operand (type, integer_zero_node, arg0);
6127 else if (below)
6128 return omit_one_operand (type, integer_one_node, arg0);
6130 default:
6131 break;
6134 return NULL_TREE;
6137 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6138 ARG0 just the signedness is changed. */
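/* For example, for a signed int i, `(unsigned int) i == 5u' becomes
`i == 5', since equality does not depend on the sign change.  */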
6140 static tree
6141 fold_sign_changed_comparison (enum tree_code code, tree type,
6142 tree arg0, tree arg1)
6144 tree arg0_inner, tmp;
6145 tree inner_type, outer_type;
6147 if (TREE_CODE (arg0) != NOP_EXPR)
6148 return NULL_TREE;
6150 outer_type = TREE_TYPE (arg0);
6151 arg0_inner = TREE_OPERAND (arg0, 0);
6152 inner_type = TREE_TYPE (arg0_inner);
6154 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6155 return NULL_TREE;
6157 if (TREE_CODE (arg1) != INTEGER_CST
6158 && !(TREE_CODE (arg1) == NOP_EXPR
6159 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6160 return NULL_TREE;
6162 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6163 && code != NE_EXPR
6164 && code != EQ_EXPR)
6165 return NULL_TREE;
6167 if (TREE_CODE (arg1) == INTEGER_CST)
6169 tmp = build_int_cst_wide (inner_type,
6170 TREE_INT_CST_LOW (arg1),
6171 TREE_INT_CST_HIGH (arg1));
6172 arg1 = force_fit_type (tmp, 0,
6173 TREE_OVERFLOW (arg1),
6174 TREE_CONSTANT_OVERFLOW (arg1));
6176 else
6177 arg1 = fold_convert (inner_type, arg1);
6179 return fold (build (code, type, arg0_inner, arg1));
6182 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6183 the step of the array. ADDR is the address. MULT is the multiplicative expression.
6184 If the function succeeds, the new address expression is returned. Otherwise
6185 NULL_TREE is returned. */
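/* For example, with 4-byte array elements, the tree-level address
`&a[i] + 4 * d' (pointer offsets are counted in bytes here)
becomes `&a[i + d]'.  */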
6187 static tree
6188 try_move_mult_to_index (enum tree_code code, tree addr, tree mult)
6190 tree s, delta, step;
6191 tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
6192 tree ref = TREE_OPERAND (addr, 0), pref;
6193 tree ret, pos;
6194 tree itype;
6196 STRIP_NOPS (arg0);
6197 STRIP_NOPS (arg1);
6199 if (TREE_CODE (arg0) == INTEGER_CST)
6201 s = arg0;
6202 delta = arg1;
6204 else if (TREE_CODE (arg1) == INTEGER_CST)
6206 s = arg1;
6207 delta = arg0;
6209 else
6210 return NULL_TREE;
6212 for (;; ref = TREE_OPERAND (ref, 0))
6214 if (TREE_CODE (ref) == ARRAY_REF)
6216 step = array_ref_element_size (ref);
6218 if (TREE_CODE (step) != INTEGER_CST)
6219 continue;
6221 itype = TREE_TYPE (step);
6223 /* If the type sizes do not match, we might run into problems
6224 when one of them would overflow. */
6225 if (TYPE_PRECISION (itype) != TYPE_PRECISION (TREE_TYPE (s)))
6226 continue;
6228 if (!operand_equal_p (step, fold_convert (itype, s), 0))
6229 continue;
6231 delta = fold_convert (itype, delta);
6232 break;
6235 if (!handled_component_p (ref))
6236 return NULL_TREE;
6239 /* We found a suitable array reference. So copy everything up to it,
6240 and replace the index. */
6242 pref = TREE_OPERAND (addr, 0);
6243 ret = copy_node (pref);
6244 pos = ret;
6246 while (pref != ref)
6248 pref = TREE_OPERAND (pref, 0);
6249 TREE_OPERAND (pos, 0) = copy_node (pref);
6250 pos = TREE_OPERAND (pos, 0);
6253 TREE_OPERAND (pos, 1) = fold (build2 (code, itype,
6254 TREE_OPERAND (pos, 1),
6255 delta));
6257 return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6261 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6262 means A >= Y && A != MAX, but in this case we know that
6263 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6265 static tree
6266 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6268 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6270 if (TREE_CODE (bound) == LT_EXPR)
6271 a = TREE_OPERAND (bound, 0);
6272 else if (TREE_CODE (bound) == GT_EXPR)
6273 a = TREE_OPERAND (bound, 1);
6274 else
6275 return NULL_TREE;
6277 typea = TREE_TYPE (a);
6278 if (!INTEGRAL_TYPE_P (typea)
6279 && !POINTER_TYPE_P (typea))
6280 return NULL_TREE;
6282 if (TREE_CODE (ineq) == LT_EXPR)
6284 a1 = TREE_OPERAND (ineq, 1);
6285 y = TREE_OPERAND (ineq, 0);
6287 else if (TREE_CODE (ineq) == GT_EXPR)
6289 a1 = TREE_OPERAND (ineq, 0);
6290 y = TREE_OPERAND (ineq, 1);
6292 else
6293 return NULL_TREE;
6295 if (TREE_TYPE (a1) != typea)
6296 return NULL_TREE;
6298 diff = fold (build2 (MINUS_EXPR, typea, a1, a));
6299 if (!integer_onep (diff))
6300 return NULL_TREE;
6302 return fold (build2 (GE_EXPR, type, a, y));
6305 /* Fold complex addition when both components are accessible by parts.
6306 Return non-null if successful. CODE should be PLUS_EXPR for addition,
6307 or MINUS_EXPR for subtraction. */
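/* For example, COMPLEX_EXPR <a, b> + COMPLEX_EXPR <c, d> folds to
COMPLEX_EXPR <a + c, b + d>.  */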
6309 static tree
6310 fold_complex_add (tree type, tree ac, tree bc, enum tree_code code)
6312 tree ar, ai, br, bi, rr, ri, inner_type;
6314 if (TREE_CODE (ac) == COMPLEX_EXPR)
6315 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6316 else if (TREE_CODE (ac) == COMPLEX_CST)
6317 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6318 else
6319 return NULL;
6321 if (TREE_CODE (bc) == COMPLEX_EXPR)
6322 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6323 else if (TREE_CODE (bc) == COMPLEX_CST)
6324 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6325 else
6326 return NULL;
6328 inner_type = TREE_TYPE (type);
6330 rr = fold (build2 (code, inner_type, ar, br));
6331 ri = fold (build2 (code, inner_type, ai, bi));
6333 return fold (build2 (COMPLEX_EXPR, type, rr, ri));
6336 /* Perform some simplifications of complex multiplication when one or more
6337 of the components are constants or zeros. Return non-null if successful. */
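/* For example, multiplying (ar, 0) by (0, bi) yields (0, ar*bi),
and multiplying (ar, 0) by the imaginary unit (0, 1) yields
(0, ar).  */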
6339 tree
6340 fold_complex_mult_parts (tree type, tree ar, tree ai, tree br, tree bi)
6342 tree rr, ri, inner_type, zero;
6343 bool ar0, ai0, br0, bi0, bi1;
6345 inner_type = TREE_TYPE (type);
6346 zero = NULL;
6348 if (SCALAR_FLOAT_TYPE_P (inner_type))
6350 ar0 = ai0 = br0 = bi0 = bi1 = false;
6352 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6354 if (TREE_CODE (ar) == REAL_CST
6355 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6356 ar0 = true, zero = ar;
6358 if (TREE_CODE (ai) == REAL_CST
6359 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6360 ai0 = true, zero = ai;
6362 if (TREE_CODE (br) == REAL_CST
6363 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6364 br0 = true, zero = br;
6366 if (TREE_CODE (bi) == REAL_CST)
6368 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6369 bi0 = true, zero = bi;
6370 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6371 bi1 = true;
6374 else
6376 ar0 = integer_zerop (ar);
6377 if (ar0)
6378 zero = ar;
6379 ai0 = integer_zerop (ai);
6380 if (ai0)
6381 zero = ai;
6382 br0 = integer_zerop (br);
6383 if (br0)
6384 zero = br;
6385 bi0 = integer_zerop (bi);
6386 if (bi0)
6388 zero = bi;
6389 bi1 = false;
6391 else
6392 bi1 = integer_onep (bi);
6395 /* We won't optimize anything below unless something is zero. */
6396 if (zero == NULL)
6397 return NULL;
6399 if (ai0 && br0 && bi1)
6401 rr = zero;
6402 ri = ar;
6404 else if (ai0 && bi0)
6406 rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
6407 ri = zero;
6409 else if (ai0 && br0)
6411 rr = zero;
6412 ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
6414 else if (ar0 && bi0)
6416 rr = zero;
6417 ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
6419 else if (ar0 && br0)
6421 rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
6422 rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
6423 ri = zero;
6425 else if (bi0)
6427 rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
6428 ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
6430 else if (ai0)
6432 rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
6433 ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
6435 else if (br0)
6437 rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
6438 rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
6439 ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
6441 else if (ar0)
6443 rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
6444 rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
6445 ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
6447 else
6448 return NULL;
6450 return fold (build2 (COMPLEX_EXPR, type, rr, ri));
6453 static tree
6454 fold_complex_mult (tree type, tree ac, tree bc)
6456 tree ar, ai, br, bi;
6458 if (TREE_CODE (ac) == COMPLEX_EXPR)
6459 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6460 else if (TREE_CODE (ac) == COMPLEX_CST)
6461 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6462 else
6463 return NULL;
6465 if (TREE_CODE (bc) == COMPLEX_EXPR)
6466 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6467 else if (TREE_CODE (bc) == COMPLEX_CST)
6468 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6469 else
6470 return NULL;
6472 return fold_complex_mult_parts (type, ar, ai, br, bi);
6475 /* Perform some simplifications of complex division when one or more of
6476 the components are constants or zeros. Return non-null if successful. */
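/* For example, dividing (ar, ai) by a real divisor (br, 0) simply
divides each component: (ar/br, ai/br).  */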
6478 tree
6479 fold_complex_div_parts (tree type, tree ar, tree ai, tree br, tree bi,
6480 enum tree_code code)
6482 tree rr, ri, inner_type, zero;
6483 bool ar0, ai0, br0, bi0, bi1;
6485 inner_type = TREE_TYPE (type);
6486 zero = NULL;
6488 if (SCALAR_FLOAT_TYPE_P (inner_type))
6490 ar0 = ai0 = br0 = bi0 = bi1 = false;
6492 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6494 if (TREE_CODE (ar) == REAL_CST
6495 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6496 ar0 = true, zero = ar;
6498 if (TREE_CODE (ai) == REAL_CST
6499 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6500 ai0 = true, zero = ai;
6502 if (TREE_CODE (br) == REAL_CST
6503 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6504 br0 = true, zero = br;
6506 if (TREE_CODE (bi) == REAL_CST)
6508 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6509 bi0 = true, zero = bi;
6510 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6511 bi1 = true;
6514 else
6516 ar0 = integer_zerop (ar);
6517 if (ar0)
6518 zero = ar;
6519 ai0 = integer_zerop (ai);
6520 if (ai0)
6521 zero = ai;
6522 br0 = integer_zerop (br);
6523 if (br0)
6524 zero = br;
6525 bi0 = integer_zerop (bi);
6526 if (bi0)
6528 zero = bi;
6529 bi1 = false;
6531 else
6532 bi1 = integer_onep (bi);
6535 /* We won't optimize anything below unless something is zero. */
6536 if (zero == NULL)
6537 return NULL;
6539 if (ai0 && bi0)
6541 rr = fold (build2 (code, inner_type, ar, br));
6542 ri = zero;
6544 else if (ai0 && br0)
6546 rr = zero;
6547 ri = fold (build2 (code, inner_type, ar, bi));
6548 ri = fold (build1 (NEGATE_EXPR, inner_type, ri));
6550 else if (ar0 && bi0)
6552 rr = zero;
6553 ri = fold (build2 (code, inner_type, ai, br));
6555 else if (ar0 && br0)
6557 rr = fold (build2 (code, inner_type, ai, bi));
6558 ri = zero;
6560 else if (bi0)
6562 rr = fold (build2 (code, inner_type, ar, br));
6563 ri = fold (build2 (code, inner_type, ai, br));
6565 else if (br0)
6567 rr = fold (build2 (code, inner_type, ai, bi));
6568 ri = fold (build2 (code, inner_type, ar, bi));
6569 ri = fold (build1 (NEGATE_EXPR, inner_type, ri));
6571 else
6572 return NULL;
6574 return fold (build2 (COMPLEX_EXPR, type, rr, ri));
6577 static tree
6578 fold_complex_div (tree type, tree ac, tree bc, enum tree_code code)
6580 tree ar, ai, br, bi;
6582 if (TREE_CODE (ac) == COMPLEX_EXPR)
6583 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6584 else if (TREE_CODE (ac) == COMPLEX_CST)
6585 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6586 else
6587 return NULL;
6589 if (TREE_CODE (bc) == COMPLEX_EXPR)
6590 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6591 else if (TREE_CODE (bc) == COMPLEX_CST)
6592 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6593 else
6594 return NULL;
6596 return fold_complex_div_parts (type, ar, ai, br, bi, code);
6599 /* Fold a unary expression EXPR. Return the folded expression if
6600 folding is successful. Otherwise, return the original
6601 expression. */
6603 static tree
6604 fold_unary (tree expr)
6606 const tree t = expr;
6607 const tree type = TREE_TYPE (expr);
6608 tree tem;
6609 tree op0, arg0;
6610 enum tree_code code = TREE_CODE (t);
6611 enum tree_code_class kind = TREE_CODE_CLASS (code);
6613 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6614 && TREE_CODE_LENGTH (code) == 1);
6617 arg0 = op0 = TREE_OPERAND (t, 0);
6618 if (arg0)
6620 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6622 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6623 STRIP_SIGN_NOPS (arg0);
6625 else
6627 /* Strip any conversions that don't change the mode. This
6628 is safe for every expression, except for a comparison
6629 expression because its signedness is derived from its
6630 operands.
6632 Note that this is done as an internal manipulation within
6633 the constant folder, in order to find the simplest
6634 representation of the arguments so that their form can be
6635 studied. In any case, the appropriate type conversions
6636 should be put back in the tree that will get out of the
6637 constant folder. */
6638 STRIP_NOPS (arg0);
6642 if (TREE_CODE_CLASS (code) == tcc_unary)
6644 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6645 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6646 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6647 else if (TREE_CODE (arg0) == COND_EXPR)
6649 tree arg01 = TREE_OPERAND (arg0, 1);
6650 tree arg02 = TREE_OPERAND (arg0, 2);
6651 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6652 arg01 = fold (build1 (code, type, arg01));
6653 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6654 arg02 = fold (build1 (code, type, arg02));
6655 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6656 arg01, arg02));
6658 /* If this was a conversion, and all we did was to move it
6659 inside the COND_EXPR, bring it back out. But leave it if
6660 it is a conversion from integer to integer and the
6661 result precision is no wider than a word since such a
6662 conversion is cheap and may be optimized away by combine,
6663 while it couldn't if it were outside the COND_EXPR. Then return
6664 so we don't get into an infinite recursion loop taking the
6665 conversion out and then back in. */
6667 if ((code == NOP_EXPR || code == CONVERT_EXPR
6668 || code == NON_LVALUE_EXPR)
6669 && TREE_CODE (tem) == COND_EXPR
6670 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6671 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6672 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6673 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6674 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6675 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6676 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6677 && (INTEGRAL_TYPE_P
6678 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6679 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6680 || flag_syntax_only))
6681 tem = build1 (code, type,
6682 build3 (COND_EXPR,
6683 TREE_TYPE (TREE_OPERAND
6684 (TREE_OPERAND (tem, 1), 0)),
6685 TREE_OPERAND (tem, 0),
6686 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6687 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6688 return tem;
6690 else if (COMPARISON_CLASS_P (arg0))
6692 if (TREE_CODE (type) == BOOLEAN_TYPE)
6694 arg0 = copy_node (arg0);
6695 TREE_TYPE (arg0) = type;
6696 return arg0;
6698 else if (TREE_CODE (type) != INTEGER_TYPE)
6699 return fold (build3 (COND_EXPR, type, arg0,
6700 fold (build1 (code, type,
6701 integer_one_node)),
6702 fold (build1 (code, type,
6703 integer_zero_node))));
6707 switch (code)
6709 case NOP_EXPR:
6710 case FLOAT_EXPR:
6711 case CONVERT_EXPR:
6712 case FIX_TRUNC_EXPR:
6713 case FIX_CEIL_EXPR:
6714 case FIX_FLOOR_EXPR:
6715 case FIX_ROUND_EXPR:
6716 if (TREE_TYPE (op0) == type)
6717 return op0;
6719 /* Handle cases of two conversions in a row. */
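/* For example, for a char c, `(short) (int) c' can drop the
intermediate widening and fold to `(short) c' directly.  */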
6720 if (TREE_CODE (op0) == NOP_EXPR
6721 || TREE_CODE (op0) == CONVERT_EXPR)
6723 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6724 tree inter_type = TREE_TYPE (op0);
6725 int inside_int = INTEGRAL_TYPE_P (inside_type);
6726 int inside_ptr = POINTER_TYPE_P (inside_type);
6727 int inside_float = FLOAT_TYPE_P (inside_type);
6728 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6729 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6730 int inter_int = INTEGRAL_TYPE_P (inter_type);
6731 int inter_ptr = POINTER_TYPE_P (inter_type);
6732 int inter_float = FLOAT_TYPE_P (inter_type);
6733 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6734 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6735 int final_int = INTEGRAL_TYPE_P (type);
6736 int final_ptr = POINTER_TYPE_P (type);
6737 int final_float = FLOAT_TYPE_P (type);
6738 unsigned int final_prec = TYPE_PRECISION (type);
6739 int final_unsignedp = TYPE_UNSIGNED (type);
6741 /* In addition to the cases of two conversions in a row
6742 handled below, if we are converting something to its own
6743 type via an object of identical or wider precision, neither
6744 conversion is needed. */
6745 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6746 && ((inter_int && final_int) || (inter_float && final_float))
6747 && inter_prec >= final_prec)
6748 return fold (build1 (code, type, TREE_OPERAND (op0, 0)));
6750 /* Likewise, if the intermediate and final types are either both
6751 float or both integer, we don't need the middle conversion if
6752 it is wider than the final type and doesn't change the signedness
6753 (for integers). Avoid this if the final type is a pointer
6754 since then we sometimes need the inner conversion. Likewise if
6755 the outer has a precision not equal to the size of its mode. */
6756 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6757 || (inter_float && inside_float))
6758 && inter_prec >= inside_prec
6759 && (inter_float || inter_unsignedp == inside_unsignedp)
6760 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6761 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6762 && ! final_ptr)
6763 return fold (build1 (code, type, TREE_OPERAND (op0, 0)));
6765 /* If we have a sign-extension of a zero-extended value, we can
6766 replace that by a single zero-extension. */
6767 if (inside_int && inter_int && final_int
6768 && inside_prec < inter_prec && inter_prec < final_prec
6769 && inside_unsignedp && !inter_unsignedp)
6770 return fold (build1 (code, type, TREE_OPERAND (op0, 0)));
6772 /* Two conversions in a row are not needed unless:
6773 - some conversion is floating-point (overstrict for now), or
6774 - the intermediate type is narrower than both initial and
6775 final, or
6776 - the intermediate type and innermost type differ in signedness,
6777 and the outermost type is wider than the intermediate, or
6778 - the initial type is a pointer type and the precisions of the
6779 intermediate and final types differ, or
6780 - the final type is a pointer type and the precisions of the
6781 initial and intermediate types differ. */
6782 if (! inside_float && ! inter_float && ! final_float
6783 && (inter_prec > inside_prec || inter_prec > final_prec)
6784 && ! (inside_int && inter_int
6785 && inter_unsignedp != inside_unsignedp
6786 && inter_prec < final_prec)
6787 && ((inter_unsignedp && inter_prec > inside_prec)
6788 == (final_unsignedp && final_prec > inter_prec))
6789 && ! (inside_ptr && inter_prec != final_prec)
6790 && ! (final_ptr && inside_prec != inter_prec)
6791 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6792 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6793 && ! final_ptr)
6794 return fold (build1 (code, type, TREE_OPERAND (op0, 0)));
6797 if (TREE_CODE (op0) == MODIFY_EXPR
6798 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6799 /* Detect assigning a bitfield. */
6800 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6801 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6803 /* Don't leave an assignment inside a conversion
6804 unless assigning a bitfield. */
6805 tem = copy_node (t);
6806 TREE_OPERAND (tem, 0) = TREE_OPERAND (op0, 1);
6807 /* First do the assignment, then return converted constant. */
6808 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, fold (tem));
6809 TREE_NO_WARNING (tem) = 1;
6810 TREE_USED (tem) = 1;
6811 return tem;
6814 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6815 constant (if x has signed type, the sign bit cannot be set
6816 in c). This folds extension into the BIT_AND_EXPR. */
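/* For example, `(unsigned int) (x & 0xff)' for a signed int x
becomes `(unsigned int) x & 0xff'.  */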
6817 if (INTEGRAL_TYPE_P (type)
6818 && TREE_CODE (type) != BOOLEAN_TYPE
6819 && TREE_CODE (op0) == BIT_AND_EXPR
6820 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6822 tree and = op0;
6823 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6824 int change = 0;
6826 if (TYPE_UNSIGNED (TREE_TYPE (and))
6827 || (TYPE_PRECISION (type)
6828 <= TYPE_PRECISION (TREE_TYPE (and))))
6829 change = 1;
6830 else if (TYPE_PRECISION (TREE_TYPE (and1))
6831 <= HOST_BITS_PER_WIDE_INT
6832 && host_integerp (and1, 1))
6834 unsigned HOST_WIDE_INT cst;
6836 cst = tree_low_cst (and1, 1);
6837 cst &= (HOST_WIDE_INT) -1
6838 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6839 change = (cst == 0);
6840 #ifdef LOAD_EXTEND_OP
6841 if (change
6842 && !flag_syntax_only
6843 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6844 == ZERO_EXTEND))
6846 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6847 and0 = fold_convert (uns, and0);
6848 and1 = fold_convert (uns, and1);
6850 #endif
6852 if (change)
6854 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6855 TREE_INT_CST_HIGH (and1));
6856 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6857 TREE_CONSTANT_OVERFLOW (and1));
6858 return fold (build2 (BIT_AND_EXPR, type,
6859 fold_convert (type, and0), tem));
6863 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6864 T2 being pointers to types of the same size. */
6865 if (POINTER_TYPE_P (type)
6866 && BINARY_CLASS_P (arg0)
6867 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6868 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6870 tree arg00 = TREE_OPERAND (arg0, 0);
6871 tree t0 = type;
6872 tree t1 = TREE_TYPE (arg00);
6873 tree tt0 = TREE_TYPE (t0);
6874 tree tt1 = TREE_TYPE (t1);
6875 tree s0 = TYPE_SIZE (tt0);
6876 tree s1 = TYPE_SIZE (tt1);
6878 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6879 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6880 TREE_OPERAND (arg0, 1));
6883 tem = fold_convert_const (code, type, arg0);
6884 return tem ? tem : t;
6886 case VIEW_CONVERT_EXPR:
6887 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6888 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6889 return t;
6891 case NEGATE_EXPR:
6892 if (negate_expr_p (arg0))
6893 return fold_convert (type, negate_expr (arg0));
6894 /* Convert - (~A) to A + 1. */
6895 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6896 return fold (build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6897 build_int_cst (type, 1)));
6898 return t;
6900 case ABS_EXPR:
6901 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6902 return fold_abs_const (arg0, type);
6903 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6904 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6905 /* Convert fabs((double)float) into (double)fabsf(float). */
6906 else if (TREE_CODE (arg0) == NOP_EXPR
6907 && TREE_CODE (type) == REAL_TYPE)
6909 tree targ0 = strip_float_extensions (arg0);
6910 if (targ0 != arg0)
6911 return fold_convert (type, fold (build1 (ABS_EXPR,
6912 TREE_TYPE (targ0),
6913 targ0)));
6915 else if (tree_expr_nonnegative_p (arg0))
6916 return arg0;
6918 /* Strip sign ops from argument. */
6919 if (TREE_CODE (type) == REAL_TYPE)
6921 tem = fold_strip_sign_ops (arg0);
6922 if (tem)
6923 return fold (build1 (ABS_EXPR, type, fold_convert (type, tem)));
6925 return t;
6927 case CONJ_EXPR:
6928 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6929 return fold_convert (type, arg0);
6930 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6931 return build2 (COMPLEX_EXPR, type,
6932 TREE_OPERAND (arg0, 0),
6933 negate_expr (TREE_OPERAND (arg0, 1)));
6934 else if (TREE_CODE (arg0) == COMPLEX_CST)
6935 return build_complex (type, TREE_REALPART (arg0),
6936 negate_expr (TREE_IMAGPART (arg0)));
6937 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6938 return fold (build2 (TREE_CODE (arg0), type,
6939 fold (build1 (CONJ_EXPR, type,
6940 TREE_OPERAND (arg0, 0))),
6941 fold (build1 (CONJ_EXPR, type,
6942 TREE_OPERAND (arg0, 1)))));
6943 else if (TREE_CODE (arg0) == CONJ_EXPR)
6944 return TREE_OPERAND (arg0, 0);
6945 return t;
6947 case BIT_NOT_EXPR:
6948 if (TREE_CODE (arg0) == INTEGER_CST)
6949 return fold_not_const (arg0, type);
6950 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6951 return TREE_OPERAND (arg0, 0);
6952 /* Convert ~ (-A) to A - 1. */
6953 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
6954 return fold (build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
6955 build_int_cst (type, 1)));
6956 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
6957 else if (INTEGRAL_TYPE_P (type)
6958 && ((TREE_CODE (arg0) == MINUS_EXPR
6959 && integer_onep (TREE_OPERAND (arg0, 1)))
6960 || (TREE_CODE (arg0) == PLUS_EXPR
6961 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
6962 return fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0)));
6963 return t;
6965 case TRUTH_NOT_EXPR:
6966 /* The argument to invert_truthvalue must have Boolean type. */
6967 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
6968 arg0 = fold_convert (boolean_type_node, arg0);
6970 /* Note that the operand of this must be an int
6971 and its values must be 0 or 1.
6972 ("true" is a fixed value perhaps depending on the language,
6973 but we don't handle values other than 1 correctly yet.) */
6974 tem = invert_truthvalue (arg0);
6975 /* Avoid infinite recursion. */
6976 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6977 return t;
6978 return fold_convert (type, tem);
6980 case REALPART_EXPR:
6981 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6982 return t;
6983 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6984 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6985 TREE_OPERAND (arg0, 1));
6986 else if (TREE_CODE (arg0) == COMPLEX_CST)
6987 return TREE_REALPART (arg0);
6988 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6989 return fold (build2 (TREE_CODE (arg0), type,
6990 fold (build1 (REALPART_EXPR, type,
6991 TREE_OPERAND (arg0, 0))),
6992 fold (build1 (REALPART_EXPR, type,
6993 TREE_OPERAND (arg0, 1)))));
6994 return t;
6996 case IMAGPART_EXPR:
6997 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6998 return fold_convert (type, integer_zero_node);
6999 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7000 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7001 TREE_OPERAND (arg0, 0));
7002 else if (TREE_CODE (arg0) == COMPLEX_CST)
7003 return TREE_IMAGPART (arg0);
7004 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7005 return fold (build2 (TREE_CODE (arg0), type,
7006 fold (build1 (IMAGPART_EXPR, type,
7007 TREE_OPERAND (arg0, 0))),
7008 fold (build1 (IMAGPART_EXPR, type,
7009 TREE_OPERAND (arg0, 1)))));
7010 return t;
7012 default:
7013 return t;
7014 } /* switch (code) */
7017 /* Fold a ternary expression EXPR. Return the folded expression if
7018 folding is successful. Otherwise, return the original
7019 expression. */
7021 static tree
7022 fold_ternary (tree expr)
7024 const tree t = expr;
7025 const tree type = TREE_TYPE (expr);
7026 tree tem;
7027 tree op0, op1, op2;
7028 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7029 enum tree_code code = TREE_CODE (t);
7030 enum tree_code_class kind = TREE_CODE_CLASS (code);
7032 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7033 && TREE_CODE_LENGTH (code) == 3);
7035 op0 = TREE_OPERAND (t, 0);
7036 op1 = TREE_OPERAND (t, 1);
7037 op2 = TREE_OPERAND (t, 2);
7039 /* Strip any conversions that don't change the mode. This is safe
7040 for every expression, except for a comparison expression because
7041 its signedness is derived from its operands. So, in the latter
7042 case, only strip conversions that don't change the signedness.
7044 Note that this is done as an internal manipulation within the
7045 constant folder, in order to find the simplest representation of
7046 the arguments so that their form can be studied. In any case,
7047 the appropriate type conversions should be put back in the tree
7048 that will get out of the constant folder. */
7049 if (op0)
7051 arg0 = op0;
7052 STRIP_NOPS (arg0);
7055 if (op1)
7057 arg1 = op1;
7058 STRIP_NOPS (arg1);
7061 switch (code)
7063 case COMPONENT_REF:
7064 if (TREE_CODE (arg0) == CONSTRUCTOR
7065 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
7067 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
7068 if (m)
7069 return TREE_VALUE (m);
7071 return t;
7073 case COND_EXPR:
7074 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7075 so all simple results must be passed through pedantic_non_lvalue. */
7076 if (TREE_CODE (arg0) == INTEGER_CST)
7078 tem = integer_zerop (arg0) ? op2 : op1;
7079 /* Only optimize constant conditions when the selected branch
7080 has the same type as the COND_EXPR. This avoids optimizing
7081 away "c ? x : throw", where the throw has a void type. */
7082 if (! VOID_TYPE_P (TREE_TYPE (tem))
7083 || VOID_TYPE_P (type))
7084 return pedantic_non_lvalue (tem);
7085 return t;
7087 if (operand_equal_p (arg1, op2, 0))
7088 return pedantic_omit_one_operand (type, arg1, arg0);
7090 /* If we have A op B ? A : C, we may be able to convert this to a
7091 simpler expression, depending on the operation and the values
7092 of B and C. Signed zeros prevent all of these transformations,
7093 for reasons given above each one.
7095 Also try swapping the arguments and inverting the conditional. */
7096 if (COMPARISON_CLASS_P (arg0)
7097 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7098 arg1, TREE_OPERAND (arg0, 1))
7099 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7101 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
7102 if (tem)
7103 return tem;
7106 if (COMPARISON_CLASS_P (arg0)
7107 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7108 op2,
7109 TREE_OPERAND (arg0, 1))
7110 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
7112 tem = invert_truthvalue (arg0);
7113 if (COMPARISON_CLASS_P (tem))
7115 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
7116 if (tem)
7117 return tem;
7121 /* If the second operand is simpler than the third, swap them
7122 since that produces better jump optimization results. */
7123 if (tree_swap_operands_p (op1, op2, false))
7125 /* See if this can be inverted. If it can't, possibly because
7126 it was a floating-point inequality comparison, don't do
7127 anything. */
7128 tem = invert_truthvalue (arg0);
7130 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7131 return fold (build3 (code, type, tem, op2, op1));
7134 /* Convert A ? 1 : 0 to simply A. */
7135 if (integer_onep (op1)
7136 && integer_zerop (op2)
7137 /* If we try to convert OP0 to our type, the
7138 call to fold will try to move the conversion inside
7139 a COND, which will recurse. In that case, the COND_EXPR
7140 is probably the best choice, so leave it alone. */
7141 && type == TREE_TYPE (arg0))
7142 return pedantic_non_lvalue (arg0);
7144 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
7145 over COND_EXPR in cases such as floating point comparisons. */
7146 if (integer_zerop (op1)
7147 && integer_onep (op2)
7148 && truth_value_p (TREE_CODE (arg0)))
7149 return pedantic_non_lvalue (fold_convert (type,
7150 invert_truthvalue (arg0)));
7152 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
7153 if (TREE_CODE (arg0) == LT_EXPR
7154 && integer_zerop (TREE_OPERAND (arg0, 1))
7155 && integer_zerop (op2)
7156 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
7157 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
7158 TREE_TYPE (tem), tem, arg1)));
7160 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
7161 already handled above. */
7162 if (TREE_CODE (arg0) == BIT_AND_EXPR
7163 && integer_onep (TREE_OPERAND (arg0, 1))
7164 && integer_zerop (op2)
7165 && integer_pow2p (arg1))
7167 tree tem = TREE_OPERAND (arg0, 0);
7168 STRIP_NOPS (tem);
7169 if (TREE_CODE (tem) == RSHIFT_EXPR
7170 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
7171 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
7172 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
7173 return fold (build2 (BIT_AND_EXPR, type,
7174 TREE_OPERAND (tem, 0), arg1));
7177 /* A & N ? N : 0 is simply A & N if N is a power of two. This
7178 is probably obsolete because the first operand should be a
7179 truth value (that's why we have the two cases above), but let's
7180 leave it in until we can confirm this for all front-ends. */
7181 if (integer_zerop (op2)
7182 && TREE_CODE (arg0) == NE_EXPR
7183 && integer_zerop (TREE_OPERAND (arg0, 1))
7184 && integer_pow2p (arg1)
7185 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
7186 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
7187 arg1, OEP_ONLY_CONST))
7188 return pedantic_non_lvalue (fold_convert (type,
7189 TREE_OPERAND (arg0, 0)));
7191 /* Convert A ? B : 0 into A && B if A and B are truth values. */
7192 if (integer_zerop (op2)
7193 && truth_value_p (TREE_CODE (arg0))
7194 && truth_value_p (TREE_CODE (arg1)))
7195 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
7197 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
7198 if (integer_onep (op2)
7199 && truth_value_p (TREE_CODE (arg0))
7200 && truth_value_p (TREE_CODE (arg1)))
7202 /* Only perform transformation if ARG0 is easily inverted. */
7203 tem = invert_truthvalue (arg0);
7204 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7205 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
7208 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
7209 if (integer_zerop (arg1)
7210 && truth_value_p (TREE_CODE (arg0))
7211 && truth_value_p (TREE_CODE (op2)))
7213 /* Only perform transformation if ARG0 is easily inverted. */
7214 tem = invert_truthvalue (arg0);
7215 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7216 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem, op2));
7219 /* Convert A ? 1 : B into A || B if A and B are truth values. */
7220 if (integer_onep (arg1)
7221 && truth_value_p (TREE_CODE (arg0))
7222 && truth_value_p (TREE_CODE (op2)))
7223 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0, op2));
7225 return t;
7227 case CALL_EXPR:
7228 /* Check for a built-in function. */
7229 if (TREE_CODE (op0) == ADDR_EXPR
7230 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
7231 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
7233 tree tmp = fold_builtin (t, false);
7234 if (tmp)
7235 return tmp;
7237 return t;
7239 default:
7240 return t;
7241 } /* switch (code) */
7244 /* Perform constant folding and related simplification of EXPR.
7245 The related simplifications include x*1 => x, x*0 => 0, etc.,
7246 and application of the associative law.
7247 NOP_EXPR conversions may be removed freely (as long as we
7248 are careful not to change the type of the overall expression).
7249 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
7250 but we can constant-fold them if they have constant operands. */
7252 #ifdef ENABLE_FOLD_CHECKING
7253 # define fold(x) fold_1 (x)
7254 static tree fold_1 (tree);
7255 static
7256 #endif
7257 tree
7258 fold (tree expr)
7260 const tree t = expr;
7261 const tree type = TREE_TYPE (expr);
7262 tree t1 = NULL_TREE;
7263 tree tem;
7264 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7265 enum tree_code code = TREE_CODE (t);
7266 enum tree_code_class kind = TREE_CODE_CLASS (code);
7268 /* WINS will be nonzero when the switch is done
7269 if all operands are constant. */
7270 int wins = 1;
7272 /* Return right away if a constant. */
7273 if (kind == tcc_constant)
7274 return t;
7276 if (IS_EXPR_CODE_CLASS (kind))
7278 switch (TREE_CODE_LENGTH (code))
7280 case 1:
7281 return fold_unary (expr);
7282 case 3:
7283 return fold_ternary (expr);
7284 default:
7285 break;
7289 if (IS_EXPR_CODE_CLASS (kind))
7291 int len = TREE_CODE_LENGTH (code);
7292 int i;
7293 for (i = 0; i < len; i++)
7295 tree op = TREE_OPERAND (t, i);
7296 tree subop;
7298 if (op == 0)
7299 continue; /* Valid for CALL_EXPR, at least. */
7301 /* Strip any conversions that don't change the mode. This is
7302 safe for every expression, except for a comparison expression
7303 because its signedness is derived from its operands. So, in
7304 the latter case, only strip conversions that don't change the
7305 signedness.
7307 Note that this is done as an internal manipulation within the
7308 constant folder, in order to find the simplest representation
7309 of the arguments so that their form can be studied. In any
7310 case, the appropriate type conversions should be put back in
7311 the tree that will get out of the constant folder. */
7312 if (kind == tcc_comparison)
7313 STRIP_SIGN_NOPS (op);
7314 else
7315 STRIP_NOPS (op);
7317 if (TREE_CODE (op) == COMPLEX_CST)
7318 subop = TREE_REALPART (op);
7319 else
7320 subop = op;
7322 if (TREE_CODE (subop) != INTEGER_CST
7323 && TREE_CODE (subop) != REAL_CST)
7324 /* Note that TREE_CONSTANT isn't enough:
7325 static var addresses are constant but we can't
7326 do arithmetic on them. */
7327 wins = 0;
7329 if (i == 0)
7330 arg0 = op;
7331 else if (i == 1)
7332 arg1 = op;
7336 /* If this is a commutative operation, and ARG0 is a constant, move it
7337 to ARG1 to reduce the number of tests below. */
7338 if (commutative_tree_code (code)
7339 && tree_swap_operands_p (arg0, arg1, true))
7340 return fold (build2 (code, type, TREE_OPERAND (t, 1),
7341 TREE_OPERAND (t, 0)));
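/* For example, "3 * x" is refolded as "x * 3", so the tests
   below only need to check ARG1 for a constant.  */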
7343 /* Now WINS is set as described above,
7344 ARG0 is the first operand of EXPR,
7345 and ARG1 is the second operand (if it has more than one operand).
7347 First check for cases where an arithmetic operation is applied to a
7348 compound, conditional, or comparison operation. Push the arithmetic
7349 operation inside the compound or conditional to see if any folding
7350 can then be done. Convert comparison to conditional for this purpose.
7351 This also optimizes non-constant cases that used to be done in
7352 expand_expr.
7354 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
7355 where one of the operands is a comparison and the other is a comparison, a
7356 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
7357 code below would make the expression more complex. Change it to a
7358 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7359 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7361 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7362 || code == EQ_EXPR || code == NE_EXPR)
7363 && ((truth_value_p (TREE_CODE (arg0))
7364 && (truth_value_p (TREE_CODE (arg1))
7365 || (TREE_CODE (arg1) == BIT_AND_EXPR
7366 && integer_onep (TREE_OPERAND (arg1, 1)))))
7367 || (truth_value_p (TREE_CODE (arg1))
7368 && (truth_value_p (TREE_CODE (arg0))
7369 || (TREE_CODE (arg0) == BIT_AND_EXPR
7370 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7372 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7373 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7374 : TRUTH_XOR_EXPR,
7375 type, fold_convert (boolean_type_node, arg0),
7376 fold_convert (boolean_type_node, arg1)));
7378 if (code == EQ_EXPR)
7379 tem = invert_truthvalue (tem);
7381 return tem;
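/* As an illustration, "(a < b) & (c == d)" becomes the boolean
   "(a < b) && (c == d)", while "(a < b) == (c == d)" becomes the
   inverted TRUTH_XOR_EXPR "!((a < b) ^ (c == d))".  */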
7384 if (TREE_CODE_CLASS (code) == tcc_comparison
7385 && TREE_CODE (arg0) == COMPOUND_EXPR)
7386 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7387 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
7388 else if (TREE_CODE_CLASS (code) == tcc_comparison
7389 && TREE_CODE (arg1) == COMPOUND_EXPR)
7390 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7391 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
7392 else if (TREE_CODE_CLASS (code) == tcc_binary
7393 || TREE_CODE_CLASS (code) == tcc_comparison)
7395 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7396 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7397 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
7398 arg1)));
7399 if (TREE_CODE (arg1) == COMPOUND_EXPR
7400 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7401 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7402 fold (build2 (code, type,
7403 arg0, TREE_OPERAND (arg1, 1))));
7405 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7407 tem = fold_binary_op_with_conditional_arg (t, code, arg0, arg1,
7408 /*cond_first_p=*/1);
7409 if (tem != NULL_TREE)
7410 return tem;
7413 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7415 tem = fold_binary_op_with_conditional_arg (t, code, arg1, arg0,
7416 /*cond_first_p=*/0);
7417 if (tem != NULL_TREE)
7418 return tem;
7422 switch (code)
7424 case CONST_DECL:
7425 return fold (DECL_INITIAL (t));
7427 case RANGE_EXPR:
7428 if (TREE_CONSTANT (t) != wins)
7430 tem = copy_node (t);
7431 TREE_CONSTANT (tem) = wins;
7432 TREE_INVARIANT (tem) = wins;
7433 return tem;
7435 return t;
7437 case PLUS_EXPR:
7438 /* A + (-B) -> A - B */
7439 if (TREE_CODE (arg1) == NEGATE_EXPR)
7440 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
7441 /* (-A) + B -> B - A */
7442 if (TREE_CODE (arg0) == NEGATE_EXPR
7443 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7444 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
7446 if (TREE_CODE (type) == COMPLEX_TYPE)
7448 tem = fold_complex_add (type, arg0, arg1, PLUS_EXPR);
7449 if (tem)
7450 return tem;
7453 if (! FLOAT_TYPE_P (type))
7455 if (integer_zerop (arg1))
7456 return non_lvalue (fold_convert (type, arg0));
7458 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7459 with a constant, and the two constants have no bits in common,
7460 we should treat this as a BIT_IOR_EXPR since this may produce more
7461 simplifications. */
7462 if (TREE_CODE (arg0) == BIT_AND_EXPR
7463 && TREE_CODE (arg1) == BIT_AND_EXPR
7464 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7465 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7466 && integer_zerop (const_binop (BIT_AND_EXPR,
7467 TREE_OPERAND (arg0, 1),
7468 TREE_OPERAND (arg1, 1), 0)))
7470 code = BIT_IOR_EXPR;
7471 goto bit_ior;
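/* For example, "(x & 0xf0) + (y & 0x0f)" cannot produce carries
   between the two masked halves, so it is handled as
   "(x & 0xf0) | (y & 0x0f)".  */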
7474 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7475 (plus (plus (mult) (mult)) (foo)) so that we can
7476 take advantage of the factoring cases below. */
7477 if (((TREE_CODE (arg0) == PLUS_EXPR
7478 || TREE_CODE (arg0) == MINUS_EXPR)
7479 && TREE_CODE (arg1) == MULT_EXPR)
7480 || ((TREE_CODE (arg1) == PLUS_EXPR
7481 || TREE_CODE (arg1) == MINUS_EXPR)
7482 && TREE_CODE (arg0) == MULT_EXPR))
7484 tree parg0, parg1, parg, marg;
7485 enum tree_code pcode;
7487 if (TREE_CODE (arg1) == MULT_EXPR)
7488 parg = arg0, marg = arg1;
7489 else
7490 parg = arg1, marg = arg0;
7491 pcode = TREE_CODE (parg);
7492 parg0 = TREE_OPERAND (parg, 0);
7493 parg1 = TREE_OPERAND (parg, 1);
7494 STRIP_NOPS (parg0);
7495 STRIP_NOPS (parg1);
7497 if (TREE_CODE (parg0) == MULT_EXPR
7498 && TREE_CODE (parg1) != MULT_EXPR)
7499 return fold (build2 (pcode, type,
7500 fold (build2 (PLUS_EXPR, type,
7501 fold_convert (type, parg0),
7502 fold_convert (type, marg))),
7503 fold_convert (type, parg1)));
7504 if (TREE_CODE (parg0) != MULT_EXPR
7505 && TREE_CODE (parg1) == MULT_EXPR)
7506 return fold (build2 (PLUS_EXPR, type,
7507 fold_convert (type, parg0),
7508 fold (build2 (pcode, type,
7509 fold_convert (type, marg),
7510 fold_convert (type,
7511 parg1)))));
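/* E.g. "(a*4 + b) + c*4" is rewritten as "(a*4 + c*4) + b" so
   that the factoring below can then produce "(a + c)*4 + b".  */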
7514 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7516 tree arg00, arg01, arg10, arg11;
7517 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7519 /* (A * C) + (B * C) -> (A+B) * C.
7520 We are most concerned about the case where C is a constant,
7521 but other combinations show up during loop reduction. Since
7522 it is not difficult, try all four possibilities. */
7524 arg00 = TREE_OPERAND (arg0, 0);
7525 arg01 = TREE_OPERAND (arg0, 1);
7526 arg10 = TREE_OPERAND (arg1, 0);
7527 arg11 = TREE_OPERAND (arg1, 1);
7528 same = NULL_TREE;
7530 if (operand_equal_p (arg01, arg11, 0))
7531 same = arg01, alt0 = arg00, alt1 = arg10;
7532 else if (operand_equal_p (arg00, arg10, 0))
7533 same = arg00, alt0 = arg01, alt1 = arg11;
7534 else if (operand_equal_p (arg00, arg11, 0))
7535 same = arg00, alt0 = arg01, alt1 = arg10;
7536 else if (operand_equal_p (arg01, arg10, 0))
7537 same = arg01, alt0 = arg00, alt1 = arg11;
7539 /* No identical multiplicands; see if we can find a common
7540 power-of-two factor in non-power-of-two multiplies. This
7541 can help in multi-dimensional array access. */
7542 else if (TREE_CODE (arg01) == INTEGER_CST
7543 && TREE_CODE (arg11) == INTEGER_CST
7544 && TREE_INT_CST_HIGH (arg01) == 0
7545 && TREE_INT_CST_HIGH (arg11) == 0)
7547 HOST_WIDE_INT int01, int11, tmp;
7548 int01 = TREE_INT_CST_LOW (arg01);
7549 int11 = TREE_INT_CST_LOW (arg11);
7551 /* Move min of absolute values to int11. */
7552 if ((int01 >= 0 ? int01 : -int01)
7553 < (int11 >= 0 ? int11 : -int11))
7555 tmp = int01, int01 = int11, int11 = tmp;
7556 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7557 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7560 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7562 alt0 = fold (build2 (MULT_EXPR, type, arg00,
7563 build_int_cst (NULL_TREE,
7564 int01 / int11)));
7565 alt1 = arg10;
7566 same = arg11;
7570 if (same)
7571 return fold (build2 (MULT_EXPR, type,
7572 fold (build2 (PLUS_EXPR, type,
7573 fold_convert (type, alt0),
7574 fold_convert (type, alt1))),
7575 same));
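/* For instance, "i*4 + j*4" factors into "(i + j) * 4", and the
   power-of-two fallback turns "i*12 + j*4" via "(i*3)*4 + j*4"
   into "(i*3 + j) * 4".  */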
7578 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
7579 of the array. The loop optimizer sometimes produces this type of
7580 expression. */
7581 if (TREE_CODE (arg0) == ADDR_EXPR
7582 && TREE_CODE (arg1) == MULT_EXPR)
7584 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7585 if (tem)
7586 return fold_convert (type, fold (tem));
7588 else if (TREE_CODE (arg1) == ADDR_EXPR
7589 && TREE_CODE (arg0) == MULT_EXPR)
7591 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7592 if (tem)
7593 return fold_convert (type, fold (tem));
7596 else
7598 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7599 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7600 return non_lvalue (fold_convert (type, arg0));
7602 /* Likewise if the operands are reversed. */
7603 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7604 return non_lvalue (fold_convert (type, arg1));
7606 /* Convert X + -C into X - C. */
7607 if (TREE_CODE (arg1) == REAL_CST
7608 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7610 tem = fold_negate_const (arg1, type);
7611 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7612 return fold (build2 (MINUS_EXPR, type,
7613 fold_convert (type, arg0),
7614 fold_convert (type, tem)));
7617 /* Convert x+x into x*2.0. */
7618 if (operand_equal_p (arg0, arg1, 0)
7619 && SCALAR_FLOAT_TYPE_P (type))
7620 return fold (build2 (MULT_EXPR, type, arg0,
7621 build_real (type, dconst2)));
7623 /* Convert x*c+x into x*(c+1). */
7624 if (flag_unsafe_math_optimizations
7625 && TREE_CODE (arg0) == MULT_EXPR
7626 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7627 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7628 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7630 REAL_VALUE_TYPE c;
7632 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7633 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7634 return fold (build2 (MULT_EXPR, type, arg1,
7635 build_real (type, c)));
7638 /* Convert x+x*c into x*(c+1). */
7639 if (flag_unsafe_math_optimizations
7640 && TREE_CODE (arg1) == MULT_EXPR
7641 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7642 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7643 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7645 REAL_VALUE_TYPE c;
7647 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7648 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7649 return fold (build2 (MULT_EXPR, type, arg0,
7650 build_real (type, c)));
7653 /* Convert x*c1+x*c2 into x*(c1+c2). */
7654 if (flag_unsafe_math_optimizations
7655 && TREE_CODE (arg0) == MULT_EXPR
7656 && TREE_CODE (arg1) == MULT_EXPR
7657 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7658 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7659 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7660 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7661 && operand_equal_p (TREE_OPERAND (arg0, 0),
7662 TREE_OPERAND (arg1, 0), 0))
7664 REAL_VALUE_TYPE c1, c2;
7666 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7667 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7668 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7669 return fold (build2 (MULT_EXPR, type,
7670 TREE_OPERAND (arg0, 0),
7671 build_real (type, c1)));
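/* Under -funsafe-math-optimizations the three folds above give,
   for example, "x*3.0 + x" -> "x*4.0", "x + x*3.0" -> "x*4.0"
   and "x*2.0 + x*3.0" -> "x*5.0".  */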
7673 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7674 if (flag_unsafe_math_optimizations
7675 && TREE_CODE (arg1) == PLUS_EXPR
7676 && TREE_CODE (arg0) != MULT_EXPR)
7678 tree tree10 = TREE_OPERAND (arg1, 0);
7679 tree tree11 = TREE_OPERAND (arg1, 1);
7680 if (TREE_CODE (tree11) == MULT_EXPR
7681 && TREE_CODE (tree10) == MULT_EXPR)
7683 tree tree0;
7684 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
7685 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
7688 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7689 if (flag_unsafe_math_optimizations
7690 && TREE_CODE (arg0) == PLUS_EXPR
7691 && TREE_CODE (arg1) != MULT_EXPR)
7693 tree tree00 = TREE_OPERAND (arg0, 0);
7694 tree tree01 = TREE_OPERAND (arg0, 1);
7695 if (TREE_CODE (tree01) == MULT_EXPR
7696 && TREE_CODE (tree00) == MULT_EXPR)
7698 tree tree0;
7699 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
7700 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
7705 bit_rotate:
7706 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7707 is a rotate of A by C1 bits. */
7708 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7709 is a rotate of A by B bits. */
7711 enum tree_code code0, code1;
7712 code0 = TREE_CODE (arg0);
7713 code1 = TREE_CODE (arg1);
7714 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7715 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7716 && operand_equal_p (TREE_OPERAND (arg0, 0),
7717 TREE_OPERAND (arg1, 0), 0)
7718 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7720 tree tree01, tree11;
7721 enum tree_code code01, code11;
7723 tree01 = TREE_OPERAND (arg0, 1);
7724 tree11 = TREE_OPERAND (arg1, 1);
7725 STRIP_NOPS (tree01);
7726 STRIP_NOPS (tree11);
7727 code01 = TREE_CODE (tree01);
7728 code11 = TREE_CODE (tree11);
7729 if (code01 == INTEGER_CST
7730 && code11 == INTEGER_CST
7731 && TREE_INT_CST_HIGH (tree01) == 0
7732 && TREE_INT_CST_HIGH (tree11) == 0
7733 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7734 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7735 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7736 code0 == LSHIFT_EXPR ? tree01 : tree11);
7737 else if (code11 == MINUS_EXPR)
7739 tree tree110, tree111;
7740 tree110 = TREE_OPERAND (tree11, 0);
7741 tree111 = TREE_OPERAND (tree11, 1);
7742 STRIP_NOPS (tree110);
7743 STRIP_NOPS (tree111);
7744 if (TREE_CODE (tree110) == INTEGER_CST
7745 && 0 == compare_tree_int (tree110,
7746 TYPE_PRECISION
7747 (TREE_TYPE (TREE_OPERAND
7748 (arg0, 0))))
7749 && operand_equal_p (tree01, tree111, 0))
7750 return build2 ((code0 == LSHIFT_EXPR
7751 ? LROTATE_EXPR
7752 : RROTATE_EXPR),
7753 type, TREE_OPERAND (arg0, 0), tree01);
7755 else if (code01 == MINUS_EXPR)
7757 tree tree010, tree011;
7758 tree010 = TREE_OPERAND (tree01, 0);
7759 tree011 = TREE_OPERAND (tree01, 1);
7760 STRIP_NOPS (tree010);
7761 STRIP_NOPS (tree011);
7762 if (TREE_CODE (tree010) == INTEGER_CST
7763 && 0 == compare_tree_int (tree010,
7764 TYPE_PRECISION
7765 (TREE_TYPE (TREE_OPERAND
7766 (arg0, 0))))
7767 && operand_equal_p (tree11, tree011, 0))
7768 return build2 ((code0 != LSHIFT_EXPR
7769 ? LROTATE_EXPR
7770 : RROTATE_EXPR),
7771 type, TREE_OPERAND (arg0, 0), tree11);
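/* So, assuming a 32-bit unsigned x, both "(x << 3) + (x >> 29)"
   and "(x << n) + (x >> (32 - n))" are recognized here as a
   rotate left of x.  */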
7776 associate:
7777 /* In most languages, we can't associate operations on floats through
7778 parentheses. Rather than remember where the parentheses were, we
7779 don't associate floats at all, unless the user has specified
7780 -funsafe-math-optimizations. */
7782 if (! wins
7783 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7785 tree var0, con0, lit0, minus_lit0;
7786 tree var1, con1, lit1, minus_lit1;
7788 /* Split both trees into variables, constants, and literals. Then
7789 associate each group together, the constants with literals,
7790 then the result with variables. This increases the chances of
7791 literals being recombined later and of generating relocatable
7792 expressions for the sum of a constant and literal. */
7793 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7794 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7795 code == MINUS_EXPR);
7797 /* Only do something if we found more than two objects. Otherwise,
7798 nothing has changed and we risk infinite recursion. */
7799 if (2 < ((var0 != 0) + (var1 != 0)
7800 + (con0 != 0) + (con1 != 0)
7801 + (lit0 != 0) + (lit1 != 0)
7802 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7804 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7805 if (code == MINUS_EXPR)
7806 code = PLUS_EXPR;
7808 var0 = associate_trees (var0, var1, code, type);
7809 con0 = associate_trees (con0, con1, code, type);
7810 lit0 = associate_trees (lit0, lit1, code, type);
7811 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7813 /* Preserve the MINUS_EXPR if the negative part of the literal is
7814 greater than the positive part. Otherwise, the multiplicative
7815 folding code (i.e. extract_muldiv) may be fooled when
7816 unsigned constants are subtracted, as in the following
7817 example: ((X*2 + 4) - 8U)/2. */
7818 if (minus_lit0 && lit0)
7820 if (TREE_CODE (lit0) == INTEGER_CST
7821 && TREE_CODE (minus_lit0) == INTEGER_CST
7822 && tree_int_cst_lt (lit0, minus_lit0))
7824 minus_lit0 = associate_trees (minus_lit0, lit0,
7825 MINUS_EXPR, type);
7826 lit0 = 0;
7828 else
7830 lit0 = associate_trees (lit0, minus_lit0,
7831 MINUS_EXPR, type);
7832 minus_lit0 = 0;
7835 if (minus_lit0)
7837 if (con0 == 0)
7838 return fold_convert (type,
7839 associate_trees (var0, minus_lit0,
7840 MINUS_EXPR, type));
7841 else
7843 con0 = associate_trees (con0, minus_lit0,
7844 MINUS_EXPR, type);
7845 return fold_convert (type,
7846 associate_trees (var0, con0,
7847 PLUS_EXPR, type));
7851 con0 = associate_trees (con0, lit0, code, type);
7852 return fold_convert (type, associate_trees (var0, con0,
7853 code, type));
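/* For instance, "(x + 1) + (y + 2)" splits into the variables
   x, y and the literals 1, 2, and is reassociated as
   "(x + y) + 3".  */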
7857 binary:
7858 if (wins)
7859 t1 = const_binop (code, arg0, arg1, 0);
7860 if (t1 != NULL_TREE)
7862 /* The return value should always have
7863 the same type as the original expression. */
7864 if (TREE_TYPE (t1) != type)
7865 t1 = fold_convert (type, t1);
7867 return t1;
7869 return t;
7871 case MINUS_EXPR:
7872 /* A - (-B) -> A + B */
7873 if (TREE_CODE (arg1) == NEGATE_EXPR)
7874 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
7875 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7876 if (TREE_CODE (arg0) == NEGATE_EXPR
7877 && (FLOAT_TYPE_P (type)
7878 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7879 && negate_expr_p (arg1)
7880 && reorder_operands_p (arg0, arg1))
7881 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
7882 TREE_OPERAND (arg0, 0)));
7884 if (TREE_CODE (type) == COMPLEX_TYPE)
7886 tem = fold_complex_add (type, arg0, arg1, MINUS_EXPR);
7887 if (tem)
7888 return tem;
7891 if (! FLOAT_TYPE_P (type))
7893 if (! wins && integer_zerop (arg0))
7894 return negate_expr (fold_convert (type, arg1));
7895 if (integer_zerop (arg1))
7896 return non_lvalue (fold_convert (type, arg0));
7898 /* Fold A - (A & B) into ~B & A. */
7899 if (!TREE_SIDE_EFFECTS (arg0)
7900 && TREE_CODE (arg1) == BIT_AND_EXPR)
7902 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7903 return fold (build2 (BIT_AND_EXPR, type,
7904 fold (build1 (BIT_NOT_EXPR, type,
7905 TREE_OPERAND (arg1, 0))),
7906 arg0));
7907 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7908 return fold (build2 (BIT_AND_EXPR, type,
7909 fold (build1 (BIT_NOT_EXPR, type,
7910 TREE_OPERAND (arg1, 1))),
7911 arg0));
7914 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7915 any power of 2 minus 1. */
7916 if (TREE_CODE (arg0) == BIT_AND_EXPR
7917 && TREE_CODE (arg1) == BIT_AND_EXPR
7918 && operand_equal_p (TREE_OPERAND (arg0, 0),
7919 TREE_OPERAND (arg1, 0), 0))
7921 tree mask0 = TREE_OPERAND (arg0, 1);
7922 tree mask1 = TREE_OPERAND (arg1, 1);
7923 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
7925 if (operand_equal_p (tem, mask1, 0))
7927 tem = fold (build2 (BIT_XOR_EXPR, type,
7928 TREE_OPERAND (arg0, 0), mask1));
7929 return fold (build2 (MINUS_EXPR, type, tem, mask1));
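/* E.g. with B == 7, a power of two minus 1,
   "(a & ~7) - (a & 7)" becomes "(a ^ 7) - 7".  */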
7934 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7935 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7936 return non_lvalue (fold_convert (type, arg0));
7938 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7939 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7940 (-ARG1 + ARG0) reduces to -ARG1. */
7941 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7942 return negate_expr (fold_convert (type, arg1));
7944 /* Fold &x - &x. This can happen from &x.foo - &x.
7945 This is unsafe for certain floats even in non-IEEE formats.
7946 In IEEE, it is unsafe because it gives the wrong result for NaNs.
7947 Also note that operand_equal_p is always false if an operand
7948 is volatile. */
7950 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7951 && operand_equal_p (arg0, arg1, 0))
7952 return fold_convert (type, integer_zero_node);
7954 /* A - B -> A + (-B) if B is easily negatable. */
7955 if (!wins && negate_expr_p (arg1)
7956 && ((FLOAT_TYPE_P (type)
7957 /* Avoid this transformation if B is a positive REAL_CST. */
7958 && (TREE_CODE (arg1) != REAL_CST
7959 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7960 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7961 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
7963 /* Try folding difference of addresses. */
7965 HOST_WIDE_INT diff;
7967 if ((TREE_CODE (arg0) == ADDR_EXPR
7968 || TREE_CODE (arg1) == ADDR_EXPR)
7969 && ptr_difference_const (arg0, arg1, &diff))
7970 return build_int_cst_type (type, diff);
7973 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7974 of the array. The loop optimizer sometimes produces this type of
7975 expression. */
7976 if (TREE_CODE (arg0) == ADDR_EXPR
7977 && TREE_CODE (arg1) == MULT_EXPR)
7979 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7980 if (tem)
7981 return fold_convert (type, fold (tem));
7984 if (TREE_CODE (arg0) == MULT_EXPR
7985 && TREE_CODE (arg1) == MULT_EXPR
7986 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7988 /* (A * C) - (B * C) -> (A-B) * C. */
7989 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7990 TREE_OPERAND (arg1, 1), 0))
7991 return fold (build2 (MULT_EXPR, type,
7992 fold (build2 (MINUS_EXPR, type,
7993 TREE_OPERAND (arg0, 0),
7994 TREE_OPERAND (arg1, 0))),
7995 TREE_OPERAND (arg0, 1)));
7996 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7997 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7998 TREE_OPERAND (arg1, 0), 0))
7999 return fold (build2 (MULT_EXPR, type,
8000 TREE_OPERAND (arg0, 0),
8001 fold (build2 (MINUS_EXPR, type,
8002 TREE_OPERAND (arg0, 1),
8003 TREE_OPERAND (arg1, 1)))));
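/* For example, "a*5 - a*2" becomes "a * (5-2)", which then
   folds to "a * 3".  */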
8006 goto associate;
8008 case MULT_EXPR:
8009 /* (-A) * (-B) -> A * B */
8010 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8011 return fold (build2 (MULT_EXPR, type,
8012 TREE_OPERAND (arg0, 0),
8013 negate_expr (arg1)));
8014 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8015 return fold (build2 (MULT_EXPR, type,
8016 negate_expr (arg0),
8017 TREE_OPERAND (arg1, 0)));
8019 if (TREE_CODE (type) == COMPLEX_TYPE)
8021 tem = fold_complex_mult (type, arg0, arg1);
8022 if (tem)
8023 return tem;
8026 if (! FLOAT_TYPE_P (type))
8028 if (integer_zerop (arg1))
8029 return omit_one_operand (type, arg1, arg0);
8030 if (integer_onep (arg1))
8031 return non_lvalue (fold_convert (type, arg0));
8033 /* (a * (1 << b)) is (a << b) */
8034 if (TREE_CODE (arg1) == LSHIFT_EXPR
8035 && integer_onep (TREE_OPERAND (arg1, 0)))
8036 return fold (build2 (LSHIFT_EXPR, type, arg0,
8037 TREE_OPERAND (arg1, 1)));
8038 if (TREE_CODE (arg0) == LSHIFT_EXPR
8039 && integer_onep (TREE_OPERAND (arg0, 0)))
8040 return fold (build2 (LSHIFT_EXPR, type, arg1,
8041 TREE_OPERAND (arg0, 1)));
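/* E.g. both "a * (1 << b)" and "(1 << b) * a" fold to
   "a << b".  */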
8043 if (TREE_CODE (arg1) == INTEGER_CST
8044 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
8045 fold_convert (type, arg1),
8046 code, NULL_TREE)))
8047 return fold_convert (type, tem);
8050 else
8052 /* Maybe fold x * 0 to 0. The expressions aren't the same
8053 when x is NaN, since x * 0 is also NaN. Nor are they the
8054 same in modes with signed zeros, since multiplying a
8055 negative value by 0 gives -0, not +0. */
8056 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8057 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8058 && real_zerop (arg1))
8059 return omit_one_operand (type, arg1, arg0);
8060 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
8061 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8062 && real_onep (arg1))
8063 return non_lvalue (fold_convert (type, arg0));
8065 /* Transform x * -1.0 into -x. */
8066 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8067 && real_minus_onep (arg1))
8068 return fold_convert (type, negate_expr (arg0));
8070 /* Convert (C1/X)*C2 into (C1*C2)/X. */
8071 if (flag_unsafe_math_optimizations
8072 && TREE_CODE (arg0) == RDIV_EXPR
8073 && TREE_CODE (arg1) == REAL_CST
8074 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
8076 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
8077 arg1, 0);
8078 if (tem)
8079 return fold (build2 (RDIV_EXPR, type, tem,
8080 TREE_OPERAND (arg0, 1)));
8083 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
8084 if (operand_equal_p (arg0, arg1, 0))
8086 tree tem = fold_strip_sign_ops (arg0);
8087 if (tem != NULL_TREE)
8089 tem = fold_convert (type, tem);
8090 return fold (build2 (MULT_EXPR, type, tem, tem));
8094 if (flag_unsafe_math_optimizations)
8096 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8097 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8099 /* Optimizations of root(...)*root(...). */
8100 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
8102 tree rootfn, arg, arglist;
8103 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8104 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8106 /* Optimize sqrt(x)*sqrt(x) as x. */
8107 if (BUILTIN_SQRT_P (fcode0)
8108 && operand_equal_p (arg00, arg10, 0)
8109 && ! HONOR_SNANS (TYPE_MODE (type)))
8110 return arg00;
8112 /* Optimize root(x)*root(y) as root(x*y). */
8113 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8114 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
8115 arglist = build_tree_list (NULL_TREE, arg);
8116 return build_function_call_expr (rootfn, arglist);
8119 /* Optimize expN(x)*expN(y) as expN(x+y). */
8120 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
8122 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8123 tree arg = build2 (PLUS_EXPR, type,
8124 TREE_VALUE (TREE_OPERAND (arg0, 1)),
8125 TREE_VALUE (TREE_OPERAND (arg1, 1)));
8126 tree arglist = build_tree_list (NULL_TREE, fold (arg));
8127 return build_function_call_expr (expfn, arglist);
8130 /* Optimizations of pow(...)*pow(...). */
8131 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
8132 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
8133 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
8135 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8136 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8137 1)));
8138 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8139 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8140 1)));
8142 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
8143 if (operand_equal_p (arg01, arg11, 0))
8145 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8146 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
8147 tree arglist = tree_cons (NULL_TREE, fold (arg),
8148 build_tree_list (NULL_TREE,
8149 arg01));
8150 return build_function_call_expr (powfn, arglist);
8153 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
8154 if (operand_equal_p (arg00, arg10, 0))
8156 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8157 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
8158 tree arglist = tree_cons (NULL_TREE, arg00,
8159 build_tree_list (NULL_TREE,
8160 arg));
8161 return build_function_call_expr (powfn, arglist);
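/* E.g. "pow (x, 2.0) * pow (y, 2.0)" becomes "pow (x*y, 2.0)"
   and "pow (x, 2.0) * pow (x, 3.0)" becomes "pow (x, 5.0)".  */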
8165 /* Optimize tan(x)*cos(x) as sin(x). */
8166 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
8167 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
8168 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
8169 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
8170 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
8171 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
8172 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8173 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8175 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
8177 if (sinfn != NULL_TREE)
8178 return build_function_call_expr (sinfn,
8179 TREE_OPERAND (arg0, 1));
8182 /* Optimize x*pow(x,c) as pow(x,c+1). */
8183 if (fcode1 == BUILT_IN_POW
8184 || fcode1 == BUILT_IN_POWF
8185 || fcode1 == BUILT_IN_POWL)
8187 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8188 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8189 1)));
8190 if (TREE_CODE (arg11) == REAL_CST
8191 && ! TREE_CONSTANT_OVERFLOW (arg11)
8192 && operand_equal_p (arg0, arg10, 0))
8194 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8195 REAL_VALUE_TYPE c;
8196 tree arg, arglist;
8198 c = TREE_REAL_CST (arg11);
8199 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8200 arg = build_real (type, c);
8201 arglist = build_tree_list (NULL_TREE, arg);
8202 arglist = tree_cons (NULL_TREE, arg0, arglist);
8203 return build_function_call_expr (powfn, arglist);
8207 /* Optimize pow(x,c)*x as pow(x,c+1). */
8208 if (fcode0 == BUILT_IN_POW
8209 || fcode0 == BUILT_IN_POWF
8210 || fcode0 == BUILT_IN_POWL)
8212 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8213 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8214 1)));
8215 if (TREE_CODE (arg01) == REAL_CST
8216 && ! TREE_CONSTANT_OVERFLOW (arg01)
8217 && operand_equal_p (arg1, arg00, 0))
8219 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8220 REAL_VALUE_TYPE c;
8221 tree arg, arglist;
8223 c = TREE_REAL_CST (arg01);
8224 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8225 arg = build_real (type, c);
8226 arglist = build_tree_list (NULL_TREE, arg);
8227 arglist = tree_cons (NULL_TREE, arg1, arglist);
8228 return build_function_call_expr (powfn, arglist);
8232 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8233 if (! optimize_size
8234 && operand_equal_p (arg0, arg1, 0))
8236 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8238 if (powfn)
8240 tree arg = build_real (type, dconst2);
8241 tree arglist = build_tree_list (NULL_TREE, arg);
8242 arglist = tree_cons (NULL_TREE, arg0, arglist);
8243 return build_function_call_expr (powfn, arglist);
8248 goto associate;
8250 case BIT_IOR_EXPR:
8251 bit_ior:
8252 if (integer_all_onesp (arg1))
8253 return omit_one_operand (type, arg1, arg0);
8254 if (integer_zerop (arg1))
8255 return non_lvalue (fold_convert (type, arg0));
8256 if (operand_equal_p (arg0, arg1, 0))
8257 return non_lvalue (fold_convert (type, arg0));
8259 /* ~X | X is -1. */
8260 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8261 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8263 t1 = build_int_cst (type, -1);
8264 t1 = force_fit_type (t1, 0, false, false);
8265 return omit_one_operand (type, t1, arg1);
8268 /* X | ~X is -1. */
8269 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8270 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8272 t1 = build_int_cst (type, -1);
8273 t1 = force_fit_type (t1, 0, false, false);
8274 return omit_one_operand (type, t1, arg0);
8277 t1 = distribute_bit_expr (code, type, arg0, arg1);
8278 if (t1 != NULL_TREE)
8279 return t1;
8281 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8283 This results in more efficient code for machines without a NAND
8284 instruction. Combine will canonicalize to the first form
8285 which will allow use of NAND instructions provided by the
8286 backend if they exist. */
8287 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8288 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8290 return fold (build1 (BIT_NOT_EXPR, type,
8291 build2 (BIT_AND_EXPR, type,
8292 TREE_OPERAND (arg0, 0),
8293 TREE_OPERAND (arg1, 0))));
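/* I.e. "~a | ~b" is rewritten as "~(a & b)".  */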
8296 /* See if this can be simplified into a rotate first. If that
8297 is unsuccessful continue in the association code. */
8298 goto bit_rotate;
8300 case BIT_XOR_EXPR:
8301 if (integer_zerop (arg1))
8302 return non_lvalue (fold_convert (type, arg0));
8303 if (integer_all_onesp (arg1))
8304 return fold (build1 (BIT_NOT_EXPR, type, arg0));
8305 if (operand_equal_p (arg0, arg1, 0))
8306 return omit_one_operand (type, integer_zero_node, arg0);
8308 /* ~X ^ X is -1. */
8309 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8310 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8312 t1 = build_int_cst (type, -1);
8313 t1 = force_fit_type (t1, 0, false, false);
8314 return omit_one_operand (type, t1, arg1);
8317 /* X ^ ~X is -1. */
8318 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8319 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8321 t1 = build_int_cst (type, -1);
8322 t1 = force_fit_type (t1, 0, false, false);
8323 return omit_one_operand (type, t1, arg0);
8326 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8327 with a constant, and the two constants have no bits in common,
8328 we should treat this as a BIT_IOR_EXPR since this may produce more
8329 simplifications. */
8330 if (TREE_CODE (arg0) == BIT_AND_EXPR
8331 && TREE_CODE (arg1) == BIT_AND_EXPR
8332 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8333 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8334 && integer_zerop (const_binop (BIT_AND_EXPR,
8335 TREE_OPERAND (arg0, 1),
8336 TREE_OPERAND (arg1, 1), 0)))
8338 code = BIT_IOR_EXPR;
8339 goto bit_ior;
8342 /* See if this can be simplified into a rotate first. If that
8343 is unsuccessful continue in the association code. */
8344 goto bit_rotate;
8346 case BIT_AND_EXPR:
8347 if (integer_all_onesp (arg1))
8348 return non_lvalue (fold_convert (type, arg0));
8349 if (integer_zerop (arg1))
8350 return omit_one_operand (type, arg1, arg0);
8351 if (operand_equal_p (arg0, arg1, 0))
8352 return non_lvalue (fold_convert (type, arg0));
8354 /* ~X & X is always zero. */
8355 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8356 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8357 return omit_one_operand (type, integer_zero_node, arg1);
8359 /* X & ~X is always zero. */
8360 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8361 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8362 return omit_one_operand (type, integer_zero_node, arg0);
8364 t1 = distribute_bit_expr (code, type, arg0, arg1);
8365 if (t1 != NULL_TREE)
8366 return t1;
8367 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8368 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8369 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8371 unsigned int prec
8372 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8374 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8375 && (~TREE_INT_CST_LOW (arg1)
8376 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8377 return fold_convert (type, TREE_OPERAND (arg0, 0));
8380 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8382 This results in more efficient code for machines without a NOR
8383 instruction. Combine will canonicalize to the first form
8384 which will allow use of NOR instructions provided by the
8385 backend if they exist. */
8386 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8387 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8389 return fold (build1 (BIT_NOT_EXPR, type,
8390 build2 (BIT_IOR_EXPR, type,
8391 TREE_OPERAND (arg0, 0),
8392 TREE_OPERAND (arg1, 0))));
8395 goto associate;
8397 case RDIV_EXPR:
8398 /* Don't touch a floating-point divide by zero unless the mode
8399 of the constant can represent infinity. */
8400 if (TREE_CODE (arg1) == REAL_CST
8401 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8402 && real_zerop (arg1))
8403 return t;
8405 /* (-A) / (-B) -> A / B */
8406 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8407 return fold (build2 (RDIV_EXPR, type,
8408 TREE_OPERAND (arg0, 0),
8409 negate_expr (arg1)));
8410 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8411 return fold (build2 (RDIV_EXPR, type,
8412 negate_expr (arg0),
8413 TREE_OPERAND (arg1, 0)));
8415 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8416 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8417 && real_onep (arg1))
8418 return non_lvalue (fold_convert (type, arg0));
8420 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8421 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8422 && real_minus_onep (arg1))
8423 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8425 /* If ARG1 is a constant, we can convert this to a multiply by the
8426 reciprocal. This does not have the same rounding properties,
8427 so only do this if -funsafe-math-optimizations. We can actually
8428 always safely do it if ARG1 is a power of two, but it's hard to
8429 tell if it is or not in a portable manner. */
8430 if (TREE_CODE (arg1) == REAL_CST)
8432 if (flag_unsafe_math_optimizations
8433 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8434 arg1, 0)))
8435 return fold (build2 (MULT_EXPR, type, arg0, tem));
8436 /* Find the reciprocal if optimizing and the result is exact. */
8437 if (optimize)
8439 REAL_VALUE_TYPE r;
8440 r = TREE_REAL_CST (arg1);
8441 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
8443 tem = build_real (type, r);
8444 return fold (build2 (MULT_EXPR, type, arg0, tem));
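/* E.g. "x / 4.0" becomes "x * 0.25" whenever we are optimizing,
   since 0.25 is an exact inverse, while "x / 3.0" is rewritten
   into a multiply only under -funsafe-math-optimizations.  */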
8448 /* Convert A/B/C to A/(B*C). */
8449 if (flag_unsafe_math_optimizations
8450 && TREE_CODE (arg0) == RDIV_EXPR)
8451 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8452 fold (build2 (MULT_EXPR, type,
8453 TREE_OPERAND (arg0, 1), arg1))));
8455 /* Convert A/(B/C) to (A/B)*C. */
8456 if (flag_unsafe_math_optimizations
8457 && TREE_CODE (arg1) == RDIV_EXPR)
8458 return fold (build2 (MULT_EXPR, type,
8459 fold (build2 (RDIV_EXPR, type, arg0,
8460 TREE_OPERAND (arg1, 0))),
8461 TREE_OPERAND (arg1, 1)));
8463 /* Convert C1/(X*C2) into (C1/C2)/X. */
8464 if (flag_unsafe_math_optimizations
8465 && TREE_CODE (arg1) == MULT_EXPR
8466 && TREE_CODE (arg0) == REAL_CST
8467 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8469 tree tem = const_binop (RDIV_EXPR, arg0,
8470 TREE_OPERAND (arg1, 1), 0);
8471 if (tem)
8472 return fold (build2 (RDIV_EXPR, type, tem,
8473 TREE_OPERAND (arg1, 0)));
8476 if (TREE_CODE (type) == COMPLEX_TYPE)
8478 tem = fold_complex_div (type, arg0, arg1, code);
8479 if (tem)
8480 return tem;
8483 if (flag_unsafe_math_optimizations)
8485 enum built_in_function fcode = builtin_mathfn_code (arg1);
8486 /* Optimize x/expN(y) into x*expN(-y). */
8487 if (BUILTIN_EXPONENT_P (fcode))
8489 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8490 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8491 tree arglist = build_tree_list (NULL_TREE,
8492 fold_convert (type, arg));
8493 arg1 = build_function_call_expr (expfn, arglist);
8494 return fold (build2 (MULT_EXPR, type, arg0, arg1));
8497 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8498 if (fcode == BUILT_IN_POW
8499 || fcode == BUILT_IN_POWF
8500 || fcode == BUILT_IN_POWL)
8502 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8503 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8504 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8505 tree neg11 = fold_convert (type, negate_expr (arg11));
8506 tree arglist = tree_cons (NULL_TREE, arg10,
8507 build_tree_list (NULL_TREE, neg11));
8508 arg1 = build_function_call_expr (powfn, arglist);
8509 return fold (build2 (MULT_EXPR, type, arg0, arg1));
8513 if (flag_unsafe_math_optimizations)
8515 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8516 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8518 /* Optimize sin(x)/cos(x) as tan(x). */
8519 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8520 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8521 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8522 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8523 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8525 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8527 if (tanfn != NULL_TREE)
8528 return build_function_call_expr (tanfn,
8529 TREE_OPERAND (arg0, 1));
8532 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8533 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8534 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8535 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8536 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8537 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8539 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8541 if (tanfn != NULL_TREE)
8543 tree tmp = TREE_OPERAND (arg0, 1);
8544 tmp = build_function_call_expr (tanfn, tmp);
8545 return fold (build2 (RDIV_EXPR, type,
8546 build_real (type, dconst1), tmp));
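/* E.g. "sin (x) / cos (x)" becomes "tan (x)", and
   "cos (x) / sin (x)" becomes "1.0 / tan (x)".  */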
8550 /* Optimize pow(x,c)/x as pow(x,c-1). */
8551 if (fcode0 == BUILT_IN_POW
8552 || fcode0 == BUILT_IN_POWF
8553 || fcode0 == BUILT_IN_POWL)
8555 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8556 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8557 if (TREE_CODE (arg01) == REAL_CST
8558 && ! TREE_CONSTANT_OVERFLOW (arg01)
8559 && operand_equal_p (arg1, arg00, 0))
8561 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8562 REAL_VALUE_TYPE c;
8563 tree arg, arglist;
8565 c = TREE_REAL_CST (arg01);
8566 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8567 arg = build_real (type, c);
8568 arglist = build_tree_list (NULL_TREE, arg);
8569 arglist = tree_cons (NULL_TREE, arg1, arglist);
8570 return build_function_call_expr (powfn, arglist);
8574 goto binary;
8576 case TRUNC_DIV_EXPR:
8577 case ROUND_DIV_EXPR:
8578 case FLOOR_DIV_EXPR:
8579 case CEIL_DIV_EXPR:
8580 case EXACT_DIV_EXPR:
8581 if (integer_onep (arg1))
8582 return non_lvalue (fold_convert (type, arg0));
8583 if (integer_zerop (arg1))
8584 return t;
8585 /* X / -1 is -X. */
8586 if (!TYPE_UNSIGNED (type)
8587 && TREE_CODE (arg1) == INTEGER_CST
8588 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8589 && TREE_INT_CST_HIGH (arg1) == -1)
8590 return fold_convert (type, negate_expr (arg0));
8592 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8593 operation, EXACT_DIV_EXPR.
8595 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8596 At one time others generated faster code; it's not clear whether they do
8597 after the last round of changes to the DIV code in expmed.c. */
8598 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8599 && multiple_of_p (type, arg0, arg1))
8600 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
8602 if (TREE_CODE (arg1) == INTEGER_CST
8603 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
8604 code, NULL_TREE)))
8605 return fold_convert (type, tem);
8607 if (TREE_CODE (type) == COMPLEX_TYPE)
8609 tem = fold_complex_div (type, arg0, arg1, code);
8610 if (tem)
8611 return tem;
8613 goto binary;
8615 case CEIL_MOD_EXPR:
8616 case FLOOR_MOD_EXPR:
8617 case ROUND_MOD_EXPR:
8618 case TRUNC_MOD_EXPR:
8619 /* X % 1 is always zero, but be sure to preserve any side
8620 effects in X. */
8621 if (integer_onep (arg1))
8622 return omit_one_operand (type, integer_zero_node, arg0);
8624 /* For X % 0, return X % 0 unchanged so that we can get the
8625 proper warnings and errors. */
8626 if (integer_zerop (arg1))
8627 return t;
8629 /* 0 % X is always zero, but be sure to preserve any side
8630 effects in X. Place this after checking for X == 0. */
8631 if (integer_zerop (arg0))
8632 return omit_one_operand (type, integer_zero_node, arg1);
8634 /* X % -1 is zero. */
8635 if (!TYPE_UNSIGNED (type)
8636 && TREE_CODE (arg1) == INTEGER_CST
8637 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8638 && TREE_INT_CST_HIGH (arg1) == -1)
8639 return omit_one_operand (type, integer_zero_node, arg0);
8641 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
8642 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
8643 if (code == TRUNC_MOD_EXPR
8644 && TYPE_UNSIGNED (type)
8645 && integer_pow2p (arg1))
8647 unsigned HOST_WIDE_INT high, low;
8648 tree mask;
8649 int l;
8651 l = tree_log2 (arg1);
8652 if (l >= HOST_BITS_PER_WIDE_INT)
8654 high = ((unsigned HOST_WIDE_INT) 1
8655 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8656 low = -1;
8658 else
8660 high = 0;
8661 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8664 mask = build_int_cst_wide (type, low, high);
8665 return fold (build2 (BIT_AND_EXPR, type,
8666 fold_convert (type, arg0), mask));
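/* E.g. for unsigned x, "x % 8" becomes "x & 7".  */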
8669 /* X % -C is the same as X % C. */
8670 if (code == TRUNC_MOD_EXPR
8671 && !TYPE_UNSIGNED (type)
8672 && TREE_CODE (arg1) == INTEGER_CST
8673 && TREE_INT_CST_HIGH (arg1) < 0
8674 && !flag_trapv
8675 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8676 && !sign_bit_p (arg1, arg1))
8677 return fold (build2 (code, type, fold_convert (type, arg0),
8678 fold_convert (type, negate_expr (arg1))));
8680 /* X % -Y is the same as X % Y. */
8681 if (code == TRUNC_MOD_EXPR
8682 && !TYPE_UNSIGNED (type)
8683 && TREE_CODE (arg1) == NEGATE_EXPR
8684 && !flag_trapv)
8685 return fold (build2 (code, type, fold_convert (type, arg0),
8686 fold_convert (type, TREE_OPERAND (arg1, 0))));
8688 if (TREE_CODE (arg1) == INTEGER_CST
8689 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
8690 code, NULL_TREE)))
8691 return fold_convert (type, tem);
8693 goto binary;
8695 case LROTATE_EXPR:
8696 case RROTATE_EXPR:
8697 if (integer_all_onesp (arg0))
8698 return omit_one_operand (type, arg0, arg1);
8699 goto shift;
8701 case RSHIFT_EXPR:
8702 /* Optimize -1 >> x for arithmetic right shifts. */
8703 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8704 return omit_one_operand (type, arg0, arg1);
8705 /* ... fall through ... */
8707 case LSHIFT_EXPR:
8708 shift:
8709 if (integer_zerop (arg1))
8710 return non_lvalue (fold_convert (type, arg0));
8711 if (integer_zerop (arg0))
8712 return omit_one_operand (type, arg0, arg1);
8714 /* Since a negative shift count is not well-defined,
8715 don't try to compute it in the compiler. */
8716 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8717 return t;
8718 /* Rewrite an LROTATE_EXPR by a constant into an
8719 RROTATE_EXPR by a new constant. */
8720 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8722 tree tem = build_int_cst (NULL_TREE,
8723 GET_MODE_BITSIZE (TYPE_MODE (type)));
8724 tem = fold_convert (TREE_TYPE (arg1), tem);
8725 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8726 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
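/* E.g. in a 32-bit type, a rotate left by 5 is rewritten as a
   rotate right by 27.  */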
8729 /* If we have a rotate of a bit operation with the rotate count and
8730 the second operand of the bit operation both constant,
8731 permute the two operations. */
8732 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8733 && (TREE_CODE (arg0) == BIT_AND_EXPR
8734 || TREE_CODE (arg0) == BIT_IOR_EXPR
8735 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8736 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8737 return fold (build2 (TREE_CODE (arg0), type,
8738 fold (build2 (code, type,
8739 TREE_OPERAND (arg0, 0), arg1)),
8740 fold (build2 (code, type,
8741 TREE_OPERAND (arg0, 1), arg1))));
8743 /* Two consecutive rotates adding up to the width of the mode can
8744 be ignored. */
8745 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8746 && TREE_CODE (arg0) == RROTATE_EXPR
8747 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8748 && TREE_INT_CST_HIGH (arg1) == 0
8749 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8750 && ((TREE_INT_CST_LOW (arg1)
8751 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8752 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8753 return TREE_OPERAND (arg0, 0);
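/* E.g. rotating a 32-bit value right by 10 and then right by 22
   yields the original value again.  */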
8755 goto binary;
8757 case MIN_EXPR:
8758 if (operand_equal_p (arg0, arg1, 0))
8759 return omit_one_operand (type, arg0, arg1);
8760 if (INTEGRAL_TYPE_P (type)
8761 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8762 return omit_one_operand (type, arg1, arg0);
8763 goto associate;
8765 case MAX_EXPR:
8766 if (operand_equal_p (arg0, arg1, 0))
8767 return omit_one_operand (type, arg0, arg1);
8768 if (INTEGRAL_TYPE_P (type)
8769 && TYPE_MAX_VALUE (type)
8770 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8771 return omit_one_operand (type, arg1, arg0);
8772 goto associate;
8774 case TRUTH_ANDIF_EXPR:
8775 /* Note that the operands of this must be ints
8776 and their values must be 0 or 1.
8777 ("true" is a fixed value perhaps depending on the language.) */
8778 /* If first arg is constant zero, return it. */
8779 if (integer_zerop (arg0))
8780 return fold_convert (type, arg0);
8781 case TRUTH_AND_EXPR:
8782 /* If either arg is constant true, drop it. */
8783 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8784 return non_lvalue (fold_convert (type, arg1));
8785 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8786 /* Preserve sequence points. */
8787 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8788 return non_lvalue (fold_convert (type, arg0));
8789 /* If second arg is constant zero, result is zero, but first arg
8790 must be evaluated. */
8791 if (integer_zerop (arg1))
8792 return omit_one_operand (type, arg1, arg0);
8793 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8794 case will be handled here. */
8795 if (integer_zerop (arg0))
8796 return omit_one_operand (type, arg0, arg1);
8798 /* !X && X is always false. */
8799 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8800 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8801 return omit_one_operand (type, integer_zero_node, arg1);
8802 /* X && !X is always false. */
8803 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8804 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8805 return omit_one_operand (type, integer_zero_node, arg0);
8807 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8808 means A >= Y && A != MAX, but in this case we know that
8809 A < X <= MAX. */
8811 if (!TREE_SIDE_EFFECTS (arg0)
8812 && !TREE_SIDE_EFFECTS (arg1))
8814 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8815 if (tem)
8816 return fold (build2 (code, type, tem, arg1));
8818 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8819 if (tem)
8820 return fold (build2 (code, type, arg0, tem));
8823 truth_andor:
8824 /* We only do these simplifications if we are optimizing. */
8825 if (!optimize)
8826 return t;
8828 /* Check for things like (A || B) && (A || C). We can convert this
8829 to A || (B && C). Note that either operator can be any of the four
8830 truth and/or operations and the transformation will still be
8831 valid. Also note that we only care about order for the
8832 ANDIF and ORIF operators. If B contains side effects, this
8833 might change the truth-value of A. */
8834 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8835 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8836 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8837 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8838 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8839 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8841 tree a00 = TREE_OPERAND (arg0, 0);
8842 tree a01 = TREE_OPERAND (arg0, 1);
8843 tree a10 = TREE_OPERAND (arg1, 0);
8844 tree a11 = TREE_OPERAND (arg1, 1);
8845 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8846 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8847 && (code == TRUTH_AND_EXPR
8848 || code == TRUTH_OR_EXPR));
8850 if (operand_equal_p (a00, a10, 0))
8851 return fold (build2 (TREE_CODE (arg0), type, a00,
8852 fold (build2 (code, type, a01, a11))));
8853 else if (commutative && operand_equal_p (a00, a11, 0))
8854 return fold (build2 (TREE_CODE (arg0), type, a00,
8855 fold (build2 (code, type, a01, a10))));
8856 else if (commutative && operand_equal_p (a01, a10, 0))
8857 return fold (build2 (TREE_CODE (arg0), type, a01,
8858 fold (build2 (code, type, a00, a11))));
8860 /* This case is tricky because we must either have commutative
8861 operators or else A10 must not have side-effects. */
8863 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8864 && operand_equal_p (a01, a11, 0))
8865 return fold (build2 (TREE_CODE (arg0), type,
8866 fold (build2 (code, type, a00, a10)),
8867 a01));
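/* E.g. "(a || b) && (a || c)" becomes "a || (b && c)", which
   evaluates a only once.  */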
8870 /* See if we can build a range comparison. */
8871 if (0 != (tem = fold_range_test (t)))
8872 return tem;
8874 /* Check for the possibility of merging component references. If our
8875 lhs is another similar operation, try to merge its rhs with our
8876 rhs. Then try to merge our lhs and rhs. */
8877 if (TREE_CODE (arg0) == code
8878 && 0 != (tem = fold_truthop (code, type,
8879 TREE_OPERAND (arg0, 1), arg1)))
8880 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8882 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8883 return tem;
8885 return t;
8887 case TRUTH_ORIF_EXPR:
8888 /* Note that the operands of this must be ints
8889 and their values must be 0 or true.
8890 ("true" is a fixed value perhaps depending on the language.) */
8891 /* If first arg is constant true, return it. */
8892 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8893 return fold_convert (type, arg0);
8894 case TRUTH_OR_EXPR:
8895 /* If either arg is constant zero, drop it. */
8896 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8897 return non_lvalue (fold_convert (type, arg1));
8898 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8899 /* Preserve sequence points. */
8900 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8901 return non_lvalue (fold_convert (type, arg0));
8902 /* If second arg is constant true, result is true, but we must
8903 evaluate first arg. */
8904 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8905 return omit_one_operand (type, arg1, arg0);
8906 /* Likewise for first arg, but note this only occurs here for
8907 TRUTH_OR_EXPR. */
8908 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8909 return omit_one_operand (type, arg0, arg1);
8911 /* !X || X is always true. */
8912 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8913 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8914 return omit_one_operand (type, integer_one_node, arg1);
8915 /* X || !X is always true. */
8916 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8917 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8918 return omit_one_operand (type, integer_one_node, arg0);
8920 goto truth_andor;
8922 case TRUTH_XOR_EXPR:
8923 /* If the second arg is constant zero, drop it. */
8924 if (integer_zerop (arg1))
8925 return non_lvalue (fold_convert (type, arg0));
8926 /* If the second arg is constant true, this is a logical inversion. */
8927 if (integer_onep (arg1))
8928 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
8929 /* Identical arguments cancel to zero. */
8930 if (operand_equal_p (arg0, arg1, 0))
8931 return omit_one_operand (type, integer_zero_node, arg0);
8933 /* !X ^ X is always true. */
8934 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8935 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8936 return omit_one_operand (type, integer_one_node, arg1);
8938 /* X ^ !X is always true. */
8939 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8940 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8941 return omit_one_operand (type, integer_one_node, arg0);
8943 return t;
8945 case EQ_EXPR:
8946 case NE_EXPR:
8947 case LT_EXPR:
8948 case GT_EXPR:
8949 case LE_EXPR:
8950 case GE_EXPR:
8951 /* If one arg is a real or integer constant, put it last. */
8952 if (tree_swap_operands_p (arg0, arg1, true))
8953 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
8955 /* If this is an equality comparison of the address of a non-weak
8956 object against zero, then we know the result. */
8957 if ((code == EQ_EXPR || code == NE_EXPR)
8958 && TREE_CODE (arg0) == ADDR_EXPR
8959 && DECL_P (TREE_OPERAND (arg0, 0))
8960 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8961 && integer_zerop (arg1))
8962 return constant_boolean_node (code != EQ_EXPR, type);
8964 /* If this is an equality comparison of the address of two non-weak,
8965 unaliased symbols, neither of which is extern (since we do not
8966 have access to attributes for externs), then we know the result. */
8967 if ((code == EQ_EXPR || code == NE_EXPR)
8968 && TREE_CODE (arg0) == ADDR_EXPR
8969 && DECL_P (TREE_OPERAND (arg0, 0))
8970 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8971 && ! lookup_attribute ("alias",
8972 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8973 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8974 && TREE_CODE (arg1) == ADDR_EXPR
8975 && DECL_P (TREE_OPERAND (arg1, 0))
8976 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8977 && ! lookup_attribute ("alias",
8978 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8979 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8980 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
8981 ? code == EQ_EXPR : code != EQ_EXPR,
8982 type);
8984 /* If this is a comparison of two exprs that look like an
8985 ARRAY_REF of the same object, then we can fold this to a
8986 comparison of the two offsets. */
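      /* For example (illustrative): comparing &a[i] with &a[j], where
         both sides refer to the same array A, reduces to comparing I
         with J, provided the two offset types agree.  */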
8987 if (COMPARISON_CLASS_P (t))
8989 tree base0, offset0, base1, offset1;
8991 if (extract_array_ref (arg0, &base0, &offset0)
8992 && extract_array_ref (arg1, &base1, &offset1)
8993 && operand_equal_p (base0, base1, 0))
8995 if (offset0 == NULL_TREE
8996 && offset1 == NULL_TREE)
8998 offset0 = integer_zero_node;
8999 offset1 = integer_zero_node;
9001 else if (offset0 == NULL_TREE)
9002 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
9003 else if (offset1 == NULL_TREE)
9004 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
9006 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
9007 return fold (build2 (code, type, offset0, offset1));
9011 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9013 tree targ0 = strip_float_extensions (arg0);
9014 tree targ1 = strip_float_extensions (arg1);
9015 tree newtype = TREE_TYPE (targ0);
9017 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9018 newtype = TREE_TYPE (targ1);
9020 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9021 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9022 return fold (build2 (code, type, fold_convert (newtype, targ0),
9023 fold_convert (newtype, targ1)));
9025 /* (-a) CMP (-b) -> b CMP a */
9026 if (TREE_CODE (arg0) == NEGATE_EXPR
9027 && TREE_CODE (arg1) == NEGATE_EXPR)
9028 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
9029 TREE_OPERAND (arg0, 0)));
9031 if (TREE_CODE (arg1) == REAL_CST)
9033 REAL_VALUE_TYPE cst;
9034 cst = TREE_REAL_CST (arg1);
9036 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9037 if (TREE_CODE (arg0) == NEGATE_EXPR)
9038 return
9039 fold (build2 (swap_tree_comparison (code), type,
9040 TREE_OPERAND (arg0, 0),
9041 build_real (TREE_TYPE (arg1),
9042 REAL_VALUE_NEGATE (cst))));
9044 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9045 /* a CMP (-0) -> a CMP 0 */
9046 if (REAL_VALUE_MINUS_ZERO (cst))
9047 return fold (build2 (code, type, arg0,
9048 build_real (TREE_TYPE (arg1), dconst0)));
9050 /* x != NaN is always true, other ops are always false. */
9051 if (REAL_VALUE_ISNAN (cst)
9052 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9054 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9055 return omit_one_operand (type, tem, arg0);
9058 /* Fold comparisons against infinity. */
9059 if (REAL_VALUE_ISINF (cst))
9061 tem = fold_inf_compare (code, type, arg0, arg1);
9062 if (tem != NULL_TREE)
9063 return tem;
9067 /* If this is a comparison of a real constant with a PLUS_EXPR
9068 or a MINUS_EXPR of a real constant, we can convert it into a
9069 comparison with a revised real constant as long as no overflow
9070 occurs when unsafe_math_optimizations are enabled. */
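      /* For example (illustrative values): under
         -funsafe-math-optimizations, x + 2.0 < 5.0 folds to x < 3.0,
         since 5.0 - 2.0 is exact; the rewrite is abandoned if the new
         constant would overflow.  */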
9071 if (flag_unsafe_math_optimizations
9072 && TREE_CODE (arg1) == REAL_CST
9073 && (TREE_CODE (arg0) == PLUS_EXPR
9074 || TREE_CODE (arg0) == MINUS_EXPR)
9075 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9076 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9077 ? MINUS_EXPR : PLUS_EXPR,
9078 arg1, TREE_OPERAND (arg0, 1), 0))
9079 && ! TREE_CONSTANT_OVERFLOW (tem))
9080 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
9082 /* Likewise, we can simplify a comparison of a real constant with
9083 a MINUS_EXPR whose first operand is also a real constant, i.e.
9084 (c1 - x) < c2 becomes x > c1-c2. */
9085 if (flag_unsafe_math_optimizations
9086 && TREE_CODE (arg1) == REAL_CST
9087 && TREE_CODE (arg0) == MINUS_EXPR
9088 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9089 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9090 arg1, 0))
9091 && ! TREE_CONSTANT_OVERFLOW (tem))
9092 return fold (build2 (swap_tree_comparison (code), type,
9093 TREE_OPERAND (arg0, 1), tem));
9095 /* Fold comparisons against built-in math functions. */
9096 if (TREE_CODE (arg1) == REAL_CST
9097 && flag_unsafe_math_optimizations
9098 && ! flag_errno_math)
9100 enum built_in_function fcode = builtin_mathfn_code (arg0);
9102 if (fcode != END_BUILTINS)
9104 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9105 if (tem != NULL_TREE)
9106 return tem;
9111 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
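      /* For example, with an increment of 1: i++ == 5 becomes
         ++i == 6; the old value equals 5 exactly when the incremented
         value equals 6.  */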
9112 if (TREE_CONSTANT (arg1)
9113 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9114 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9115 /* This optimization is invalid for ordered comparisons
9116 if CONST+INCR overflows or if foo+incr might overflow.
9117 This optimization is invalid for floating point due to rounding.
9118 For pointer types we assume overflow doesn't happen. */
9119 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9120 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9121 && (code == EQ_EXPR || code == NE_EXPR))))
9123 tree varop, newconst;
9125 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9127 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
9128 arg1, TREE_OPERAND (arg0, 1)));
9129 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9130 TREE_OPERAND (arg0, 0),
9131 TREE_OPERAND (arg0, 1));
9133 else
9135 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
9136 arg1, TREE_OPERAND (arg0, 1)));
9137 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9138 TREE_OPERAND (arg0, 0),
9139 TREE_OPERAND (arg0, 1));
9143 /* If VAROP is a reference to a bitfield, we must mask
9144 the constant by the width of the field. */
9145 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9146 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9147 && host_integerp (DECL_SIZE (TREE_OPERAND
9148 (TREE_OPERAND (varop, 0), 1)), 1))
9150 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9151 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9152 tree folded_compare, shift;
9154 /* First check whether the comparison would come out
9155 always the same. If we don't do that we would
9156 change the meaning with the masking. */
9157 folded_compare = fold (build2 (code, type,
9158 TREE_OPERAND (varop, 0), arg1));
9159 if (integer_zerop (folded_compare)
9160 || integer_onep (folded_compare))
9161 return omit_one_operand (type, folded_compare, varop);
9163 shift = build_int_cst (NULL_TREE,
9164 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9165 shift = fold_convert (TREE_TYPE (varop), shift);
9166 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9167 newconst, shift));
9168 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9169 newconst, shift));
9172 return fold (build2 (code, type, varop, newconst));
9175 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9176 This transformation affects the cases which are handled in later
9177 optimizations involving comparisons with non-negative constants. */
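      /* For example, with C == 1: X >= 1 becomes X > 0 and X < 1
         becomes X <= 0, exposing comparisons against zero to the
         transformations below.  */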
9178 if (TREE_CODE (arg1) == INTEGER_CST
9179 && TREE_CODE (arg0) != INTEGER_CST
9180 && tree_int_cst_sgn (arg1) > 0)
9182 switch (code)
9184 case GE_EXPR:
9185 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9186 return fold (build2 (GT_EXPR, type, arg0, arg1));
9188 case LT_EXPR:
9189 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9190 return fold (build2 (LE_EXPR, type, arg0, arg1));
9192 default:
9193 break;
9197 /* Comparisons with the highest or lowest possible integer of
9198 the specified size will have known values.
9200 This is quite similar to fold_relational_hi_lo; however,
9201 attempts to share the code have been nothing but trouble. */
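      /* For example, for an 8-bit unsigned type whose maximum is 255:
         X > 255 is always false, X <= 255 is always true, X >= 255
         becomes X == 255, and X < 255 becomes X != 255.  */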
9203 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9205 if (TREE_CODE (arg1) == INTEGER_CST
9206 && ! TREE_CONSTANT_OVERFLOW (arg1)
9207 && width <= 2 * HOST_BITS_PER_WIDE_INT
9208 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9209 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9211 HOST_WIDE_INT signed_max_hi;
9212 unsigned HOST_WIDE_INT signed_max_lo;
9213 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9215 if (width <= HOST_BITS_PER_WIDE_INT)
9217 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9218 - 1;
9219 signed_max_hi = 0;
9220 max_hi = 0;
9222 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9224 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9225 min_lo = 0;
9226 min_hi = 0;
9228 else
9230 max_lo = signed_max_lo;
9231 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9232 min_hi = -1;
9235 else
9237 width -= HOST_BITS_PER_WIDE_INT;
9238 signed_max_lo = -1;
9239 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9240 - 1;
9241 max_lo = -1;
9242 min_lo = 0;
9244 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9246 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9247 min_hi = 0;
9249 else
9251 max_hi = signed_max_hi;
9252 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9256 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9257 && TREE_INT_CST_LOW (arg1) == max_lo)
9258 switch (code)
9260 case GT_EXPR:
9261 return omit_one_operand (type, integer_zero_node, arg0);
9263 case GE_EXPR:
9264 return fold (build2 (EQ_EXPR, type, arg0, arg1));
9266 case LE_EXPR:
9267 return omit_one_operand (type, integer_one_node, arg0);
9269 case LT_EXPR:
9270 return fold (build2 (NE_EXPR, type, arg0, arg1));
9272 /* The GE_EXPR and LT_EXPR cases above are not normally
9273 reached because of previous transformations. */
9275 default:
9276 break;
9278 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9279 == max_hi
9280 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9281 switch (code)
9283 case GT_EXPR:
9284 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9285 return fold (build2 (EQ_EXPR, type, arg0, arg1));
9286 case LE_EXPR:
9287 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9288 return fold (build2 (NE_EXPR, type, arg0, arg1));
9289 default:
9290 break;
9292 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9293 == min_hi
9294 && TREE_INT_CST_LOW (arg1) == min_lo)
9295 switch (code)
9297 case LT_EXPR:
9298 return omit_one_operand (type, integer_zero_node, arg0);
9300 case LE_EXPR:
9301 return fold (build2 (EQ_EXPR, type, arg0, arg1));
9303 case GE_EXPR:
9304 return omit_one_operand (type, integer_one_node, arg0);
9306 case GT_EXPR:
9307 return fold (build2 (NE_EXPR, type, arg0, arg1));
9309 default:
9310 break;
9312 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9313 == min_hi
9314 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9315 switch (code)
9317 case GE_EXPR:
9318 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9319 return fold (build2 (NE_EXPR, type, arg0, arg1));
9320 case LT_EXPR:
9321 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9322 return fold (build2 (EQ_EXPR, type, arg0, arg1));
9323 default:
9324 break;
9327 else if (!in_gimple_form
9328 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9329 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9330 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9331 /* signed_type does not work on pointer types. */
9332 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9334 /* The following case also applies to X < signed_max+1
9335 and X >= signed_max+1 because of previous transformations. */
9336 if (code == LE_EXPR || code == GT_EXPR)
9338 tree st0, st1;
9339 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9340 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9341 return fold
9342 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9343 type, fold_convert (st0, arg0),
9344 fold_convert (st1, integer_zero_node)));
9350 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9351 a MINUS_EXPR of a constant, we can convert it into a comparison with
9352 a revised constant as long as no overflow occurs. */
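      /* For example: X + 3 == 7 folds to X == 4, and X - 3 == 7 folds
         to X == 10, provided computing the revised constant does not
         overflow.  */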
9353 if ((code == EQ_EXPR || code == NE_EXPR)
9354 && TREE_CODE (arg1) == INTEGER_CST
9355 && (TREE_CODE (arg0) == PLUS_EXPR
9356 || TREE_CODE (arg0) == MINUS_EXPR)
9357 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9358 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9359 ? MINUS_EXPR : PLUS_EXPR,
9360 arg1, TREE_OPERAND (arg0, 1), 0))
9361 && ! TREE_CONSTANT_OVERFLOW (tem))
9362 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
9364 /* Similarly for a NEGATE_EXPR. */
9365 else if ((code == EQ_EXPR || code == NE_EXPR)
9366 && TREE_CODE (arg0) == NEGATE_EXPR
9367 && TREE_CODE (arg1) == INTEGER_CST
9368 && 0 != (tem = negate_expr (arg1))
9369 && TREE_CODE (tem) == INTEGER_CST
9370 && ! TREE_CONSTANT_OVERFLOW (tem))
9371 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
9373 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9374 for !=. Don't do this for ordered comparisons due to overflow. */
9375 else if ((code == NE_EXPR || code == EQ_EXPR)
9376 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9377 return fold (build2 (code, type,
9378 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
9380 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9381 && TREE_CODE (arg0) == NOP_EXPR)
9383 /* If we are widening one operand of an integer comparison,
9384 see if the other operand is similarly being widened. Perhaps we
9385 can do the comparison in the narrower type. */
9386 tem = fold_widened_comparison (code, type, arg0, arg1);
9387 if (tem)
9388 return tem;
9390 /* Or if we are changing signedness. */
9391 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9392 if (tem)
9393 return tem;
9396 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9397 constant, we can simplify it. */
9398 else if (TREE_CODE (arg1) == INTEGER_CST
9399 && (TREE_CODE (arg0) == MIN_EXPR
9400 || TREE_CODE (arg0) == MAX_EXPR)
9401 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9402 return optimize_minmax_comparison (t);
9404 /* If we are comparing an ABS_EXPR with a constant, we can
9405 convert all the cases into explicit comparisons, but they may
9406 well not be faster than doing the ABS and one comparison.
9407 But ABS (X) <= C is a range comparison, which becomes a subtraction
9408 and a comparison, and is probably faster. */
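      /* For example: ABS (X) <= 5 becomes X >= -5 && X <= 5, built
         with TRUTH_ANDIF_EXPR; ARG0 may safely be duplicated because
         it has been checked for side effects.  */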
9409 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9410 && TREE_CODE (arg0) == ABS_EXPR
9411 && ! TREE_SIDE_EFFECTS (arg0)
9412 && (0 != (tem = negate_expr (arg1)))
9413 && TREE_CODE (tem) == INTEGER_CST
9414 && ! TREE_CONSTANT_OVERFLOW (tem))
9415 return fold (build2 (TRUTH_ANDIF_EXPR, type,
9416 build2 (GE_EXPR, type,
9417 TREE_OPERAND (arg0, 0), tem),
9418 build2 (LE_EXPR, type,
9419 TREE_OPERAND (arg0, 0), arg1)));
9421 /* Convert ABS_EXPR<x> >= 0 to true. */
9422 else if (code == GE_EXPR
9423 && tree_expr_nonnegative_p (arg0)
9424 && (integer_zerop (arg1)
9425 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9426 && real_zerop (arg1))))
9427 return omit_one_operand (type, integer_one_node, arg0);
9429 /* Convert ABS_EXPR<x> < 0 to false. */
9430 else if (code == LT_EXPR
9431 && tree_expr_nonnegative_p (arg0)
9432 && (integer_zerop (arg1) || real_zerop (arg1)))
9433 return omit_one_operand (type, integer_zero_node, arg0);
9435 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9436 else if ((code == EQ_EXPR || code == NE_EXPR)
9437 && TREE_CODE (arg0) == ABS_EXPR
9438 && (integer_zerop (arg1) || real_zerop (arg1)))
9439 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), arg1));
9441 /* If this is an EQ or NE comparison with zero and ARG0 is
9442 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9443 two operations, but the latter can be done in one less insn
9444 on machines that have only two-operand insns or on which a
9445 constant cannot be the first operand. */
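      /* For example: ((1 << n) & bar) == 0, a test of bit N of BAR,
         becomes ((bar >> n) & 1) == 0.  */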
9446 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9447 && TREE_CODE (arg0) == BIT_AND_EXPR)
9449 tree arg00 = TREE_OPERAND (arg0, 0);
9450 tree arg01 = TREE_OPERAND (arg0, 1);
9451 if (TREE_CODE (arg00) == LSHIFT_EXPR
9452 && integer_onep (TREE_OPERAND (arg00, 0)))
9453 return
9454 fold (build2 (code, type,
9455 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9456 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9457 arg01, TREE_OPERAND (arg00, 1)),
9458 fold_convert (TREE_TYPE (arg0),
9459 integer_one_node)),
9460 arg1));
9461 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9462 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9463 return
9464 fold (build2 (code, type,
9465 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9466 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9467 arg00, TREE_OPERAND (arg01, 1)),
9468 fold_convert (TREE_TYPE (arg0),
9469 integer_one_node)),
9470 arg1));
9473 /* If this is an NE or EQ comparison of zero against the result of a
9474 signed MOD operation whose second operand is a power of 2, make
9475 the MOD operation unsigned since it is simpler and equivalent. */
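      /* For example, for signed X: X % 4 == 0 becomes
         (unsigned) X % 4U == 0; divisibility by a power of 2 depends
         only on the low-order bits, so the change of signedness is
         safe when testing against zero.  */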
9476 if ((code == NE_EXPR || code == EQ_EXPR)
9477 && integer_zerop (arg1)
9478 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9479 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9480 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9481 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9482 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9483 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9485 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9486 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
9487 fold_convert (newtype,
9488 TREE_OPERAND (arg0, 0)),
9489 fold_convert (newtype,
9490 TREE_OPERAND (arg0, 1))));
9492 return fold (build2 (code, type, newmod,
9493 fold_convert (newtype, arg1)));
9496 /* If this is an NE comparison of zero with an AND of one, remove the
9497 comparison since the AND will give the correct value. */
9498 if (code == NE_EXPR && integer_zerop (arg1)
9499 && TREE_CODE (arg0) == BIT_AND_EXPR
9500 && integer_onep (TREE_OPERAND (arg0, 1)))
9501 return fold_convert (type, arg0);
9503 /* If we have (A & C) == C where C is a power of 2, convert this into
9504 (A & C) != 0. Similarly for NE_EXPR. */
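      /* For example: (A & 8) == 8 becomes (A & 8) != 0, since A & 8
         can only be 0 or 8.  */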
9505 if ((code == EQ_EXPR || code == NE_EXPR)
9506 && TREE_CODE (arg0) == BIT_AND_EXPR
9507 && integer_pow2p (TREE_OPERAND (arg0, 1))
9508 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9509 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9510 arg0, fold_convert (TREE_TYPE (arg0),
9511 integer_zero_node)));
9513 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
9514 2, then fold the expression into shifts and logical operations. */
9515 tem = fold_single_bit_test (code, arg0, arg1, type);
9516 if (tem)
9517 return tem;
9519 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9520 Similarly for NE_EXPR. */
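      /* For example: (A & 3) == 4 is always false, because A & 3 can
         never have the 4 bit set (4 & ~3 is nonzero).  */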
9521 if ((code == EQ_EXPR || code == NE_EXPR)
9522 && TREE_CODE (arg0) == BIT_AND_EXPR
9523 && TREE_CODE (arg1) == INTEGER_CST
9524 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9526 tree notc = fold (build1 (BIT_NOT_EXPR,
9527 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9528 TREE_OPERAND (arg0, 1)));
9529 tree dandnotc = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9530 arg1, notc));
9531 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9532 if (integer_nonzerop (dandnotc))
9533 return omit_one_operand (type, rslt, arg0);
9536 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9537 Similarly for NE_EXPR. */
9538 if ((code == EQ_EXPR || code == NE_EXPR)
9539 && TREE_CODE (arg0) == BIT_IOR_EXPR
9540 && TREE_CODE (arg1) == INTEGER_CST
9541 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9543 tree notd = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
9544 tree candnotd = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9545 TREE_OPERAND (arg0, 1), notd));
9546 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9547 if (integer_nonzerop (candnotd))
9548 return omit_one_operand (type, rslt, arg0);
9551 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9552 and similarly for >= into !=. */
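      /* For example, for unsigned X: X < (1 << 8) becomes
         (X >> 8) == 0, i.e. a test that X fits in the low 8 bits.  */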
9553 if ((code == LT_EXPR || code == GE_EXPR)
9554 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9555 && TREE_CODE (arg1) == LSHIFT_EXPR
9556 && integer_onep (TREE_OPERAND (arg1, 0)))
9557 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9558 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9559 TREE_OPERAND (arg1, 1)),
9560 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9562 else if ((code == LT_EXPR || code == GE_EXPR)
9563 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9564 && (TREE_CODE (arg1) == NOP_EXPR
9565 || TREE_CODE (arg1) == CONVERT_EXPR)
9566 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9567 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9568 return
9569 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9570 fold_convert (TREE_TYPE (arg0),
9571 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9572 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9573 1))),
9574 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9576 /* Simplify comparison of something with itself. (For IEEE
9577 floating-point, we can only do some of these simplifications.) */
9578 if (operand_equal_p (arg0, arg1, 0))
9580 switch (code)
9582 case EQ_EXPR:
9583 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9584 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9585 return constant_boolean_node (1, type);
9586 break;
9588 case GE_EXPR:
9589 case LE_EXPR:
9590 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9591 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9592 return constant_boolean_node (1, type);
9593 return fold (build2 (EQ_EXPR, type, arg0, arg1));
9595 case NE_EXPR:
9596 /* For NE, we can only do this simplification if the operands are
9597 integral or we don't honor IEEE floating point NaNs. */
9598 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9599 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9600 break;
9601 /* ... fall through ... */
9602 case GT_EXPR:
9603 case LT_EXPR:
9604 return constant_boolean_node (0, type);
9605 default:
9606 gcc_unreachable ();
9610 /* If we are comparing an expression that just has comparisons
9611 of two integer values, arithmetic expressions of those comparisons,
9612 and constants, we can simplify it. There are only three cases
9613 to check: the two values can either be equal, the first can be
9614 greater, or the second can be greater. Fold the expression for
9615 those three values. Since each value must be 0 or 1, we have
9616 eight possibilities, each of which corresponds to the constant 0
9617 or 1 or one of the six possible comparisons.
9619 This handles common cases like (a > b) == 0 but also handles
9620 expressions like ((x > y) - (y > x)) > 0, which supposedly
9621 occur in macroized code. */
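      /* For example, folding (a > b) == 0: substituting values making
         A greater than, equal to, and less than B gives the results
         0, 1 and 1, i.e. mask 3 below, so the whole expression
         simplifies to a <= b.  */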
9623 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9625 tree cval1 = 0, cval2 = 0;
9626 int save_p = 0;
9628 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9629 /* Don't handle degenerate cases here; they should already
9630 have been handled anyway. */
9631 && cval1 != 0 && cval2 != 0
9632 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9633 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9634 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9635 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9636 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9637 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9638 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9640 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9641 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9643 /* We can't just pass T to eval_subst in case cval1 or cval2
9644 was the same as ARG1. */
9646 tree high_result
9647 = fold (build2 (code, type,
9648 eval_subst (arg0, cval1, maxval,
9649 cval2, minval),
9650 arg1));
9651 tree equal_result
9652 = fold (build2 (code, type,
9653 eval_subst (arg0, cval1, maxval,
9654 cval2, maxval),
9655 arg1));
9656 tree low_result
9657 = fold (build2 (code, type,
9658 eval_subst (arg0, cval1, minval,
9659 cval2, maxval),
9660 arg1));
9662 /* All three of these results should be 0 or 1. Confirm they
9663 are. Then use those values to select the proper code
9664 to use. */
9666 if ((integer_zerop (high_result)
9667 || integer_onep (high_result))
9668 && (integer_zerop (equal_result)
9669 || integer_onep (equal_result))
9670 && (integer_zerop (low_result)
9671 || integer_onep (low_result)))
9673 /* Make a 3-bit mask with the high-order bit being the
9674 value for `>', the next for `=', and the low for `<'. */
9675 switch ((integer_onep (high_result) * 4)
9676 + (integer_onep (equal_result) * 2)
9677 + integer_onep (low_result))
9679 case 0:
9680 /* Always false. */
9681 return omit_one_operand (type, integer_zero_node, arg0);
9682 case 1:
9683 code = LT_EXPR;
9684 break;
9685 case 2:
9686 code = EQ_EXPR;
9687 break;
9688 case 3:
9689 code = LE_EXPR;
9690 break;
9691 case 4:
9692 code = GT_EXPR;
9693 break;
9694 case 5:
9695 code = NE_EXPR;
9696 break;
9697 case 6:
9698 code = GE_EXPR;
9699 break;
9700 case 7:
9701 /* Always true. */
9702 return omit_one_operand (type, integer_one_node, arg0);
9705 tem = build2 (code, type, cval1, cval2);
9706 if (save_p)
9707 return save_expr (tem);
9708 else
9709 return fold (tem);
9714 /* If this is a comparison of a field, we may be able to simplify it. */
9715 if (((TREE_CODE (arg0) == COMPONENT_REF
9716 && lang_hooks.can_use_bit_fields_p ())
9717 || TREE_CODE (arg0) == BIT_FIELD_REF)
9718 && (code == EQ_EXPR || code == NE_EXPR)
9719 /* Handle the constant case even without -O
9720 to make sure the warnings are given. */
9721 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9723 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9724 if (t1)
9725 return t1;
9728 /* If this is a comparison of complex values and either or both sides
9729 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
9730 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
9731 This may prevent needless evaluations. */
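      /* For example: a == b on complex operands becomes
         REALPART (a) == REALPART (b) && IMAGPART (a) == IMAGPART (b),
         with != using || instead, so the imaginary halves may never
         need to be evaluated.  */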
9732 if ((code == EQ_EXPR || code == NE_EXPR)
9733 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
9734 && (TREE_CODE (arg0) == COMPLEX_EXPR
9735 || TREE_CODE (arg1) == COMPLEX_EXPR
9736 || TREE_CODE (arg0) == COMPLEX_CST
9737 || TREE_CODE (arg1) == COMPLEX_CST))
9739 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
9740 tree real0, imag0, real1, imag1;
9742 arg0 = save_expr (arg0);
9743 arg1 = save_expr (arg1);
9744 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
9745 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
9746 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
9747 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
9749 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
9750 : TRUTH_ORIF_EXPR),
9751 type,
9752 fold (build2 (code, type, real0, real1)),
9753 fold (build2 (code, type, imag0, imag1))));
9756 /* Optimize comparisons of strlen vs zero to a compare of the
9757 first character of the string vs zero. To wit,
9758 strlen(ptr) == 0 => *ptr == 0
9759 strlen(ptr) != 0 => *ptr != 0
9760 Other cases should reduce to one of these two (or a constant)
9761 due to the return value of strlen being unsigned. */
9762 if ((code == EQ_EXPR || code == NE_EXPR)
9763 && integer_zerop (arg1)
9764 && TREE_CODE (arg0) == CALL_EXPR)
9766 tree fndecl = get_callee_fndecl (arg0);
9767 tree arglist;
9769 if (fndecl
9770 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9771 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9772 && (arglist = TREE_OPERAND (arg0, 1))
9773 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9774 && ! TREE_CHAIN (arglist))
9775 return fold (build2 (code, type,
9776 build1 (INDIRECT_REF, char_type_node,
9777 TREE_VALUE (arglist)),
9778 fold_convert (char_type_node,
9779 integer_zero_node)));
9782 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9783 into a single range test. */
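      /* For example, for unsigned X: X / 4 == 3 holds exactly when
         12 <= X && X <= 15, so fold_div_compare can emit a single
         range test (illustratively, X - 12 <= 3 in unsigned
         arithmetic).  */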
9784 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9785 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9786 && TREE_CODE (arg1) == INTEGER_CST
9787 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9788 && !integer_zerop (TREE_OPERAND (arg0, 1))
9789 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9790 && !TREE_OVERFLOW (arg1))
9792 t1 = fold_div_compare (code, type, arg0, arg1);
9793 if (t1 != NULL_TREE)
9794 return t1;
9797 if ((code == EQ_EXPR || code == NE_EXPR)
9798 && !TREE_SIDE_EFFECTS (arg0)
9799 && integer_zerop (arg1)
9800 && tree_expr_nonzero_p (arg0))
9801 return constant_boolean_node (code == NE_EXPR, type);
9803 t1 = fold_relational_const (code, type, arg0, arg1);
9804 return t1 == NULL_TREE ? t : t1;
9806 case UNORDERED_EXPR:
9807 case ORDERED_EXPR:
9808 case UNLT_EXPR:
9809 case UNLE_EXPR:
9810 case UNGT_EXPR:
9811 case UNGE_EXPR:
9812 case UNEQ_EXPR:
9813 case LTGT_EXPR:
9814 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9816 t1 = fold_relational_const (code, type, arg0, arg1);
9817 if (t1 != NULL_TREE)
9818 return t1;
9821 /* If the first operand is NaN, the result is constant. */
9822 if (TREE_CODE (arg0) == REAL_CST
9823 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9824 && (code != LTGT_EXPR || ! flag_trapping_math))
9826 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9827 ? integer_zero_node
9828 : integer_one_node;
9829 return omit_one_operand (type, t1, arg1);
9832 /* If the second operand is NaN, the result is constant. */
9833 if (TREE_CODE (arg1) == REAL_CST
9834 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9835 && (code != LTGT_EXPR || ! flag_trapping_math))
9837 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9838 ? integer_zero_node
9839 : integer_one_node;
9840 return omit_one_operand (type, t1, arg0);
9843 /* Simplify unordered comparison of something with itself. */
9844 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9845 && operand_equal_p (arg0, arg1, 0))
9846 return constant_boolean_node (1, type);
9848 if (code == LTGT_EXPR
9849 && !flag_trapping_math
9850 && operand_equal_p (arg0, arg1, 0))
9851 return constant_boolean_node (0, type);
9853 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9855 tree targ0 = strip_float_extensions (arg0);
9856 tree targ1 = strip_float_extensions (arg1);
9857 tree newtype = TREE_TYPE (targ0);
9859 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9860 newtype = TREE_TYPE (targ1);
9862 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9863 return fold (build2 (code, type, fold_convert (newtype, targ0),
9864 fold_convert (newtype, targ1)));
9867 return t;
9869 case COMPOUND_EXPR:
9870 /* When pedantic, a compound expression can be neither an lvalue
9871 nor an integer constant expression. */
9872 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9873 return t;
9874 /* Don't let (0, 0) be a null pointer constant. */
9875 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9876 : fold_convert (type, arg1);
9877 return pedantic_non_lvalue (tem);
9879 case COMPLEX_EXPR:
9880 if (wins)
9881 return build_complex (type, arg0, arg1);
9882 return t;
9884 default:
9885 return t;
9886 } /* switch (code) */
9889 #ifdef ENABLE_FOLD_CHECKING
9890 #undef fold
9892 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
9893 static void fold_check_failed (tree, tree);
9894 void print_fold_checksum (tree);
9896 /* When --enable-checking=fold is enabled, compute a digest of EXPR
9897 before and after the actual fold call, to verify that fold did not
9898 accidentally change the original expr. */
9900 tree
9901 fold (tree expr)
9903 tree ret;
9904 struct md5_ctx ctx;
9905 unsigned char checksum_before[16], checksum_after[16];
9906 htab_t ht;
9908 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9909 md5_init_ctx (&ctx);
9910 fold_checksum_tree (expr, &ctx, ht);
9911 md5_finish_ctx (&ctx, checksum_before);
9912 htab_empty (ht);
9914 ret = fold_1 (expr);
9916 md5_init_ctx (&ctx);
9917 fold_checksum_tree (expr, &ctx, ht);
9918 md5_finish_ctx (&ctx, checksum_after);
9919 htab_delete (ht);
9921 if (memcmp (checksum_before, checksum_after, 16))
9922 fold_check_failed (expr, ret);
9924 return ret;
9927 void
9928 print_fold_checksum (tree expr)
9930 struct md5_ctx ctx;
9931 unsigned char checksum[16], cnt;
9932 htab_t ht;
9934 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9935 md5_init_ctx (&ctx);
9936 fold_checksum_tree (expr, &ctx, ht);
9937 md5_finish_ctx (&ctx, checksum);
9938 htab_delete (ht);
9939 for (cnt = 0; cnt < 16; ++cnt)
9940 fprintf (stderr, "%02x", checksum[cnt]);
9941 putc ('\n', stderr);
9944 static void
9945 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
9947 internal_error ("fold check: original tree changed by fold");
9950 static void
9951 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
9953 void **slot;
9954 enum tree_code code;
9955 char buf[sizeof (struct tree_decl)];
9956 int i, len;
9958 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
9959 <= sizeof (struct tree_decl))
9960 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
9961 if (expr == NULL)
9962 return;
9963 slot = htab_find_slot (ht, expr, INSERT);
9964 if (*slot != NULL)
9965 return;
9966 *slot = expr;
9967 code = TREE_CODE (expr);
9968 if (TREE_CODE_CLASS (code) == tcc_declaration
9969 && DECL_ASSEMBLER_NAME_SET_P (expr))
9971 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9972 memcpy (buf, expr, tree_size (expr));
9973 expr = (tree) buf;
9974 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9976 else if (TREE_CODE_CLASS (code) == tcc_type
9977 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
9978 || TYPE_CACHED_VALUES_P (expr)))
9980 /* Allow these fields to be modified. */
9981 memcpy (buf, expr, tree_size (expr));
9982 expr = (tree) buf;
9983 TYPE_POINTER_TO (expr) = NULL;
9984 TYPE_REFERENCE_TO (expr) = NULL;
9985 TYPE_CACHED_VALUES_P (expr) = 0;
9986 TYPE_CACHED_VALUES (expr) = NULL;
9988 md5_process_bytes (expr, tree_size (expr), ctx);
9989 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
9990 if (TREE_CODE_CLASS (code) != tcc_type
9991 && TREE_CODE_CLASS (code) != tcc_declaration)
9992 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
9993 switch (TREE_CODE_CLASS (code))
9995 case tcc_constant:
9996 switch (code)
9998 case STRING_CST:
9999 md5_process_bytes (TREE_STRING_POINTER (expr),
10000 TREE_STRING_LENGTH (expr), ctx);
10001 break;
10002 case COMPLEX_CST:
10003 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10004 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10005 break;
10006 case VECTOR_CST:
10007 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10008 break;
10009 default:
10010 break;
10012 break;
10013 case tcc_exceptional:
10014 switch (code)
10016 case TREE_LIST:
10017 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10018 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10019 break;
10020 case TREE_VEC:
10021 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10022 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10023 break;
10024 default:
10025 break;
10027 break;
10028 case tcc_expression:
10029 case tcc_reference:
10030 case tcc_comparison:
10031 case tcc_unary:
10032 case tcc_binary:
10033 case tcc_statement:
10034 len = TREE_CODE_LENGTH (code);
10035 for (i = 0; i < len; ++i)
10036 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10037 break;
10038 case tcc_declaration:
10039 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10040 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10041 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10042 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10043 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
10044 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10045 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10046 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10047 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10048 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10049 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10050 break;
10051 case tcc_type:
10052 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10053 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10054 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10055 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10056 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10057 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10058 if (INTEGRAL_TYPE_P (expr)
10059 || SCALAR_FLOAT_TYPE_P (expr))
10061 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10062 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10064 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10065 if (TREE_CODE (expr) == RECORD_TYPE
10066 || TREE_CODE (expr) == UNION_TYPE
10067 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10068 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10069 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10070 break;
10071 default:
10072 break;
10076 #endif
10078 /* Perform constant folding and related simplification of initializer
10079 expression EXPR. This behaves identically to "fold" but ignores
10080 potential run-time traps and exceptions that fold must preserve. */
10082 tree
10083 fold_initializer (tree expr)
10085 int saved_signaling_nans = flag_signaling_nans;
10086 int saved_trapping_math = flag_trapping_math;
10087 int saved_rounding_math = flag_rounding_math;
10088 int saved_trapv = flag_trapv;
10089 tree result;
10091 flag_signaling_nans = 0;
10092 flag_trapping_math = 0;
10093 flag_rounding_math = 0;
10094 flag_trapv = 0;
10096 result = fold (expr);
10098 flag_signaling_nans = saved_signaling_nans;
10099 flag_trapping_math = saved_trapping_math;
10100 flag_rounding_math = saved_rounding_math;
10101 flag_trapv = saved_trapv;
10103 return result;
10106 /* Determine whether the first argument is a multiple of the second argument.
10107 Return 0 if it is not, or if we cannot easily determine it to be.
10109 An example of the sort of thing we care about (at this point; this routine
10110 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10111 fold cases do now) is discovering that
10113 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10115 is a multiple of
10117 SAVE_EXPR (J * 8)
10119 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10121 This code also handles discovering that
10123 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10125 is a multiple of 8 so we don't have to worry about dealing with a
10126 possible remainder.
10128 Note that we *look* inside a SAVE_EXPR only to determine how it was
10129 calculated; it is not safe for fold to do much of anything else with the
10130 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10131 at run time. For example, the latter example above *cannot* be implemented
10132 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10133 evaluation time of the original SAVE_EXPR is not necessarily the same at
10134 the time the new expression is evaluated. The only optimization of this
10135 sort that would be valid is changing
10137 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10139 divided by 8 to
10141 SAVE_EXPR (I) * SAVE_EXPR (J)
10143 (where the same SAVE_EXPR (J) is used in the original and the
10144 transformed version). */
10146 static int
10147 multiple_of_p (tree type, tree top, tree bottom)
10149 if (operand_equal_p (top, bottom, 0))
10150 return 1;
10152 if (TREE_CODE (type) != INTEGER_TYPE)
10153 return 0;
10155 switch (TREE_CODE (top))
10157 case BIT_AND_EXPR:
10158 /* Bitwise and provides a power of two multiple. If the mask is
10159 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
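      /* For example, in a signed type, (X & -4) is always a multiple
         of 4: the mask clears the two low-order bits, and it suffices
         that either operand of the AND is a multiple of BOTTOM.  */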
10160 if (!integer_pow2p (bottom))
10161 return 0;
10162 /* FALLTHRU */
10164 case MULT_EXPR:
10165 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10166 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10168 case PLUS_EXPR:
10169 case MINUS_EXPR:
10170 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10171 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10173 case LSHIFT_EXPR:
10174 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10176 tree op1, t1;
10178 op1 = TREE_OPERAND (top, 1);
10179 /* const_binop may not detect overflow correctly,
10180 so check for it explicitly here. */
10181 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10182 > TREE_INT_CST_LOW (op1)
10183 && TREE_INT_CST_HIGH (op1) == 0
10184 && 0 != (t1 = fold_convert (type,
10185 const_binop (LSHIFT_EXPR,
10186 size_one_node,
10187 op1, 0)))
10188 && ! TREE_OVERFLOW (t1))
10189 return multiple_of_p (type, t1, bottom);
10191 return 0;
10193 case NOP_EXPR:
10194 /* Can't handle conversions from non-integral or wider integral type. */
10195 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10196 || (TYPE_PRECISION (type)
10197 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10198 return 0;
10200 /* ... fall through ... */
10202 case SAVE_EXPR:
10203 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10205 case INTEGER_CST:
10206 if (TREE_CODE (bottom) != INTEGER_CST
10207 || (TYPE_UNSIGNED (type)
10208 && (tree_int_cst_sgn (top) < 0
10209 || tree_int_cst_sgn (bottom) < 0)))
10210 return 0;
10211 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10212 top, bottom, 0));
10214 default:
10215 return 0;
10219 /* Return true if `t' is known to be non-negative. */
10222 tree_expr_nonnegative_p (tree t)
10224 switch (TREE_CODE (t))
10226 case ABS_EXPR:
10227 return 1;
10229 case INTEGER_CST:
10230 return tree_int_cst_sgn (t) >= 0;
10232 case REAL_CST:
10233 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10235 case PLUS_EXPR:
10236 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10237 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10238 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10240 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10241 both unsigned and at least 2 bits shorter than the result. */
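      /* For example, two 8-bit unsigned values zero-extended into a
         32-bit int sum to at most 510, which needs only 9 bits, so the
         sign bit can never be set.  */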
10242 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10243 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10244 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10246 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10247 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10248 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10249 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10251 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10252 TYPE_PRECISION (inner2)) + 1;
10253 return prec < TYPE_PRECISION (TREE_TYPE (t));
10256 break;
10258 case MULT_EXPR:
10259 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10261 /* x * x for floating point x is always non-negative. */
10262 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10263 return 1;
10264 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10265 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10268 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10269 both unsigned and their combined precision is less than the result's. */
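      /* For example, two 8-bit unsigned values zero-extended into a
         32-bit int multiply to at most 255 * 255, which fits in 16
         bits, so again the sign bit can never be set.  */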
10270 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10271 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10272 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10274 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10275 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10276 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10277 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10278 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10279 < TYPE_PRECISION (TREE_TYPE (t));
10281 return 0;
10283 case TRUNC_DIV_EXPR:
10284 case CEIL_DIV_EXPR:
10285 case FLOOR_DIV_EXPR:
10286 case ROUND_DIV_EXPR:
10287 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10288 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10290 case TRUNC_MOD_EXPR:
10291 case CEIL_MOD_EXPR:
10292 case FLOOR_MOD_EXPR:
10293 case ROUND_MOD_EXPR:
10294 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10296 case RDIV_EXPR:
10297 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10298 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10300 case BIT_AND_EXPR:
10301 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10302 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10303 case BIT_IOR_EXPR:
10304 case BIT_XOR_EXPR:
10305 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10306 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10308 case NOP_EXPR:
10310 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10311 tree outer_type = TREE_TYPE (t);
10313 if (TREE_CODE (outer_type) == REAL_TYPE)
10315 if (TREE_CODE (inner_type) == REAL_TYPE)
10316 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10317 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10319 if (TYPE_UNSIGNED (inner_type))
10320 return 1;
10321 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10324 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10326 if (TREE_CODE (inner_type) == REAL_TYPE)
10327 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10328 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10329 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10330 && TYPE_UNSIGNED (inner_type);
10333 break;
10335 case COND_EXPR:
10336 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10337 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10338 case COMPOUND_EXPR:
10339 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10340 case MIN_EXPR:
10341 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10342 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10343 case MAX_EXPR:
10344 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10345 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10346 case MODIFY_EXPR:
10347 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10348 case BIND_EXPR:
10349 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10350 case SAVE_EXPR:
10351 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10352 case NON_LVALUE_EXPR:
10353 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10354 case FLOAT_EXPR:
10355 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10357 case TARGET_EXPR:
10359 tree temp = TARGET_EXPR_SLOT (t);
10360 t = TARGET_EXPR_INITIAL (t);
10362 /* If the initializer is non-void, then it's a normal expression
10363 that will be assigned to the slot. */
10364 if (!VOID_TYPE_P (t))
10365 return tree_expr_nonnegative_p (t);
10367 /* Otherwise, the initializer sets the slot in some way. One common
10368 way is an assignment statement at the end of the initializer. */
10369 while (1)
10371 if (TREE_CODE (t) == BIND_EXPR)
10372 t = expr_last (BIND_EXPR_BODY (t));
10373 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10374 || TREE_CODE (t) == TRY_CATCH_EXPR)
10375 t = expr_last (TREE_OPERAND (t, 0));
10376 else if (TREE_CODE (t) == STATEMENT_LIST)
10377 t = expr_last (t);
10378 else
10379 break;
10381 if (TREE_CODE (t) == MODIFY_EXPR
10382 && TREE_OPERAND (t, 0) == temp)
10383 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10385 return 0;
10388 case CALL_EXPR:
10390 tree fndecl = get_callee_fndecl (t);
10391 tree arglist = TREE_OPERAND (t, 1);
10392 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10393 switch (DECL_FUNCTION_CODE (fndecl))
10395 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10396 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10397 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10398 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10400 CASE_BUILTIN_F (BUILT_IN_ACOS)
10401 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10402 CASE_BUILTIN_F (BUILT_IN_CABS)
10403 CASE_BUILTIN_F (BUILT_IN_COSH)
10404 CASE_BUILTIN_F (BUILT_IN_ERFC)
10405 CASE_BUILTIN_F (BUILT_IN_EXP)
10406 CASE_BUILTIN_F (BUILT_IN_EXP10)
10407 CASE_BUILTIN_F (BUILT_IN_EXP2)
10408 CASE_BUILTIN_F (BUILT_IN_FABS)
10409 CASE_BUILTIN_F (BUILT_IN_FDIM)
10410 CASE_BUILTIN_F (BUILT_IN_FREXP)
10411 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10412 CASE_BUILTIN_F (BUILT_IN_POW10)
10413 CASE_BUILTIN_I (BUILT_IN_FFS)
10414 CASE_BUILTIN_I (BUILT_IN_PARITY)
10415 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10416 /* Always true. */
10417 return 1;
10419 CASE_BUILTIN_F (BUILT_IN_SQRT)
10420 /* sqrt(-0.0) is -0.0. */
10421 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10422 return 1;
10423 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10425 CASE_BUILTIN_F (BUILT_IN_ASINH)
10426 CASE_BUILTIN_F (BUILT_IN_ATAN)
10427 CASE_BUILTIN_F (BUILT_IN_ATANH)
10428 CASE_BUILTIN_F (BUILT_IN_CBRT)
10429 CASE_BUILTIN_F (BUILT_IN_CEIL)
10430 CASE_BUILTIN_F (BUILT_IN_ERF)
10431 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10432 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10433 CASE_BUILTIN_F (BUILT_IN_FMOD)
10434 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10435 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10436 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10437 CASE_BUILTIN_F (BUILT_IN_LRINT)
10438 CASE_BUILTIN_F (BUILT_IN_LROUND)
10439 CASE_BUILTIN_F (BUILT_IN_MODF)
10440 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
10441 CASE_BUILTIN_F (BUILT_IN_POW)
10442 CASE_BUILTIN_F (BUILT_IN_RINT)
10443 CASE_BUILTIN_F (BUILT_IN_ROUND)
10444 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
10445 CASE_BUILTIN_F (BUILT_IN_SINH)
10446 CASE_BUILTIN_F (BUILT_IN_TANH)
10447 CASE_BUILTIN_F (BUILT_IN_TRUNC)
10448 /* True if the 1st argument is nonnegative. */
10449 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10451 CASE_BUILTIN_F (BUILT_IN_FMAX)
10452 /* True if the 1st OR 2nd arguments are nonnegative. */
10453 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10454 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10456 CASE_BUILTIN_F (BUILT_IN_FMIN)
10457 /* True if the 1st AND 2nd arguments are nonnegative. */
10458 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10459 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10461 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
10462 /* True if the 2nd argument is nonnegative. */
10463 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10465 default:
10466 break;
10467 #undef CASE_BUILTIN_F
10468 #undef CASE_BUILTIN_I
10472 /* ... fall through ... */
10474 default:
10475 if (truth_value_p (TREE_CODE (t)))
10476 /* Truth values evaluate to 0 or 1, which is nonnegative. */
10477 return 1;
10480 /* We don't know the sign of `t', so be conservative and return false. */
10481 return 0;
10484 /* Return true when T is an address and is known to be nonzero.
10485 For floating point we further ensure that T is not denormal.
10486 Similar logic is present in nonzero_address in rtlanal.h. */
10488 static bool
10489 tree_expr_nonzero_p (tree t)
10491 tree type = TREE_TYPE (t);
10493 /* Doing something useful for floating point would need more work. */
10494 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10495 return false;
10497 switch (TREE_CODE (t))
10499 case ABS_EXPR:
10500 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10501 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10503 case INTEGER_CST:
10504 /* We used to test for !integer_zerop here. This does not work correctly
10505 if TREE_CONSTANT_OVERFLOW (t). */
10506 return (TREE_INT_CST_LOW (t) != 0
10507 || TREE_INT_CST_HIGH (t) != 0);
10509 case PLUS_EXPR:
10510 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10512 /* In the presence of negative values it is hard
10513 to say anything definite. */
10514 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10515 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10516 return false;
10517 /* One of the operands must be positive and the other non-negative. */
10518 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10519 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10521 break;
10523 case MULT_EXPR:
10524 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10526 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10527 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10529 break;
10531 case NOP_EXPR:
10533 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10534 tree outer_type = TREE_TYPE (t);
10536 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
10537 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
10539 break;
10541 case ADDR_EXPR:
10543 tree base = get_base_address (TREE_OPERAND (t, 0));
10545 if (!base)
10546 return false;
10548 /* Weak declarations may link to NULL. */
10549 if (DECL_P (base))
10550 return !DECL_WEAK (base);
10552 /* Constants are never weak. */
10553 if (CONSTANT_CLASS_P (base))
10554 return true;
10556 return false;
10559 case COND_EXPR:
10560 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10561 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
10563 case MIN_EXPR:
10564 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10565 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10567 case MAX_EXPR:
10568 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
10570 /* When both operands are nonzero, then MAX must be too. */
10571 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
10572 return true;
10574 /* MAX where operand 0 is positive is positive. */
10575 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10577 /* MAX where operand 1 is positive is positive. */
10578 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10579 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10580 return true;
10581 break;
10583 case COMPOUND_EXPR:
10584 case MODIFY_EXPR:
10585 case BIND_EXPR:
10586 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
10588 case SAVE_EXPR:
10589 case NON_LVALUE_EXPR:
10590 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10592 case BIT_IOR_EXPR:
10593 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10594 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10596 default:
10597 break;
10599 return false;
10602 /* See if we are applying CODE, a relational comparison, to the highest
10603 or lowest possible integer of TYPE. If so, then the result is a compile
10604 time constant. */
10606 static tree
10607 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
10608 tree *op1_p)
10610 tree op0 = *op0_p;
10611 tree op1 = *op1_p;
10612 enum tree_code code = *code_p;
10613 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
10615 if (TREE_CODE (op1) == INTEGER_CST
10616 && ! TREE_CONSTANT_OVERFLOW (op1)
10617 && width <= HOST_BITS_PER_WIDE_INT
10618 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
10619 || POINTER_TYPE_P (TREE_TYPE (op1))))
10621 unsigned HOST_WIDE_INT signed_max;
10622 unsigned HOST_WIDE_INT max, min;
10624 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
10626 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
10628 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10629 min = 0;
10631 else
10633 max = signed_max;
10634 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10637 if (TREE_INT_CST_HIGH (op1) == 0
10638 && TREE_INT_CST_LOW (op1) == max)
10639 switch (code)
10641 case GT_EXPR:
10642 return omit_one_operand (type, integer_zero_node, op0);
10644 case GE_EXPR:
10645 *code_p = EQ_EXPR;
10646 break;
10647 case LE_EXPR:
10648 return omit_one_operand (type, integer_one_node, op0);
10650 case LT_EXPR:
10651 *code_p = NE_EXPR;
10652 break;
10654 /* The GE_EXPR and LT_EXPR cases above are not normally
10655 reached because of previous transformations. */
10657 default:
10658 break;
10660 else if (TREE_INT_CST_HIGH (op1) == 0
10661 && TREE_INT_CST_LOW (op1) == max - 1)
10662 switch (code)
10664 case GT_EXPR:
10665 *code_p = EQ_EXPR;
10666 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10667 break;
10668 case LE_EXPR:
10669 *code_p = NE_EXPR;
10670 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10671 break;
10672 default:
10673 break;
10675 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10676 && TREE_INT_CST_LOW (op1) == min)
10677 switch (code)
10679 case LT_EXPR:
10680 return omit_one_operand (type, integer_zero_node, op0);
10682 case LE_EXPR:
10683 *code_p = EQ_EXPR;
10684 break;
10686 case GE_EXPR:
10687 return omit_one_operand (type, integer_one_node, op0);
10689 case GT_EXPR:
10690 *code_p = NE_EXPR;
10691 break;
10693 default:
10694 break;
10696 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10697 && TREE_INT_CST_LOW (op1) == min + 1)
10698 switch (code)
10700 case GE_EXPR:
10701 *code_p = NE_EXPR;
10702 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10703 break;
10704 case LT_EXPR:
10705 *code_p = EQ_EXPR;
10706 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10707 break;
10708 default:
10709 break;
10712 else if (TREE_INT_CST_HIGH (op1) == 0
10713 && TREE_INT_CST_LOW (op1) == signed_max
10714 && TYPE_UNSIGNED (TREE_TYPE (op1))
10715 /* signed_type does not work on pointer types. */
10716 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
10718 /* The following case also applies to X < signed_max+1
10719 and X >= signed_max+1 because of previous transformations. */
10720 if (code == LE_EXPR || code == GT_EXPR)
10722 tree st0, st1, exp, retval;
10723 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
10724 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
10726 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10727 type,
10728 fold_convert (st0, op0),
10729 fold_convert (st1, integer_zero_node));
10731 retval = fold_binary_to_constant (TREE_CODE (exp),
10732 TREE_TYPE (exp),
10733 TREE_OPERAND (exp, 0),
10734 TREE_OPERAND (exp, 1));
10736 /* If we are in gimple form, then returning EXP would create
10737 non-gimple expressions.  Clearing it is safe and ensures
10738 we do not allow a non-gimple expression to escape.  */
10739 if (in_gimple_form)
10740 exp = NULL;
10742 return (retval ? retval : exp);
10747 return NULL_TREE;
10751 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
10752 attempt to fold the expression to a constant without modifying TYPE,
10753 OP0 or OP1.
10755 If the expression can be simplified to a constant, then return
10756 the constant.  If the expression cannot be simplified to a
10757 constant, then return NULL_TREE.
10759 Note this is primarily designed to be called after gimplification
10760 of the tree structures and when at least one operand is a constant.
10761 As a result of those simplifying assumptions this routine is far
10762 simpler than the generic fold routine. */
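/* A minimal usage sketch (variable names hypothetical):

     tree cst = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                         build_int_cst (integer_type_node, 2),
                                         build_int_cst (integer_type_node, 3));

   yields the INTEGER_CST 5, while passing a non-constant operand to a
   case with no special handling yields NULL_TREE.  */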
10764 tree
10765 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
10767 int wins = 1;
10768 tree subop0;
10769 tree subop1;
10770 tree tem;
10772 /* If this is a commutative operation, and ARG0 is a constant, move it
10773 to ARG1 to reduce the number of tests below. */
10774 if (commutative_tree_code (code)
10775 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
10777 tem = op0;
10778 op0 = op1;
10779 op1 = tem;
10782 /* If either operand is a complex type, extract its real component. */
10783 if (TREE_CODE (op0) == COMPLEX_CST)
10784 subop0 = TREE_REALPART (op0);
10785 else
10786 subop0 = op0;
10788 if (TREE_CODE (op1) == COMPLEX_CST)
10789 subop1 = TREE_REALPART (op1);
10790 else
10791 subop1 = op1;
10793 /* Note if either argument is not a real or integer constant.
10794 With a few exceptions, simplification is limited to cases
10795 where both arguments are constants. */
10796 if ((TREE_CODE (subop0) != INTEGER_CST
10797 && TREE_CODE (subop0) != REAL_CST)
10798 || (TREE_CODE (subop1) != INTEGER_CST
10799 && TREE_CODE (subop1) != REAL_CST))
10800 wins = 0;
10802 switch (code)
10804 case PLUS_EXPR:
10805 /* (plus (address) (const_int)) is a constant. */
10806 if (TREE_CODE (op0) == PLUS_EXPR
10807 && TREE_CODE (op1) == INTEGER_CST
10808 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
10809 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
10810 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
10811 == ADDR_EXPR)))
10812 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
10814 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
10815 const_binop (PLUS_EXPR, op1,
10816 TREE_OPERAND (op0, 1), 0));
10818 case BIT_XOR_EXPR:
10820 binary:
10821 if (!wins)
10822 return NULL_TREE;
10824 /* Both arguments are constants. Simplify. */
10825 tem = const_binop (code, op0, op1, 0);
10826 if (tem != NULL_TREE)
10828 /* The return value should always have the same type as
10829 the original expression. */
10830 if (TREE_TYPE (tem) != type)
10831 tem = fold_convert (type, tem);
10833 return tem;
10835 return NULL_TREE;
10837 case MINUS_EXPR:
10838 /* Fold &x - &x. This can happen from &x.foo - &x.
10839 This is unsafe for certain floats even in non-IEEE formats.
10840 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10841 Also note that operand_equal_p is always false if an
10842 operand is volatile. */
10843 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
10844 return fold_convert (type, integer_zero_node);
10846 goto binary;
10848 case MULT_EXPR:
10849 case BIT_AND_EXPR:
10850 /* Special case multiplication or bitwise AND where one argument
10851 is zero. */
10852 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
10853 return omit_one_operand (type, op1, op0);
10854 else
10855 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
10856 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
10857 && real_zerop (op1))
10858 return omit_one_operand (type, op1, op0);
10860 goto binary;
10862 case BIT_IOR_EXPR:
10863 /* Special case when we know the result will be all ones. */
10864 if (integer_all_onesp (op1))
10865 return omit_one_operand (type, op1, op0);
10867 goto binary;
10869 case TRUNC_DIV_EXPR:
10870 case ROUND_DIV_EXPR:
10871 case FLOOR_DIV_EXPR:
10872 case CEIL_DIV_EXPR:
10873 case EXACT_DIV_EXPR:
10874 case TRUNC_MOD_EXPR:
10875 case ROUND_MOD_EXPR:
10876 case FLOOR_MOD_EXPR:
10877 case CEIL_MOD_EXPR:
10878 case RDIV_EXPR:
10879 /* Division by zero is undefined. */
10880 if (integer_zerop (op1))
10881 return NULL_TREE;
10883 if (TREE_CODE (op1) == REAL_CST
10884 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
10885 && real_zerop (op1))
10886 return NULL_TREE;
10888 goto binary;
10890 case MIN_EXPR:
10891 if (INTEGRAL_TYPE_P (type)
10892 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10893 return omit_one_operand (type, op1, op0);
10895 goto binary;
10897 case MAX_EXPR:
10898 if (INTEGRAL_TYPE_P (type)
10899 && TYPE_MAX_VALUE (type)
10900 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10901 return omit_one_operand (type, op1, op0);
10903 goto binary;
10905 case RSHIFT_EXPR:
10906 /* Optimize -1 >> x for arithmetic right shifts. */
10907 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
10908 return omit_one_operand (type, op0, op1);
10909 /* ... fall through ... */
10911 case LSHIFT_EXPR:
10912 if (integer_zerop (op0))
10913 return omit_one_operand (type, op0, op1);
10915 /* Since a negative shift count is not well-defined, don't
10916 try to compute it in the compiler. */
10917 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
10918 return NULL_TREE;
10920 goto binary;
10922 case LROTATE_EXPR:
10923 case RROTATE_EXPR:
10924 /* -1 rotated either direction by any amount is still -1. */
10925 if (integer_all_onesp (op0))
10926 return omit_one_operand (type, op0, op1);
10928 /* 0 rotated either direction by any amount is still zero. */
10929 if (integer_zerop (op0))
10930 return omit_one_operand (type, op0, op1);
10932 goto binary;
10934 case COMPLEX_EXPR:
10935 if (wins)
10936 return build_complex (type, op0, op1);
10937 return NULL_TREE;
10939 case LT_EXPR:
10940 case LE_EXPR:
10941 case GT_EXPR:
10942 case GE_EXPR:
10943 case EQ_EXPR:
10944 case NE_EXPR:
10945 /* If one arg is a real or integer constant, put it last. */
10946 if ((TREE_CODE (op0) == INTEGER_CST
10947 && TREE_CODE (op1) != INTEGER_CST)
10948 || (TREE_CODE (op0) == REAL_CST
10949 && TREE_CODE (op1) != REAL_CST))
10951 tree temp;
10953 temp = op0;
10954 op0 = op1;
10955 op1 = temp;
10956 code = swap_tree_comparison (code);
10959 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10960 This transformation affects the cases which are handled in later
10961 optimizations involving comparisons with non-negative constants. */
10962 if (TREE_CODE (op1) == INTEGER_CST
10963 && TREE_CODE (op0) != INTEGER_CST
10964 && tree_int_cst_sgn (op1) > 0)
10966 switch (code)
10968 case GE_EXPR:
10969 code = GT_EXPR;
10970 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10971 break;
10973 case LT_EXPR:
10974 code = LE_EXPR;
10975 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10976 break;
10978 default:
10979 break;
10983 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10984 if (tem)
10985 return tem;
10987 /* Fall through. */
10989 case ORDERED_EXPR:
10990 case UNORDERED_EXPR:
10991 case UNLT_EXPR:
10992 case UNLE_EXPR:
10993 case UNGT_EXPR:
10994 case UNGE_EXPR:
10995 case UNEQ_EXPR:
10996 case LTGT_EXPR:
10997 if (!wins)
10998 return NULL_TREE;
11000 return fold_relational_const (code, type, op0, op1);
11002 case RANGE_EXPR:
11003 /* This could probably be handled. */
11004 return NULL_TREE;
11006 case TRUTH_AND_EXPR:
11007 /* If second arg is constant zero, result is zero, but first arg
11008 must be evaluated. */
11009 if (integer_zerop (op1))
11010 return omit_one_operand (type, op1, op0);
11011 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11012 case will be handled here. */
11013 if (integer_zerop (op0))
11014 return omit_one_operand (type, op0, op1);
11015 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11016 return constant_boolean_node (true, type);
11017 return NULL_TREE;
11019 case TRUTH_OR_EXPR:
11020 /* If second arg is constant true, result is true, but we must
11021 evaluate first arg. */
11022 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
11023 return omit_one_operand (type, op1, op0);
11024 /* Likewise for first arg, but note this only occurs here for
11025 TRUTH_OR_EXPR. */
11026 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
11027 return omit_one_operand (type, op0, op1);
11028 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11029 return constant_boolean_node (false, type);
11030 return NULL_TREE;
11032 case TRUTH_XOR_EXPR:
11033 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11035 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
11036 return constant_boolean_node (x, type);
11038 return NULL_TREE;
11040 default:
11041 return NULL_TREE;
11045 /* Given the components of a unary expression CODE, TYPE and OP0,
11046 attempt to fold the expression to a constant without modifying
11047 TYPE or OP0.
11049 If the expression can be simplified to a constant, then return
11050 the constant.  If the expression cannot be simplified to a
11051 constant, then return NULL_TREE.
11053 Note this is primarily designed to be called after gimplification
11054 of the tree structures and when op0 is a constant. As a result
11055 of those simplifying assumptions this routine is far simpler than
11056 the generic fold routine. */
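/* E.g. (a sketch, names hypothetical):

     tree cst = fold_unary_to_constant (NEGATE_EXPR, integer_type_node,
                                        build_int_cst (integer_type_node, 5));

   yields the INTEGER_CST -5 via fold_negate_const, while a VAR_DECL
   operand yields NULL_TREE.  */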
11058 tree
11059 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11061 /* Make sure we have a suitable constant argument. */
11062 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
11064 tree subop;
11066 if (TREE_CODE (op0) == COMPLEX_CST)
11067 subop = TREE_REALPART (op0);
11068 else
11069 subop = op0;
11071 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
11072 return NULL_TREE;
11075 switch (code)
11077 case NOP_EXPR:
11078 case FLOAT_EXPR:
11079 case CONVERT_EXPR:
11080 case FIX_TRUNC_EXPR:
11081 case FIX_FLOOR_EXPR:
11082 case FIX_CEIL_EXPR:
11083 return fold_convert_const (code, type, op0);
11085 case NEGATE_EXPR:
11086 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
11087 return fold_negate_const (op0, type);
11088 else
11089 return NULL_TREE;
11091 case ABS_EXPR:
11092 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
11093 return fold_abs_const (op0, type);
11094 else
11095 return NULL_TREE;
11097 case BIT_NOT_EXPR:
11098 if (TREE_CODE (op0) == INTEGER_CST)
11099 return fold_not_const (op0, type);
11100 else
11101 return NULL_TREE;
11103 case REALPART_EXPR:
11104 if (TREE_CODE (op0) == COMPLEX_CST)
11105 return TREE_REALPART (op0);
11106 else
11107 return NULL_TREE;
11109 case IMAGPART_EXPR:
11110 if (TREE_CODE (op0) == COMPLEX_CST)
11111 return TREE_IMAGPART (op0);
11112 else
11113 return NULL_TREE;
11115 case CONJ_EXPR:
11116 if (TREE_CODE (op0) == COMPLEX_CST
11117 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
11118 return build_complex (type, TREE_REALPART (op0),
11119 negate_expr (TREE_IMAGPART (op0)));
11120 return NULL_TREE;
11122 default:
11123 return NULL_TREE;
11127 /* If EXP represents referencing an element in a constant string
11128 (either via pointer arithmetic or array indexing), return the
11129 tree representing the value accessed, otherwise return NULL. */
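/* For instance, the array reference "abc"[1] and the equivalent
   indirection *("abc" + 1) both fold to the character constant 'b',
   provided the index is a constant within the string's length.  */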
11131 tree
11132 fold_read_from_constant_string (tree exp)
11134 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11136 tree exp1 = TREE_OPERAND (exp, 0);
11137 tree index;
11138 tree string;
11140 if (TREE_CODE (exp) == INDIRECT_REF)
11141 string = string_constant (exp1, &index);
11142 else
11144 tree low_bound = array_ref_low_bound (exp);
11145 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11147 /* Optimize the special case of a zero lower bound.
11149 We convert the low_bound to sizetype to avoid some problems
11150 with constant folding.  (E.g. suppose the lower bound is 1,
11151 and its mode is QI.  Without the conversion, (ARRAY
11152 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11153 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
11154 if (! integer_zerop (low_bound))
11155 index = size_diffop (index, fold_convert (sizetype, low_bound));
11157 string = exp1;
11160 if (string
11161 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11162 && TREE_CODE (string) == STRING_CST
11163 && TREE_CODE (index) == INTEGER_CST
11164 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11165 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11166 == MODE_INT)
11167 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11168 return fold_convert (TREE_TYPE (exp),
11169 build_int_cst (NULL_TREE,
11170 (TREE_STRING_POINTER (string)
11171 [TREE_INT_CST_LOW (index)])));
11173 return NULL;
11176 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11177 an integer constant or real constant.
11179 TYPE is the type of the result. */
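/* For example, negating the most negative value of a signed TYPE
   wraps; neg_double reports the overflow, and force_fit_type below
   then records it via TREE_OVERFLOW on the result.  */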
11181 static tree
11182 fold_negate_const (tree arg0, tree type)
11184 tree t = NULL_TREE;
11186 switch (TREE_CODE (arg0))
11188 case INTEGER_CST:
11190 unsigned HOST_WIDE_INT low;
11191 HOST_WIDE_INT high;
11192 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11193 TREE_INT_CST_HIGH (arg0),
11194 &low, &high);
11195 t = build_int_cst_wide (type, low, high);
11196 t = force_fit_type (t, 1,
11197 (overflow | TREE_OVERFLOW (arg0))
11198 && !TYPE_UNSIGNED (type),
11199 TREE_CONSTANT_OVERFLOW (arg0));
11200 break;
11203 case REAL_CST:
11204 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11205 break;
11207 default:
11208 gcc_unreachable ();
11211 return t;
11214 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11215 an integer constant or real constant.
11217 TYPE is the type of the result. */
11219 tree
11220 fold_abs_const (tree arg0, tree type)
11222 tree t = NULL_TREE;
11224 switch (TREE_CODE (arg0))
11226 case INTEGER_CST:
11227 /* If the value is unsigned, then the absolute value is
11228 the same as the ordinary value. */
11229 if (TYPE_UNSIGNED (type))
11230 t = arg0;
11231 /* Similarly, if the value is non-negative. */
11232 else if (INT_CST_LT (integer_minus_one_node, arg0))
11233 t = arg0;
11234 /* If the value is negative, then the absolute value is
11235 its negation. */
11236 else
11238 unsigned HOST_WIDE_INT low;
11239 HOST_WIDE_INT high;
11240 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11241 TREE_INT_CST_HIGH (arg0),
11242 &low, &high);
11243 t = build_int_cst_wide (type, low, high);
11244 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11245 TREE_CONSTANT_OVERFLOW (arg0));
11247 break;
11249 case REAL_CST:
11250 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11251 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11252 else
11253 t = arg0;
11254 break;
11256 default:
11257 gcc_unreachable ();
11260 return t;
11263 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11264 constant. TYPE is the type of the result. */
11266 static tree
11267 fold_not_const (tree arg0, tree type)
11269 tree t = NULL_TREE;
11271 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11273 t = build_int_cst_wide (type,
11274 ~ TREE_INT_CST_LOW (arg0),
11275 ~ TREE_INT_CST_HIGH (arg0));
11276 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11277 TREE_CONSTANT_OVERFLOW (arg0));
11279 return t;
11282 /* Given CODE, a relational operator, the target type, TYPE and two
11283 constant operands OP0 and OP1, return the result of the
11284 relational operation. If the result is not a compile time
11285 constant, then return NULL_TREE. */
11287 static tree
11288 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11290 int result, invert;
11292 /* From here on, the only cases we handle are when the result is
11293 known to be a constant. */
11295 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11297 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11298 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11300 /* Handle the cases where either operand is a NaN. */
11301 if (real_isnan (c0) || real_isnan (c1))
11303 switch (code)
11305 case EQ_EXPR:
11306 case ORDERED_EXPR:
11307 result = 0;
11308 break;
11310 case NE_EXPR:
11311 case UNORDERED_EXPR:
11312 case UNLT_EXPR:
11313 case UNLE_EXPR:
11314 case UNGT_EXPR:
11315 case UNGE_EXPR:
11316 case UNEQ_EXPR:
11317 result = 1;
11318 break;
11320 case LT_EXPR:
11321 case LE_EXPR:
11322 case GT_EXPR:
11323 case GE_EXPR:
11324 case LTGT_EXPR:
11325 if (flag_trapping_math)
11326 return NULL_TREE;
11327 result = 0;
11328 break;
11330 default:
11331 gcc_unreachable ();
11334 return constant_boolean_node (result, type);
11337 return constant_boolean_node (real_compare (code, c0, c1), type);
11340 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11342 To compute GT, swap the arguments and do LT.
11343 To compute GE, do LT and invert the result.
11344 To compute LE, swap the arguments, do LT and invert the result.
11345 To compute NE, do EQ and invert the result.
11347 Therefore, the code below must handle only EQ and LT. */
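/* For instance, 2 >= 3 is computed as !(2 < 3), and 3 > 2 is
   computed as 2 < 3 after swapping the operands.  */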
11349 if (code == LE_EXPR || code == GT_EXPR)
11351 tree tem = op0;
11352 op0 = op1;
11353 op1 = tem;
11354 code = swap_tree_comparison (code);
11357 /* Note that it is safe to invert for real values here because we
11358 have already handled the one case where it matters.  */
11360 invert = 0;
11361 if (code == NE_EXPR || code == GE_EXPR)
11363 invert = 1;
11364 code = invert_tree_comparison (code, false);
11367 /* Compute a result for LT or EQ if args permit;
11368 otherwise return NULL_TREE.  */
11369 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11371 if (code == EQ_EXPR)
11372 result = tree_int_cst_equal (op0, op1);
11373 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11374 result = INT_CST_LT_UNSIGNED (op0, op1);
11375 else
11376 result = INT_CST_LT (op0, op1);
11378 else
11379 return NULL_TREE;
11381 if (invert)
11382 result ^= 1;
11383 return constant_boolean_node (result, type);
11386 /* Build an expression for a cleanup point containing EXPR with type TYPE.
11387 Don't build a cleanup point expression for EXPR which doesn't have side
11388 effects.  */
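/* E.g. a call f () has side effects and so is wrapped in a
   CLEANUP_POINT_EXPR, while a plain variable reference, having no
   side effects, is returned unchanged.  */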
11390 tree
11391 fold_build_cleanup_point_expr (tree type, tree expr)
11393 /* If the expression does not have side effects then we don't have to wrap
11394 it with a cleanup point expression. */
11395 if (!TREE_SIDE_EFFECTS (expr))
11396 return expr;
11398 /* If the expression is a return, check whether the expression inside
11399 the return, or the right hand side of the modify expression inside
11400 the return, has side effects.  If either one doesn't, we don't need
11401 to wrap the expression in a cleanup point expression.  Note we don't
11402 check the left hand side of the modify because it should always be a return decl.  */
11403 if (TREE_CODE (expr) == RETURN_EXPR)
11405 tree op = TREE_OPERAND (expr, 0);
11406 if (!op || !TREE_SIDE_EFFECTS (op))
11407 return expr;
11408 op = TREE_OPERAND (op, 1);
11409 if (!TREE_SIDE_EFFECTS (op))
11410 return expr;
11413 return build1 (CLEANUP_POINT_EXPR, type, expr);
11416 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11417 avoid confusing the gimplify process. */
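/* E.g. taking the address of *p simply yields p (wrapped in a
   NOP_EXPR if the pointer type differs from PTRTYPE), rather than
   building the tree &*p.  */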
11419 tree
11420 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11422 /* The size of the object is not relevant when talking about its address. */
11423 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11424 t = TREE_OPERAND (t, 0);
11426 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11427 if (TREE_CODE (t) == INDIRECT_REF
11428 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11430 t = TREE_OPERAND (t, 0);
11431 if (TREE_TYPE (t) != ptrtype)
11432 t = build1 (NOP_EXPR, ptrtype, t);
11434 else
11436 tree base = t;
11438 while (handled_component_p (base))
11439 base = TREE_OPERAND (base, 0);
11440 if (DECL_P (base))
11441 TREE_ADDRESSABLE (base) = 1;
11443 t = build1 (ADDR_EXPR, ptrtype, t);
11446 return t;
11449 tree
11450 build_fold_addr_expr (tree t)
11452 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11455 /* Given a pointer value T, return a simplified version of an indirection
11456 through T, or NULL_TREE if no simplification is possible. */
11458 static tree
11459 fold_indirect_ref_1 (tree t)
11461 tree type = TREE_TYPE (TREE_TYPE (t));
11462 tree sub = t;
11463 tree subtype;
11465 STRIP_NOPS (sub);
11466 subtype = TREE_TYPE (sub);
11467 if (!POINTER_TYPE_P (subtype))
11468 return NULL_TREE;
11470 if (TREE_CODE (sub) == ADDR_EXPR)
11472 tree op = TREE_OPERAND (sub, 0);
11473 tree optype = TREE_TYPE (op);
11474 /* *&p => p */
11475 if (lang_hooks.types_compatible_p (type, optype))
11476 return op;
11477 /* *(foo *)&fooarray => fooarray[0] */
11478 else if (TREE_CODE (optype) == ARRAY_TYPE
11479 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
11481 tree type_domain = TYPE_DOMAIN (optype);
11482 tree min_val = size_zero_node;
11483 if (type_domain && TYPE_MIN_VALUE (type_domain))
11484 min_val = TYPE_MIN_VALUE (type_domain);
11485 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11489 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11490 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11491 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
11493 tree type_domain;
11494 tree min_val = size_zero_node;
11495 sub = build_fold_indirect_ref (sub);
11496 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11497 if (type_domain && TYPE_MIN_VALUE (type_domain))
11498 min_val = TYPE_MIN_VALUE (type_domain);
11499 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11502 return NULL_TREE;
11505 /* Builds an expression for an indirection through T, simplifying some
11506 cases. */
11508 tree
11509 build_fold_indirect_ref (tree t)
11511 tree sub = fold_indirect_ref_1 (t);
11513 if (sub)
11514 return sub;
11515 else
11516 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (t)), t);
11519 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11521 tree
11522 fold_indirect_ref (tree t)
11524 tree sub = fold_indirect_ref_1 (TREE_OPERAND (t, 0));
11526 if (sub)
11527 return sub;
11528 else
11529 return t;
11532 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11533 whose result is ignored. The type of the returned tree need not be
11534 the same as the original expression. */
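/* E.g. if the value of (x = f ()) + 1 is ignored, the addition is
   stripped and the MODIFY_EXPR x = f () is returned; an expression
   with no side effects at all is replaced by integer_zero_node.  */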
11536 tree
11537 fold_ignored_result (tree t)
11539 if (!TREE_SIDE_EFFECTS (t))
11540 return integer_zero_node;
11542 for (;;)
11543 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11545 case tcc_unary:
11546 t = TREE_OPERAND (t, 0);
11547 break;
11549 case tcc_binary:
11550 case tcc_comparison:
11551 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11552 t = TREE_OPERAND (t, 0);
11553 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11554 t = TREE_OPERAND (t, 1);
11555 else
11556 return t;
11557 break;
11559 case tcc_expression:
11560 switch (TREE_CODE (t))
11562 case COMPOUND_EXPR:
11563 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11564 return t;
11565 t = TREE_OPERAND (t, 0);
11566 break;
11568 case COND_EXPR:
11569 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11570 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11571 return t;
11572 t = TREE_OPERAND (t, 0);
11573 break;
11575 default:
11576 return t;
11578 break;
11580 default:
11581 return t;
11585 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11586 This can only be applied to objects of a sizetype. */
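/* E.g. round_up (size_int (37), 8) folds to (37 + 7) & -8 = 40 via
   the power-of-two path below, while round_up (size_int (37), 12)
   computes ceil (37 / 12) * 12 = 48 via CEIL_DIV_EXPR.  */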
11588 tree
11589 round_up (tree value, int divisor)
11591 tree div = NULL_TREE;
11593 gcc_assert (divisor > 0);
11594 if (divisor == 1)
11595 return value;
11597 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
11598 have to do anything.  Only do this when VALUE is not itself a
11599 constant, because for a constant the check is more expensive than
11600 simply performing the rounding.  */
11601 if (TREE_CODE (value) != INTEGER_CST)
11603 div = build_int_cst (TREE_TYPE (value), divisor);
11605 if (multiple_of_p (TREE_TYPE (value), value, div))
11606 return value;
11609 /* If divisor is a power of two, simplify this to bit manipulation. */
11610 if (divisor == (divisor & -divisor))
11612 tree t;
11614 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11615 value = size_binop (PLUS_EXPR, value, t);
11616 t = build_int_cst (TREE_TYPE (value), -divisor);
11617 value = size_binop (BIT_AND_EXPR, value, t);
11619 else
11621 if (!div)
11622 div = build_int_cst (TREE_TYPE (value), divisor);
11623 value = size_binop (CEIL_DIV_EXPR, value, div);
11624 value = size_binop (MULT_EXPR, value, div);
11627 return value;
11630 /* Likewise, but round down. */
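/* E.g. round_down (size_int (37), 8) folds to 37 & -8 = 32.  */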
11632 tree
11633 round_down (tree value, int divisor)
11635 tree div = NULL_TREE;
11637 gcc_assert (divisor > 0);
11638 if (divisor == 1)
11639 return value;
11641 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
11642 have to do anything.  Only do this when VALUE is not itself a
11643 constant, because for a constant the check is more expensive than
11644 simply performing the rounding.  */
11645 if (TREE_CODE (value) != INTEGER_CST)
11647 div = build_int_cst (TREE_TYPE (value), divisor);
11649 if (multiple_of_p (TREE_TYPE (value), value, div))
11650 return value;
11653 /* If divisor is a power of two, simplify this to bit manipulation. */
11654 if (divisor == (divisor & -divisor))
11656 tree t;
11658 t = build_int_cst (TREE_TYPE (value), -divisor);
11659 value = size_binop (BIT_AND_EXPR, value, t);
11661 else
11663 if (!div)
11664 div = build_int_cst (TREE_TYPE (value), divisor);
11665 value = size_binop (FLOOR_DIV_EXPR, value, div);
11666 value = size_binop (MULT_EXPR, value, div);
11669 return value;
11672 /* Returns the pointer to the base of the object addressed by EXP and
11673 extracts the information about the offset of the access, storing it
11674 in PBITPOS and POFFSET.  */
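/* E.g. for EXP == &a.b[i], the returned core is a, *PBITPOS receives
   the constant bit offset of the access, and *POFFSET the remaining
   variable offset (i scaled by the element size), mirroring
   get_inner_reference.  */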
11676 static tree
11677 split_address_to_core_and_offset (tree exp,
11678 HOST_WIDE_INT *pbitpos, tree *poffset)
11680 tree core;
11681 enum machine_mode mode;
11682 int unsignedp, volatilep;
11683 HOST_WIDE_INT bitsize;
11685 if (TREE_CODE (exp) == ADDR_EXPR)
11687 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11688 poffset, &mode, &unsignedp, &volatilep,
11689 false);
11691 if (TREE_CODE (core) == INDIRECT_REF)
11692 core = TREE_OPERAND (core, 0);
11694 else
11696 core = exp;
11697 *pbitpos = 0;
11698 *poffset = NULL_TREE;
11701 return core;
11704 /* Returns true if addresses of E1 and E2 differ by a constant, false
11705 otherwise. If they do, E1 - E2 is stored in *DIFF. */
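/* E.g. with int a[4], e1 == &a[3] and e2 == &a[1] share the core a,
   their byte offsets differ by 2 * sizeof (int) == 8 on a typical
   32-bit int target, and *DIFF is set to 8.  */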
11707 bool
11708 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11710 tree core1, core2;
11711 HOST_WIDE_INT bitpos1, bitpos2;
11712 tree toffset1, toffset2, tdiff, type;
11714 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11715 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11717 if (bitpos1 % BITS_PER_UNIT != 0
11718 || bitpos2 % BITS_PER_UNIT != 0
11719 || !operand_equal_p (core1, core2, 0))
11720 return false;
11722 if (toffset1 && toffset2)
11724 type = TREE_TYPE (toffset1);
11725 if (type != TREE_TYPE (toffset2))
11726 toffset2 = fold_convert (type, toffset2);
11728 tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
11729 if (!host_integerp (tdiff, 0))
11730 return false;
11732 *diff = tree_low_cst (tdiff, 0);
11734 else if (toffset1 || toffset2)
11736 /* If only one of the offsets is non-constant, the difference cannot
11737 be a constant. */
11738 return false;
11740 else
11741 *diff = 0;
11743 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11744 return true;
11747 /* Simplify the floating point expression EXP when the sign of the
11748 result is not significant. Return NULL_TREE if no simplification
11749 is possible. */
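/* E.g. where only the magnitude of the result matters, x * -y is
   rebuilt as x * y and ABS_EXPR wrappers are dropped; for MULT_EXPR
   and RDIV_EXPR this is suppressed when sign-dependent rounding is
   in effect.  */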
11751 tree
11752 fold_strip_sign_ops (tree exp)
11754 tree arg0, arg1;
11756 switch (TREE_CODE (exp))
11758 case ABS_EXPR:
11759 case NEGATE_EXPR:
11760 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11761 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11763 case MULT_EXPR:
11764 case RDIV_EXPR:
11765 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11766 return NULL_TREE;
11767 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11768 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11769 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11770 return fold (build2 (TREE_CODE (exp), TREE_TYPE (exp),
11771 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11772 arg1 ? arg1 : TREE_OPERAND (exp, 1)));
11773 break;
11775 default:
11776 break;
11778 return NULL_TREE;