/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
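
/* For illustration: with this encoding, combining two comparisons is
   just bitwise arithmetic on their codes.  LT is bit 0, EQ is bit 1,
   GT is bit 2 and UNORD is bit 3, so for example

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)       (1 | 2 == 3)
     COMPCODE_GE == (COMPCODE_GT | COMPCODE_EQ)       (4 | 2 == 6)
     COMPCODE_NE == (COMPCODE_UNORD | COMPCODE_LTGT)  (8 | 5 == 13)

   and a test such as "(a < b) || (a == b)" can be folded by OR-ing
   the two codes and mapping the result back to a tree code with
   compcode_to_comparison.  */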
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
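
/* Worked example, assuming a 32-bit HOST_WIDE_INT for brevity: adding
   a = 0x7fffffff and b = 0x00000001 gives sum = 0x80000000.  Then
   ~(a ^ b) == 0x80000001 (operands had the same sign) and
   a ^ sum == 0xffffffff (the sign changed), so the AND is 0x80000001,
   which is negative: overflow is detected.  For a = 0x7fffffff and
   b = 0xffffffff (i.e. -1), sum = 0x7ffffffe and ~(a ^ b) == 0x7fffffff
   has the sign bit clear, so no overflow is reported.  */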
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
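
/* For illustration, assuming HOST_BITS_PER_WIDE_INT == 32:
   BASE == 0x10000, LOWPART (0x12345678) == 0x5678 and
   HIGHPART (0x12345678) == 0x1234, and indeed
   0x5678 + 0x1234 * 0x10000 == 0x12345678.  */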
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
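
/* Round-trip example, again assuming HOST_BITS_PER_WIDE_INT == 32:
   encode (words, 0x89abcdef, 0x01234567) yields
   words = { 0xcdef, 0x89ab, 0x4567, 0x0123 }, and decode on that
   array reproduces *low == 0x89abcdef and *hi == 0x01234567.  */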
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if,
        CONST_OVERFLOWED is nonzero
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
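
/* Example of the above, assuming a signed 8-bit type: forcing the
   INTEGER_CST 0x1ff into that type first masks the value down to 0xff,
   then sign extends it to -1.  Because the value changed and the type
   is sign extended, a call with OVERFLOWABLE > 0 returns a new node
   with TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW set.  */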
/* Add two doubleword integers with doubleword result.
   Return nonzero if the addition overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
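
/* For example, lshift_double (1, 0, HOST_BITS_PER_WIDE_INT,
   2 * HOST_BITS_PER_WIDE_INT, &lv, &hv, 0) carries the low word's
   bit 0 across the word boundary: it stores *lv == 0 and *hv == 1.  */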
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {		/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1; */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
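
/* Worked example of the rounding modes, for -7 / 2 (signed):

     TRUNC_DIV_EXPR  quo = -3, rem = -1  (round toward zero)
     FLOOR_DIV_EXPR  quo = -4, rem =  1  (round toward -infinity)
     CEIL_DIV_EXPR   quo = -3, rem = -1  (round toward +infinity)
     ROUND_DIV_EXPR  quo = -4, rem =  1  (2 * |rem| >= |den|, so the
                                          trial quotient -3 is bumped
                                          away from zero)

   In every case rem == num - quo * den holds afterwards.  */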
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
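
/* For example, with a 32-bit signed type this returns false only for
   INT_MIN (0x80000000), since -INT_MIN is not representable; every
   other value, including INT_MAX, may be negated safely.  */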
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
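
/* The RSHIFT_EXPR case above is worth a concrete check: for a 32-bit
   int x, -((int) x >> 31) is 1 when x is negative and 0 otherwise,
   and (unsigned) x >> 31 computes exactly the same two values, so the
   negation comes for free once the shift is done unsigned.  */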
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 0));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 1));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
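
/* For example, splitting IN = x - 3 with CODE == PLUS_EXPR and
   NEGATE_P == 0 returns VAR == x with *MINUS_LITP == 3 (the literal
   was subtracted), while splitting IN = x + 4 returns VAR == x with
   *LITP == 4 and leaves *CONP and *MINUS_LITP null.  */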
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
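
/* Illustrative use by a hypothetical caller, folding 2 + 3 in int:

     tree a = build_int_cst (integer_type_node, 2);
     tree b = build_int_cst (integer_type_node, 3);
     tree s = int_const_binop (PLUS_EXPR, a, b, 0);

   S is then the INTEGER_CST 5, with its overflow bits clear because
   force_fit_type found no overflow to record.  */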
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
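
/* For example, size_diffop on the sizetype constants 4 and 12 takes
   the last branch above: 12 - 4 is computed in the unsigned type,
   converted to ssizetype, and subtracted from zero, giving -8 without
   ever forming a negative value in the unsigned type.  */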
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
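
/* For example, converting the REAL_CST 3.7 to int via FIX_TRUNC_EXPR
   yields 3, via FIX_CEIL_EXPR yields 4, and via FIX_ROUND_EXPR yields
   4; converting a NaN yields 0 with the overflow flags set, per the
   Java-style saturation rules described above.  */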
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
1904 /* Convert expression ARG to type TYPE. Used by the middle-end for
1905 simple conversions in preference to calling the front-end's convert. */
1907 tree
1908 fold_convert (tree type, tree arg)
1910 tree orig = TREE_TYPE (arg);
1911 tree tem;
1913 if (type == orig)
1914 return arg;
1916 if (TREE_CODE (arg) == ERROR_MARK
1917 || TREE_CODE (type) == ERROR_MARK
1918 || TREE_CODE (orig) == ERROR_MARK)
1919 return error_mark_node;
1921 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1922 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1923 TYPE_MAIN_VARIANT (orig)))
1924 return fold_build1 (NOP_EXPR, type, arg);
1926 switch (TREE_CODE (type))
1928 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1929 case POINTER_TYPE: case REFERENCE_TYPE:
1930 case OFFSET_TYPE:
1931 if (TREE_CODE (arg) == INTEGER_CST)
1933 tem = fold_convert_const (NOP_EXPR, type, arg);
1934 if (tem != NULL_TREE)
1935 return tem;
1937 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1938 || TREE_CODE (orig) == OFFSET_TYPE)
1939 return fold_build1 (NOP_EXPR, type, arg);
1940 if (TREE_CODE (orig) == COMPLEX_TYPE)
1942 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1943 return fold_convert (type, tem);
1945 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1946 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1947 return fold_build1 (NOP_EXPR, type, arg);
1949 case REAL_TYPE:
1950 if (TREE_CODE (arg) == INTEGER_CST)
1952 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1953 if (tem != NULL_TREE)
1954 return tem;
1956 else if (TREE_CODE (arg) == REAL_CST)
1958 tem = fold_convert_const (NOP_EXPR, type, arg);
1959 if (tem != NULL_TREE)
1960 return tem;
1963 switch (TREE_CODE (orig))
1965 case INTEGER_TYPE: case CHAR_TYPE:
1966 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1967 case POINTER_TYPE: case REFERENCE_TYPE:
1968 return fold_build1 (FLOAT_EXPR, type, arg);
1970 case REAL_TYPE:
1971 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1972 type, arg);
1974 case COMPLEX_TYPE:
1975 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1976 return fold_convert (type, tem);
1978 default:
1979 gcc_unreachable ();
1982 case COMPLEX_TYPE:
1983 switch (TREE_CODE (orig))
1985 case INTEGER_TYPE: case CHAR_TYPE:
1986 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1987 case POINTER_TYPE: case REFERENCE_TYPE:
1988 case REAL_TYPE:
1989 return build2 (COMPLEX_EXPR, type,
1990 fold_convert (TREE_TYPE (type), arg),
1991 fold_convert (TREE_TYPE (type), integer_zero_node));
1992 case COMPLEX_TYPE:
1994 tree rpart, ipart;
1996 if (TREE_CODE (arg) == COMPLEX_EXPR)
1998 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1999 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2000 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2003 arg = save_expr (arg);
2004 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2005 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2006 rpart = fold_convert (TREE_TYPE (type), rpart);
2007 ipart = fold_convert (TREE_TYPE (type), ipart);
2008 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2011 default:
2012 gcc_unreachable ();
2015 case VECTOR_TYPE:
2016 if (integer_zerop (arg))
2017 return build_zero_vector (type);
2018 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2019 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2020 || TREE_CODE (orig) == VECTOR_TYPE);
2021 return fold_build1 (NOP_EXPR, type, arg);
2023 case VOID_TYPE:
2024 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2026 default:
2027 gcc_unreachable ();
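/* Illustrative sketch, not part of fold-const.c: the COMPLEX_TYPE
   handling above mirrors C99's own conversion rules, where a
   complex-to-real conversion keeps the real part and a real-to-complex
   conversion zeros the imaginary part.  A minimal standalone check,
   assuming a C99 compiler with complex support: */

#include <assert.h>
#include <complex.h>

int
main (void)
{
  double complex z = 3.0 + 4.0 * I;
  /* Complex to real discards the imaginary part, as the
     REALPART_EXPR folds above do.  */
  double r = (double) z;
  assert (r == 3.0);
  /* Real to complex gets a zero imaginary part, matching the
     COMPLEX_EXPR built above.  */
  double complex w = 2.5;
  assert (creal (w) == 2.5 && cimag (w) == 0.0);
  return 0;
}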
2031 /* Return false if expr can be assumed not to be an lvalue, true
2032 otherwise. */
2034 static bool
2035 maybe_lvalue_p (tree x)
2037 /* We only need to wrap lvalue tree codes. */
2038 switch (TREE_CODE (x))
2040 case VAR_DECL:
2041 case PARM_DECL:
2042 case RESULT_DECL:
2043 case LABEL_DECL:
2044 case FUNCTION_DECL:
2045 case SSA_NAME:
2047 case COMPONENT_REF:
2048 case INDIRECT_REF:
2049 case ALIGN_INDIRECT_REF:
2050 case MISALIGNED_INDIRECT_REF:
2051 case ARRAY_REF:
2052 case ARRAY_RANGE_REF:
2053 case BIT_FIELD_REF:
2054 case OBJ_TYPE_REF:
2056 case REALPART_EXPR:
2057 case IMAGPART_EXPR:
2058 case PREINCREMENT_EXPR:
2059 case PREDECREMENT_EXPR:
2060 case SAVE_EXPR:
2061 case TRY_CATCH_EXPR:
2062 case WITH_CLEANUP_EXPR:
2063 case COMPOUND_EXPR:
2064 case MODIFY_EXPR:
2065 case TARGET_EXPR:
2066 case COND_EXPR:
2067 case BIND_EXPR:
2068 case MIN_EXPR:
2069 case MAX_EXPR:
2070 break;
2072 default:
2073 /* Assume the worst for front-end tree codes. */
2074 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2075 break;
2076 return false;
2079 return true;
2082 /* Return an expr equal to X but certainly not valid as an lvalue. */
2084 tree
2085 non_lvalue (tree x)
2087 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2088 us. */
2089 if (in_gimple_form)
2090 return x;
2092 if (! maybe_lvalue_p (x))
2093 return x;
2094 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2097 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2098 Zero means allow extended lvalues. */
2100 int pedantic_lvalues;
2102 /* When pedantic, return an expr equal to X but certainly not valid as a
2103 pedantic lvalue. Otherwise, return X. */
2105 static tree
2106 pedantic_non_lvalue (tree x)
2108 if (pedantic_lvalues)
2109 return non_lvalue (x);
2110 else
2111 return x;
2114 /* Given a tree comparison code, return the code that is the logical inverse
2115 of the given code. It is not safe to do this for floating-point
2116 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2117 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2119 static enum tree_code
2120 invert_tree_comparison (enum tree_code code, bool honor_nans)
2122 if (honor_nans && flag_trapping_math)
2123 return ERROR_MARK;
2125 switch (code)
2127 case EQ_EXPR:
2128 return NE_EXPR;
2129 case NE_EXPR:
2130 return EQ_EXPR;
2131 case GT_EXPR:
2132 return honor_nans ? UNLE_EXPR : LE_EXPR;
2133 case GE_EXPR:
2134 return honor_nans ? UNLT_EXPR : LT_EXPR;
2135 case LT_EXPR:
2136 return honor_nans ? UNGE_EXPR : GE_EXPR;
2137 case LE_EXPR:
2138 return honor_nans ? UNGT_EXPR : GT_EXPR;
2139 case LTGT_EXPR:
2140 return UNEQ_EXPR;
2141 case UNEQ_EXPR:
2142 return LTGT_EXPR;
2143 case UNGT_EXPR:
2144 return LE_EXPR;
2145 case UNGE_EXPR:
2146 return LT_EXPR;
2147 case UNLT_EXPR:
2148 return GE_EXPR;
2149 case UNLE_EXPR:
2150 return GT_EXPR;
2151 case ORDERED_EXPR:
2152 return UNORDERED_EXPR;
2153 case UNORDERED_EXPR:
2154 return ORDERED_EXPR;
2155 default:
2156 gcc_unreachable ();
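/* Illustrative sketch, not part of fold-const.c: why honoring NaNs
   matters when inverting a comparison.  On unordered operands every
   ordered comparison is false, so ">=" is not the inverse of "<";
   the true inverse is "unordered or >=" (UNGE_EXPR above).  Assumes
   an IEEE target and a C99 compiler: */

#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = NAN, y = 1.0;
  assert (!(x < y));                             /* "<" is false...   */
  assert (!(x >= y));                            /* ...and so is ">=" */
  assert (!(x < y) == (isunordered (x, y) || x >= y));
  return 0;
}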
2160 /* Similar, but return the comparison that results if the operands are
2161 swapped. This is safe for floating-point. */
2163 enum tree_code
2164 swap_tree_comparison (enum tree_code code)
2166 switch (code)
2168 case EQ_EXPR:
2169 case NE_EXPR:
2170 return code;
2171 case GT_EXPR:
2172 return LT_EXPR;
2173 case GE_EXPR:
2174 return LE_EXPR;
2175 case LT_EXPR:
2176 return GT_EXPR;
2177 case LE_EXPR:
2178 return GE_EXPR;
2179 default:
2180 gcc_unreachable ();
2185 /* Convert a comparison tree code from an enum tree_code representation
2186 into a compcode bit-based encoding. This function is the inverse of
2187 compcode_to_comparison. */
2189 static enum comparison_code
2190 comparison_to_compcode (enum tree_code code)
2192 switch (code)
2194 case LT_EXPR:
2195 return COMPCODE_LT;
2196 case EQ_EXPR:
2197 return COMPCODE_EQ;
2198 case LE_EXPR:
2199 return COMPCODE_LE;
2200 case GT_EXPR:
2201 return COMPCODE_GT;
2202 case NE_EXPR:
2203 return COMPCODE_NE;
2204 case GE_EXPR:
2205 return COMPCODE_GE;
2206 case ORDERED_EXPR:
2207 return COMPCODE_ORD;
2208 case UNORDERED_EXPR:
2209 return COMPCODE_UNORD;
2210 case UNLT_EXPR:
2211 return COMPCODE_UNLT;
2212 case UNEQ_EXPR:
2213 return COMPCODE_UNEQ;
2214 case UNLE_EXPR:
2215 return COMPCODE_UNLE;
2216 case UNGT_EXPR:
2217 return COMPCODE_UNGT;
2218 case LTGT_EXPR:
2219 return COMPCODE_LTGT;
2220 case UNGE_EXPR:
2221 return COMPCODE_UNGE;
2222 default:
2223 gcc_unreachable ();
2227 /* Convert a compcode bit-based encoding of a comparison operator back
2228 to GCC's enum tree_code representation. This function is the
2229 inverse of comparison_to_compcode. */
2231 static enum tree_code
2232 compcode_to_comparison (enum comparison_code code)
2234 switch (code)
2236 case COMPCODE_LT:
2237 return LT_EXPR;
2238 case COMPCODE_EQ:
2239 return EQ_EXPR;
2240 case COMPCODE_LE:
2241 return LE_EXPR;
2242 case COMPCODE_GT:
2243 return GT_EXPR;
2244 case COMPCODE_NE:
2245 return NE_EXPR;
2246 case COMPCODE_GE:
2247 return GE_EXPR;
2248 case COMPCODE_ORD:
2249 return ORDERED_EXPR;
2250 case COMPCODE_UNORD:
2251 return UNORDERED_EXPR;
2252 case COMPCODE_UNLT:
2253 return UNLT_EXPR;
2254 case COMPCODE_UNEQ:
2255 return UNEQ_EXPR;
2256 case COMPCODE_UNLE:
2257 return UNLE_EXPR;
2258 case COMPCODE_UNGT:
2259 return UNGT_EXPR;
2260 case COMPCODE_LTGT:
2261 return LTGT_EXPR;
2262 case COMPCODE_UNGE:
2263 return UNGE_EXPR;
2264 default:
2265 gcc_unreachable ();
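/* Illustrative sketch, not part of fold-const.c: the compcode encoding
   the two converters above map to and from assigns one bit per
   primitive outcome -- bit 0 for "<", bit 1 for "==", bit 2 for ">"
   and bit 3 for "unordered" -- so a bitwise AND/OR of two codes is the
   AND/OR of the predicates.  A minimal standalone check of a few of
   the identities: */

#include <assert.h>

enum { LT = 1, EQ = 2, GT = 4, UNORD = 8 };

int
main (void)
{
  assert ((LT | EQ) == 3);            /* COMPCODE_LE   */
  assert ((LT | GT) == 5);            /* COMPCODE_LTGT */
  assert ((EQ | GT) == 6);            /* COMPCODE_GE   */
  assert ((LT | EQ | GT) == 7);       /* COMPCODE_ORD  */
  assert ((UNORD | LT | GT) == 13);   /* COMPCODE_NE   */
  /* Intersecting "<=" with ">=" leaves only "==".  */
  assert (((LT | EQ) & (EQ | GT)) == EQ);
  return 0;
}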
2269 /* Return a tree for the comparison which is the combination of
2270 doing the AND or OR (depending on CODE) of the two operations LCODE
2271 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2272 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2273 if this makes the transformation invalid. */
2275 tree
2276 combine_comparisons (enum tree_code code, enum tree_code lcode,
2277 enum tree_code rcode, tree truth_type,
2278 tree ll_arg, tree lr_arg)
2280 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2281 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2282 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2283 enum comparison_code compcode;
2285 switch (code)
2287 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2288 compcode = lcompcode & rcompcode;
2289 break;
2291 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2292 compcode = lcompcode | rcompcode;
2293 break;
2295 default:
2296 return NULL_TREE;
2299 if (!honor_nans)
2301 /* Eliminate unordered comparisons, as well as LTGT and ORD
2302 which are not used unless the mode has NaNs. */
2303 compcode &= ~COMPCODE_UNORD;
2304 if (compcode == COMPCODE_LTGT)
2305 compcode = COMPCODE_NE;
2306 else if (compcode == COMPCODE_ORD)
2307 compcode = COMPCODE_TRUE;
2309 else if (flag_trapping_math)
2311 /* Check that the original operation and the optimized ones will trap
2312 under the same condition. */
2313 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2314 && (lcompcode != COMPCODE_EQ)
2315 && (lcompcode != COMPCODE_ORD);
2316 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2317 && (rcompcode != COMPCODE_EQ)
2318 && (rcompcode != COMPCODE_ORD);
2319 bool trap = (compcode & COMPCODE_UNORD) == 0
2320 && (compcode != COMPCODE_EQ)
2321 && (compcode != COMPCODE_ORD);
2323 /* In a short-circuited boolean expression the LHS might be
2324 such that the RHS, if evaluated, will never trap. For
2325 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2326 if neither x nor y is NaN. (This is a mixed blessing: for
2327 example, the expression above will never trap, hence
2328 optimizing it to x < y would be invalid). */
2329 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2330 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2331 rtrap = false;
2333 /* If the comparison was short-circuited, and only the RHS
2334 trapped, we may now generate a spurious trap. */
2335 if (rtrap && !ltrap
2336 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2337 return NULL_TREE;
2339 /* If we changed the conditions that cause a trap, we lose. */
2340 if ((ltrap || rtrap) != trap)
2341 return NULL_TREE;
2344 if (compcode == COMPCODE_TRUE)
2345 return constant_boolean_node (true, truth_type);
2346 else if (compcode == COMPCODE_FALSE)
2347 return constant_boolean_node (false, truth_type);
2348 else
2349 return fold_build2 (compcode_to_comparison (compcode),
2350 truth_type, ll_arg, lr_arg);
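/* Illustrative sketch, not part of fold-const.c: the value-level
   effect of the folding above.  ORing the compcodes of "<" and "=="
   yields "<=", and ANDing "<" with ">=" yields FALSE; both identities
   hold even when one operand is NaN, although the folded form may
   raise a different FP exception, which is what the
   flag_trapping_math checks guard against.  Assumes IEEE doubles: */

#include <assert.h>
#include <math.h>

static void
check (double a, double b)
{
  assert (((a < b) || (a == b)) == (a <= b));   /* LT | EQ == LE    */
  assert (!((a < b) && (a >= b)));              /* LT & GE == FALSE */
}

int
main (void)
{
  check (1.0, 2.0);
  check (2.0, 1.0);
  check (1.0, 1.0);
  check (NAN, 1.0);
  return 0;
}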
2353 /* Return nonzero if CODE is a tree code that represents a truth value. */
2355 static int
2356 truth_value_p (enum tree_code code)
2358 return (TREE_CODE_CLASS (code) == tcc_comparison
2359 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2360 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2361 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2364 /* Return nonzero if two operands (typically of the same tree node)
2365 are necessarily equal. If either argument has side-effects this
2366 function returns zero. FLAGS modifies behavior as follows:
2368 If OEP_ONLY_CONST is set, only return nonzero for constants.
2369 This function tests whether the operands are indistinguishable;
2370 it does not test whether they are equal using C's == operation.
2371 The distinction is important for IEEE floating point, because
2372 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2373 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2375 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2376 even though it may hold multiple values during a function.
2377 This is because a GCC tree node guarantees that nothing else is
2378 executed between the evaluation of its "operands" (which may often
2379 be evaluated in arbitrary order). Hence if the operands themselves
2380 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2381 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2382 unset means assuming isochronic (or instantaneous) tree equivalence.
2383 Unless comparing arbitrary expression trees, such as from different
2384 statements, this flag can usually be left unset.
2386 If OEP_PURE_SAME is set, then pure functions with identical arguments
2387 are considered the same. It is used when the caller has other ways
2388 to ensure that global memory is unchanged in between. */
2390 int
2391 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2393 /* If either is ERROR_MARK, they aren't equal. */
2394 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2395 return 0;
2397 /* If both types don't have the same signedness, then we can't consider
2398 them equal. We must check this before the STRIP_NOPS calls
2399 because they may change the signedness of the arguments. */
2400 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2401 return 0;
2403 STRIP_NOPS (arg0);
2404 STRIP_NOPS (arg1);
2406 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2407 /* This is needed for conversions and for COMPONENT_REF.
2408 Might as well play it safe and always test this. */
2409 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2410 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2411 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2412 return 0;
2414 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2415 We don't care about side effects in that case because the SAVE_EXPR
2416 takes care of that for us. In all other cases, two expressions are
2417 equal if they have no side effects. If we have two identical
2418 expressions with side effects that should be treated the same due
2419 to the only side effects being identical SAVE_EXPR's, that will
2420 be detected in the recursive calls below. */
2421 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2422 && (TREE_CODE (arg0) == SAVE_EXPR
2423 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2424 return 1;
2426 /* Next handle constant cases, those for which we can return 1 even
2427 if ONLY_CONST is set. */
2428 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2429 switch (TREE_CODE (arg0))
2431 case INTEGER_CST:
2432 return (! TREE_CONSTANT_OVERFLOW (arg0)
2433 && ! TREE_CONSTANT_OVERFLOW (arg1)
2434 && tree_int_cst_equal (arg0, arg1));
2436 case REAL_CST:
2437 return (! TREE_CONSTANT_OVERFLOW (arg0)
2438 && ! TREE_CONSTANT_OVERFLOW (arg1)
2439 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2440 TREE_REAL_CST (arg1)));
2442 case VECTOR_CST:
2444 tree v1, v2;
2446 if (TREE_CONSTANT_OVERFLOW (arg0)
2447 || TREE_CONSTANT_OVERFLOW (arg1))
2448 return 0;
2450 v1 = TREE_VECTOR_CST_ELTS (arg0);
2451 v2 = TREE_VECTOR_CST_ELTS (arg1);
2452 while (v1 && v2)
2454 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2455 flags))
2456 return 0;
2457 v1 = TREE_CHAIN (v1);
2458 v2 = TREE_CHAIN (v2);
2461 return 1;
2464 case COMPLEX_CST:
2465 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2466 flags)
2467 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2468 flags));
2470 case STRING_CST:
2471 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2472 && ! memcmp (TREE_STRING_POINTER (arg0),
2473 TREE_STRING_POINTER (arg1),
2474 TREE_STRING_LENGTH (arg0)));
2476 case ADDR_EXPR:
2477 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2478 0);
2479 default:
2480 break;
2483 if (flags & OEP_ONLY_CONST)
2484 return 0;
2486 /* Define macros to test an operand from arg0 and arg1 for equality and a
2487 variant that allows null and views null as being different from any
2488 non-null value. In the latter case, if either is null, then both
2489 must be; otherwise, do the normal comparison. */
2490 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2491 TREE_OPERAND (arg1, N), flags)
2493 #define OP_SAME_WITH_NULL(N) \
2494 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2495 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2497 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2499 case tcc_unary:
2500 /* Two conversions are equal only if signedness and modes match. */
2501 switch (TREE_CODE (arg0))
2503 case NOP_EXPR:
2504 case CONVERT_EXPR:
2505 case FIX_CEIL_EXPR:
2506 case FIX_TRUNC_EXPR:
2507 case FIX_FLOOR_EXPR:
2508 case FIX_ROUND_EXPR:
2509 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2510 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2511 return 0;
2512 break;
2513 default:
2514 break;
2517 return OP_SAME (0);
2520 case tcc_comparison:
2521 case tcc_binary:
2522 if (OP_SAME (0) && OP_SAME (1))
2523 return 1;
2525 /* For commutative ops, allow the other order. */
2526 return (commutative_tree_code (TREE_CODE (arg0))
2527 && operand_equal_p (TREE_OPERAND (arg0, 0),
2528 TREE_OPERAND (arg1, 1), flags)
2529 && operand_equal_p (TREE_OPERAND (arg0, 1),
2530 TREE_OPERAND (arg1, 0), flags));
2532 case tcc_reference:
2533 /* If either of the pointer (or reference) expressions we are
2534 dereferencing contain a side effect, these cannot be equal. */
2535 if (TREE_SIDE_EFFECTS (arg0)
2536 || TREE_SIDE_EFFECTS (arg1))
2537 return 0;
2539 switch (TREE_CODE (arg0))
2541 case INDIRECT_REF:
2542 case ALIGN_INDIRECT_REF:
2543 case MISALIGNED_INDIRECT_REF:
2544 case REALPART_EXPR:
2545 case IMAGPART_EXPR:
2546 return OP_SAME (0);
2548 case ARRAY_REF:
2549 case ARRAY_RANGE_REF:
2550 /* Operands 2 and 3 may be null. */
2551 return (OP_SAME (0)
2552 && OP_SAME (1)
2553 && OP_SAME_WITH_NULL (2)
2554 && OP_SAME_WITH_NULL (3));
2556 case COMPONENT_REF:
2557 /* Handle operand 2 the same as for ARRAY_REF. */
2558 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2560 case BIT_FIELD_REF:
2561 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2563 default:
2564 return 0;
2567 case tcc_expression:
2568 switch (TREE_CODE (arg0))
2570 case ADDR_EXPR:
2571 case TRUTH_NOT_EXPR:
2572 return OP_SAME (0);
2574 case TRUTH_ANDIF_EXPR:
2575 case TRUTH_ORIF_EXPR:
2576 return OP_SAME (0) && OP_SAME (1);
2578 case TRUTH_AND_EXPR:
2579 case TRUTH_OR_EXPR:
2580 case TRUTH_XOR_EXPR:
2581 if (OP_SAME (0) && OP_SAME (1))
2582 return 1;
2584 /* Otherwise take into account this is a commutative operation. */
2585 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2586 TREE_OPERAND (arg1, 1), flags)
2587 && operand_equal_p (TREE_OPERAND (arg0, 1),
2588 TREE_OPERAND (arg1, 0), flags));
2590 case CALL_EXPR:
2591 /* If the CALL_EXPRs call different functions, then they
2592 clearly can not be equal. */
2593 if (!OP_SAME (0))
2594 return 0;
2597 unsigned int cef = call_expr_flags (arg0);
2598 if (flags & OEP_PURE_SAME)
2599 cef &= ECF_CONST | ECF_PURE;
2600 else
2601 cef &= ECF_CONST;
2602 if (!cef)
2603 return 0;
2606 /* Now see if all the arguments are the same. operand_equal_p
2607 does not handle TREE_LIST, so we walk the operands here
2608 feeding them to operand_equal_p. */
2609 arg0 = TREE_OPERAND (arg0, 1);
2610 arg1 = TREE_OPERAND (arg1, 1);
2611 while (arg0 && arg1)
2613 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2614 flags))
2615 return 0;
2617 arg0 = TREE_CHAIN (arg0);
2618 arg1 = TREE_CHAIN (arg1);
2621 /* If we get here and both argument lists are exhausted
2622 then the CALL_EXPRs are equal. */
2623 return ! (arg0 || arg1);
2625 default:
2626 return 0;
2629 case tcc_declaration:
2630 /* Consider __builtin_sqrt equal to sqrt. */
2631 return (TREE_CODE (arg0) == FUNCTION_DECL
2632 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2633 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2634 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2636 default:
2637 return 0;
2640 #undef OP_SAME
2641 #undef OP_SAME_WITH_NULL
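/* Illustrative sketch, not part of fold-const.c: the two IEEE facts
   the comment above relies on, checked in standalone C.  -0.0 and 0.0
   compare equal yet are distinguishable, so REAL_VALUES_IDENTICAL
   rather than numeric equality is the right test; and a NaN never
   compares equal to itself even when the bits are identical.  Assumes
   a C99 compiler and an IEEE double target: */

#include <assert.h>
#include <math.h>
#include <string.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0;
  assert (pz == nz);                          /* equal under "=="...    */
  assert (!signbit (pz) && signbit (nz));     /* ...but distinguishable */
  assert (memcmp (&pz, &nz, sizeof pz) != 0);

  double n1 = NAN, n2 = n1;
  assert (n1 != n2);                          /* NaN != NaN...          */
  assert (memcmp (&n1, &n2, sizeof n1) == 0); /* ...yet identical bits  */
  return 0;
}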
2644 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2645 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2647 When in doubt, return 0. */
2649 static int
2650 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2652 int unsignedp1, unsignedpo;
2653 tree primarg0, primarg1, primother;
2654 unsigned int correct_width;
2656 if (operand_equal_p (arg0, arg1, 0))
2657 return 1;
2659 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2660 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2661 return 0;
2663 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2664 and see if the inner values are the same. This removes any
2665 signedness comparison, which doesn't matter here. */
2666 primarg0 = arg0, primarg1 = arg1;
2667 STRIP_NOPS (primarg0);
2668 STRIP_NOPS (primarg1);
2669 if (operand_equal_p (primarg0, primarg1, 0))
2670 return 1;
2672 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2673 actual comparison operand, ARG0.
2675 First throw away any conversions to wider types
2676 already present in the operands. */
2678 primarg1 = get_narrower (arg1, &unsignedp1);
2679 primother = get_narrower (other, &unsignedpo);
2681 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2682 if (unsignedp1 == unsignedpo
2683 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2684 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2686 tree type = TREE_TYPE (arg0);
2688 /* Make sure shorter operand is extended the right way
2689 to match the longer operand. */
2690 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2691 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2693 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2694 return 1;
2697 return 0;
2700 /* See if ARG is an expression that is either a comparison or is performing
2701 arithmetic on comparisons. The comparisons must only be comparing
2702 two different values, which will be stored in *CVAL1 and *CVAL2; if
2703 they are nonzero it means that some operands have already been found.
2704 No variables may be used anywhere else in the expression except in the
2705 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2706 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2708 If this is true, return 1. Otherwise, return zero. */
2710 static int
2711 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2713 enum tree_code code = TREE_CODE (arg);
2714 enum tree_code_class class = TREE_CODE_CLASS (code);
2716 /* We can handle some of the tcc_expression cases here. */
2717 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2718 class = tcc_unary;
2719 else if (class == tcc_expression
2720 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2721 || code == COMPOUND_EXPR))
2722 class = tcc_binary;
2724 else if (class == tcc_expression && code == SAVE_EXPR
2725 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2727 /* If we've already found a CVAL1 or CVAL2, this expression is
2728 too complex to handle. */
2729 if (*cval1 || *cval2)
2730 return 0;
2732 class = tcc_unary;
2733 *save_p = 1;
2736 switch (class)
2738 case tcc_unary:
2739 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2741 case tcc_binary:
2742 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2743 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2744 cval1, cval2, save_p));
2746 case tcc_constant:
2747 return 1;
2749 case tcc_expression:
2750 if (code == COND_EXPR)
2751 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2752 cval1, cval2, save_p)
2753 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2754 cval1, cval2, save_p)
2755 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2756 cval1, cval2, save_p));
2757 return 0;
2759 case tcc_comparison:
2760 /* First see if we can handle the first operand, then the second. For
2761 the second operand, we know *CVAL1 can't be zero. It must be that
2762 one side of the comparison is each of the values; test for the
2763 case where this isn't true by failing if the two operands
2764 are the same. */
2766 if (operand_equal_p (TREE_OPERAND (arg, 0),
2767 TREE_OPERAND (arg, 1), 0))
2768 return 0;
2770 if (*cval1 == 0)
2771 *cval1 = TREE_OPERAND (arg, 0);
2772 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2773 ;
2774 else if (*cval2 == 0)
2775 *cval2 = TREE_OPERAND (arg, 0);
2776 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2777 ;
2778 else
2779 return 0;
2781 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2782 ;
2783 else if (*cval2 == 0)
2784 *cval2 = TREE_OPERAND (arg, 1);
2785 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2787 else
2788 return 0;
2790 return 1;
2792 default:
2793 return 0;
2797 /* ARG is a tree that is known to contain just arithmetic operations and
2798 comparisons. Evaluate the operations in the tree substituting NEW0 for
2799 any occurrence of OLD0 as an operand of a comparison and likewise for
2800 NEW1 and OLD1. */
2802 static tree
2803 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2805 tree type = TREE_TYPE (arg);
2806 enum tree_code code = TREE_CODE (arg);
2807 enum tree_code_class class = TREE_CODE_CLASS (code);
2809 /* We can handle some of the tcc_expression cases here. */
2810 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2811 class = tcc_unary;
2812 else if (class == tcc_expression
2813 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2814 class = tcc_binary;
2816 switch (class)
2818 case tcc_unary:
2819 return fold_build1 (code, type,
2820 eval_subst (TREE_OPERAND (arg, 0),
2821 old0, new0, old1, new1));
2823 case tcc_binary:
2824 return fold_build2 (code, type,
2825 eval_subst (TREE_OPERAND (arg, 0),
2826 old0, new0, old1, new1),
2827 eval_subst (TREE_OPERAND (arg, 1),
2828 old0, new0, old1, new1));
2830 case tcc_expression:
2831 switch (code)
2833 case SAVE_EXPR:
2834 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2836 case COMPOUND_EXPR:
2837 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2839 case COND_EXPR:
2840 return fold_build3 (code, type,
2841 eval_subst (TREE_OPERAND (arg, 0),
2842 old0, new0, old1, new1),
2843 eval_subst (TREE_OPERAND (arg, 1),
2844 old0, new0, old1, new1),
2845 eval_subst (TREE_OPERAND (arg, 2),
2846 old0, new0, old1, new1));
2847 default:
2848 break;
2850 /* Fall through - ??? */
2852 case tcc_comparison:
2854 tree arg0 = TREE_OPERAND (arg, 0);
2855 tree arg1 = TREE_OPERAND (arg, 1);
2857 /* We need to check both for exact equality and tree equality. The
2858 former will be true if the operand has a side-effect. In that
2859 case, we know the operand occurred exactly once. */
2861 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2862 arg0 = new0;
2863 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2864 arg0 = new1;
2866 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2867 arg1 = new0;
2868 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2869 arg1 = new1;
2871 return fold_build2 (code, type, arg0, arg1);
2874 default:
2875 return arg;
2879 /* Return a tree for the case when the result of an expression is RESULT
2880 converted to TYPE and OMITTED was previously an operand of the expression
2881 but is now not needed (e.g., we folded OMITTED * 0).
2883 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2884 the conversion of RESULT to TYPE. */
2886 tree
2887 omit_one_operand (tree type, tree result, tree omitted)
2889 tree t = fold_convert (type, result);
2891 if (TREE_SIDE_EFFECTS (omitted))
2892 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2894 return non_lvalue (t);
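/* Illustrative sketch, not part of fold-const.c: the COMPOUND_EXPR
   built above corresponds to C's comma operator, which is exactly how
   a fold such as "f () * 0 -> 0" keeps the call's side effect.  A
   minimal standalone check: */

#include <assert.h>

static int calls;

static int
f (void)
{
  calls++;
  return 7;
}

int
main (void)
{
  /* "(f (), 0)" evaluates f for its side effect, then yields 0.  */
  int r = (f (), 0);
  assert (r == 0 && calls == 1);
  return 0;
}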
2897 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2899 static tree
2900 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2902 tree t = fold_convert (type, result);
2904 if (TREE_SIDE_EFFECTS (omitted))
2905 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2907 return pedantic_non_lvalue (t);
2910 /* Return a tree for the case when the result of an expression is RESULT
2911 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2912 of the expression but are now not needed.
2914 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2915 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2916 evaluated before OMITTED2. Otherwise, if neither has side effects,
2917 just do the conversion of RESULT to TYPE. */
2919 tree
2920 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2922 tree t = fold_convert (type, result);
2924 if (TREE_SIDE_EFFECTS (omitted2))
2925 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2926 if (TREE_SIDE_EFFECTS (omitted1))
2927 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2929 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2933 /* Return a simplified tree node for the truth-negation of ARG. This
2934 never alters ARG itself. We assume that ARG is an operation that
2935 returns a truth value (0 or 1).
2937 FIXME: one would think we would fold the result, but it causes
2938 problems with the dominator optimizer. */
2939 tree
2940 invert_truthvalue (tree arg)
2942 tree type = TREE_TYPE (arg);
2943 enum tree_code code = TREE_CODE (arg);
2945 if (code == ERROR_MARK)
2946 return arg;
2948 /* If this is a comparison, we can simply invert it, except for
2949 floating-point non-equality comparisons, in which case we just
2950 enclose a TRUTH_NOT_EXPR around what we have. */
2952 if (TREE_CODE_CLASS (code) == tcc_comparison)
2954 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2955 if (FLOAT_TYPE_P (op_type)
2956 && flag_trapping_math
2957 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2958 && code != NE_EXPR && code != EQ_EXPR)
2959 return build1 (TRUTH_NOT_EXPR, type, arg);
2960 else
2962 code = invert_tree_comparison (code,
2963 HONOR_NANS (TYPE_MODE (op_type)));
2964 if (code == ERROR_MARK)
2965 return build1 (TRUTH_NOT_EXPR, type, arg);
2966 else
2967 return build2 (code, type,
2968 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2972 switch (code)
2974 case INTEGER_CST:
2975 return constant_boolean_node (integer_zerop (arg), type);
2977 case TRUTH_AND_EXPR:
2978 return build2 (TRUTH_OR_EXPR, type,
2979 invert_truthvalue (TREE_OPERAND (arg, 0)),
2980 invert_truthvalue (TREE_OPERAND (arg, 1)));
2982 case TRUTH_OR_EXPR:
2983 return build2 (TRUTH_AND_EXPR, type,
2984 invert_truthvalue (TREE_OPERAND (arg, 0)),
2985 invert_truthvalue (TREE_OPERAND (arg, 1)));
2987 case TRUTH_XOR_EXPR:
2988 /* Here we can invert either operand. We invert the first operand
2989 unless the second operand is a TRUTH_NOT_EXPR in which case our
2990 result is the XOR of the first operand with the inside of the
2991 negation of the second operand. */
2993 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2994 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2995 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2996 else
2997 return build2 (TRUTH_XOR_EXPR, type,
2998 invert_truthvalue (TREE_OPERAND (arg, 0)),
2999 TREE_OPERAND (arg, 1));
3001 case TRUTH_ANDIF_EXPR:
3002 return build2 (TRUTH_ORIF_EXPR, type,
3003 invert_truthvalue (TREE_OPERAND (arg, 0)),
3004 invert_truthvalue (TREE_OPERAND (arg, 1)));
3006 case TRUTH_ORIF_EXPR:
3007 return build2 (TRUTH_ANDIF_EXPR, type,
3008 invert_truthvalue (TREE_OPERAND (arg, 0)),
3009 invert_truthvalue (TREE_OPERAND (arg, 1)));
3011 case TRUTH_NOT_EXPR:
3012 return TREE_OPERAND (arg, 0);
3014 case COND_EXPR:
3015 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3016 invert_truthvalue (TREE_OPERAND (arg, 1)),
3017 invert_truthvalue (TREE_OPERAND (arg, 2)));
3019 case COMPOUND_EXPR:
3020 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3021 invert_truthvalue (TREE_OPERAND (arg, 1)));
3023 case NON_LVALUE_EXPR:
3024 return invert_truthvalue (TREE_OPERAND (arg, 0));
3026 case NOP_EXPR:
3027 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3028 break;
3030 case CONVERT_EXPR:
3031 case FLOAT_EXPR:
3032 return build1 (TREE_CODE (arg), type,
3033 invert_truthvalue (TREE_OPERAND (arg, 0)));
3035 case BIT_AND_EXPR:
3036 if (!integer_onep (TREE_OPERAND (arg, 1)))
3037 break;
3038 return build2 (EQ_EXPR, type, arg,
3039 fold_convert (type, integer_zero_node));
3041 case SAVE_EXPR:
3042 return build1 (TRUTH_NOT_EXPR, type, arg);
3044 case CLEANUP_POINT_EXPR:
3045 return build1 (CLEANUP_POINT_EXPR, type,
3046 invert_truthvalue (TREE_OPERAND (arg, 0)));
3048 default:
3049 break;
3051 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3052 return build1 (TRUTH_NOT_EXPR, type, arg);
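/* Illustrative sketch, not part of fold-const.c: the TRUTH_AND/OR
   cases above are De Morgan's laws, and the COND_EXPR case pushes the
   negation into both arms.  An exhaustive check over truth values: */

#include <assert.h>

int
main (void)
{
  int a, b;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      {
        assert (!(a && b) == (!a || !b));
        assert (!(a || b) == (!a && !b));
        assert (!(a ? b : !b) == (a ? !b : b));
      }
  return 0;
}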
3055 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3056 operands are another bit-wise operation with a common input. If so,
3057 distribute the bit operations to save an operation and possibly two if
3058 constants are involved. For example, convert
3059 (A | B) & (A | C) into A | (B & C)
3060 Further simplification will occur if B and C are constants.
3062 If this optimization cannot be done, 0 will be returned. */
3064 static tree
3065 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3067 tree common;
3068 tree left, right;
3070 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3071 || TREE_CODE (arg0) == code
3072 || (TREE_CODE (arg0) != BIT_AND_EXPR
3073 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3074 return 0;
3076 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3078 common = TREE_OPERAND (arg0, 0);
3079 left = TREE_OPERAND (arg0, 1);
3080 right = TREE_OPERAND (arg1, 1);
3082 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3084 common = TREE_OPERAND (arg0, 0);
3085 left = TREE_OPERAND (arg0, 1);
3086 right = TREE_OPERAND (arg1, 0);
3088 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3090 common = TREE_OPERAND (arg0, 1);
3091 left = TREE_OPERAND (arg0, 0);
3092 right = TREE_OPERAND (arg1, 1);
3094 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3096 common = TREE_OPERAND (arg0, 1);
3097 left = TREE_OPERAND (arg0, 0);
3098 right = TREE_OPERAND (arg1, 0);
3100 else
3101 return 0;
3103 return fold_build2 (TREE_CODE (arg0), type, common,
3104 fold_build2 (code, type, left, right));
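/* Illustrative sketch, not part of fold-const.c: the distribution
   performed above, checked exhaustively on small operands.  Both
   directions hold because AND and OR distribute over each other on
   bit vectors: */

#include <assert.h>

int
main (void)
{
  unsigned a, b, c;
  for (a = 0; a < 8; a++)
    for (b = 0; b < 8; b++)
      for (c = 0; c < 8; c++)
        {
          assert (((a | b) & (a | c)) == (a | (b & c)));
          assert (((a & b) | (a & c)) == (a & (b | c)));
        }
  return 0;
}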
3107 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3108 with code CODE. This optimization is unsafe: it may change FP rounding. */
3109 static tree
3110 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3112 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3113 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3115 /* (A / C) +- (B / C) -> (A +- B) / C. */
3116 if (mul0 == mul1
3117 && operand_equal_p (TREE_OPERAND (arg0, 1),
3118 TREE_OPERAND (arg1, 1), 0))
3119 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3120 fold_build2 (code, type,
3121 TREE_OPERAND (arg0, 0),
3122 TREE_OPERAND (arg1, 0)),
3123 TREE_OPERAND (arg0, 1));
3125 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3126 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3127 TREE_OPERAND (arg1, 0), 0)
3128 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3129 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3131 REAL_VALUE_TYPE r0, r1;
3132 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3133 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3134 if (!mul0)
3135 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3136 if (!mul1)
3137 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3138 real_arithmetic (&r0, code, &r0, &r1);
3139 return fold_build2 (MULT_EXPR, type,
3140 TREE_OPERAND (arg0, 0),
3141 build_real (type, r0));
3144 return NULL_TREE;
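/* Illustrative sketch, not part of fold-const.c: why the
   transformation above is unsafe for floating point.  (A / C) +
   (B / C) and (A + B) / C each round once per operation, and the
   roundings need not agree; the loop below scans for a mismatch and,
   under IEEE doubles, is expected to find one (the values scanned are
   arbitrary; results vary by target): */

#include <stdio.h>

int
main (void)
{
  int i;
  for (i = 1; i <= 100; i++)
    {
      double a = i, b = i + 2, c = 7.0;
      double x = a / c + b / c;
      double y = (a + b) / c;
      if (x != y)
        {
          printf ("i=%d: %.17g != %.17g\n", i, x, y);
          return 0;
        }
    }
  printf ("no mismatch found in this range\n");
  return 0;
}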
3147 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3148 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3150 static tree
3151 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3152 int unsignedp)
3154 tree result;
3156 if (bitpos == 0)
3158 tree size = TYPE_SIZE (TREE_TYPE (inner));
3159 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3160 || POINTER_TYPE_P (TREE_TYPE (inner)))
3161 && host_integerp (size, 0)
3162 && tree_low_cst (size, 0) == bitsize)
3163 return fold_convert (type, inner);
3166 result = build3 (BIT_FIELD_REF, type, inner,
3167 size_int (bitsize), bitsize_int (bitpos));
3169 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3171 return result;
3174 /* Optimize a bit-field compare.
3176 There are two cases: First is a compare against a constant and the
3177 second is a comparison of two items where the fields are at the same
3178 bit position relative to the start of a chunk (byte, halfword, word)
3179 large enough to contain it. In these cases we can avoid the shift
3180 implicit in bitfield extractions.
3182 For constants, we emit a compare of the shifted constant with the
3183 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3184 compared. For two fields at the same position, we do the ANDs with the
3185 similar mask and compare the result of the ANDs.
3187 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3188 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3189 are the left and right operands of the comparison, respectively.
3191 If the optimization described above can be done, we return the resulting
3192 tree. Otherwise we return zero. */
3194 static tree
3195 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3196 tree lhs, tree rhs)
3198 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3199 tree type = TREE_TYPE (lhs);
3200 tree signed_type, unsigned_type;
3201 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3202 enum machine_mode lmode, rmode, nmode;
3203 int lunsignedp, runsignedp;
3204 int lvolatilep = 0, rvolatilep = 0;
3205 tree linner, rinner = NULL_TREE;
3206 tree mask;
3207 tree offset;
3209 /* Get all the information about the extractions being done. If the bit size
3210 is the same as the size of the underlying object, we aren't doing an
3211 extraction at all and so can do nothing. We also don't want to
3212 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3213 then will no longer be able to replace it. */
3214 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3215 &lunsignedp, &lvolatilep, false);
3216 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3217 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3218 return 0;
3220 if (!const_p)
3222 /* If this is not a constant, we can only do something if bit positions,
3223 sizes, and signedness are the same. */
3224 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3225 &runsignedp, &rvolatilep, false);
3227 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3228 || lunsignedp != runsignedp || offset != 0
3229 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3230 return 0;
3233 /* See if we can find a mode to refer to this field. We should be able to,
3234 but fail if we can't. */
3235 nmode = get_best_mode (lbitsize, lbitpos,
3236 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3237 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3238 TYPE_ALIGN (TREE_TYPE (rinner))),
3239 word_mode, lvolatilep || rvolatilep);
3240 if (nmode == VOIDmode)
3241 return 0;
3243 /* Set signed and unsigned types of the precision of this mode for the
3244 shifts below. */
3245 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3246 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3248 /* Compute the bit position and size for the new reference and our offset
3249 within it. If the new reference is the same size as the original, we
3250 won't optimize anything, so return zero. */
3251 nbitsize = GET_MODE_BITSIZE (nmode);
3252 nbitpos = lbitpos & ~ (nbitsize - 1);
3253 lbitpos -= nbitpos;
3254 if (nbitsize == lbitsize)
3255 return 0;
3257 if (BYTES_BIG_ENDIAN)
3258 lbitpos = nbitsize - lbitsize - lbitpos;
3260 /* Make the mask to be used against the extracted field. */
3261 mask = build_int_cst (unsigned_type, -1);
3262 mask = force_fit_type (mask, 0, false, false);
3263 mask = fold_convert (unsigned_type, mask);
3264 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3265 mask = const_binop (RSHIFT_EXPR, mask,
3266 size_int (nbitsize - lbitsize - lbitpos), 0);
3268 if (! const_p)
3269 /* If not comparing with constant, just rework the comparison
3270 and return. */
3271 return build2 (code, compare_type,
3272 build2 (BIT_AND_EXPR, unsigned_type,
3273 make_bit_field_ref (linner, unsigned_type,
3274 nbitsize, nbitpos, 1),
3275 mask),
3276 build2 (BIT_AND_EXPR, unsigned_type,
3277 make_bit_field_ref (rinner, unsigned_type,
3278 nbitsize, nbitpos, 1),
3279 mask));
3281 /* Otherwise, we are handling the constant case. See if the constant is too
3282 big for the field. Warn and return a tree for 0 (false) if so. We do
3283 this not only for its own sake, but to avoid having to test for this
3284 error case below. If we didn't, we might generate wrong code.
3286 For unsigned fields, the constant shifted right by the field length should
3287 be all zero. For signed fields, the high-order bits should agree with
3288 the sign bit. */
3290 if (lunsignedp)
3292 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3293 fold_convert (unsigned_type, rhs),
3294 size_int (lbitsize), 0)))
3296 warning (0, "comparison is always %d due to width of bit-field",
3297 code == NE_EXPR);
3298 return constant_boolean_node (code == NE_EXPR, compare_type);
3301 else
3303 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3304 size_int (lbitsize - 1), 0);
3305 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3307 warning (0, "comparison is always %d due to width of bit-field",
3308 code == NE_EXPR);
3309 return constant_boolean_node (code == NE_EXPR, compare_type);
3313 /* Single-bit compares should always be against zero. */
3314 if (lbitsize == 1 && ! integer_zerop (rhs))
3316 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3317 rhs = fold_convert (type, integer_zero_node);
3320 /* Make a new bitfield reference, shift the constant over the
3321 appropriate number of bits and mask it with the computed mask
3322 (in case this was a signed field). If we changed it, make a new one. */
3323 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3324 if (lvolatilep)
3326 TREE_SIDE_EFFECTS (lhs) = 1;
3327 TREE_THIS_VOLATILE (lhs) = 1;
3330 rhs = fold (const_binop (BIT_AND_EXPR,
3331 const_binop (LSHIFT_EXPR,
3332 fold_convert (unsigned_type, rhs),
3333 size_int (lbitpos), 0),
3334 mask, 0));
3336 return build2 (code, compare_type,
3337 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3338 rhs);
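/* Illustrative sketch, not part of fold-const.c: the shift-free
   compare produced above.  Extracting a SIZE-bit field at POS and
   comparing it with CST is equivalent to masking the containing word
   and comparing against the shifted constant; checked exhaustively
   for an 8-bit word (POS and SIZE are arbitrary example values): */

#include <assert.h>

int
main (void)
{
  const unsigned pos = 3, size = 2;
  const unsigned mask = ((1u << size) - 1) << pos;
  unsigned x, cst;
  for (x = 0; x < 256; x++)
    for (cst = 0; cst < (1u << size); cst++)
      {
        int extract = ((x >> pos) & ((1u << size) - 1)) == cst;
        int masked = (x & mask) == (cst << pos);
        assert (extract == masked);
      }
  return 0;
}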
3341 /* Subroutine for fold_truthop: decode a field reference.
3343 If EXP is a comparison reference, we return the innermost reference.
3345 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3346 set to the starting bit number.
3348 If the innermost field can be completely contained in a mode-sized
3349 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3351 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3352 otherwise it is not changed.
3354 *PUNSIGNEDP is set to the signedness of the field.
3356 *PMASK is set to the mask used. This is either contained in a
3357 BIT_AND_EXPR or derived from the width of the field.
3359 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3361 Return 0 if this is not a component reference or is one that we can't
3362 do anything with. */
3364 static tree
3365 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3366 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3367 int *punsignedp, int *pvolatilep,
3368 tree *pmask, tree *pand_mask)
3370 tree outer_type = 0;
3371 tree and_mask = 0;
3372 tree mask, inner, offset;
3373 tree unsigned_type;
3374 unsigned int precision;
3376 /* All the optimizations using this function assume integer fields.
3377 There are problems with FP fields since the type_for_size call
3378 below can fail for, e.g., XFmode. */
3379 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3380 return 0;
3382 /* We are interested in the bare arrangement of bits, so strip everything
3383 that doesn't affect the machine mode. However, record the type of the
3384 outermost expression if it may matter below. */
3385 if (TREE_CODE (exp) == NOP_EXPR
3386 || TREE_CODE (exp) == CONVERT_EXPR
3387 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3388 outer_type = TREE_TYPE (exp);
3389 STRIP_NOPS (exp);
3391 if (TREE_CODE (exp) == BIT_AND_EXPR)
3393 and_mask = TREE_OPERAND (exp, 1);
3394 exp = TREE_OPERAND (exp, 0);
3395 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3396 if (TREE_CODE (and_mask) != INTEGER_CST)
3397 return 0;
3400 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3401 punsignedp, pvolatilep, false);
3402 if ((inner == exp && and_mask == 0)
3403 || *pbitsize < 0 || offset != 0
3404 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3405 return 0;
3407 /* If the number of bits in the reference is the same as the bitsize of
3408 the outer type, then the outer type gives the signedness. Otherwise
3409 (in case of a small bitfield) the signedness is unchanged. */
3410 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3411 *punsignedp = TYPE_UNSIGNED (outer_type);
3413 /* Compute the mask to access the bitfield. */
3414 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3415 precision = TYPE_PRECISION (unsigned_type);
3417 mask = build_int_cst (unsigned_type, -1);
3418 mask = force_fit_type (mask, 0, false, false);
3420 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3421 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3423 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3424 if (and_mask != 0)
3425 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3426 fold_convert (unsigned_type, and_mask), mask);
3428 *pmask = mask;
3429 *pand_mask = and_mask;
3430 return inner;
3433 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3434 bit positions. */
3436 static int
3437 all_ones_mask_p (tree mask, int size)
3439 tree type = TREE_TYPE (mask);
3440 unsigned int precision = TYPE_PRECISION (type);
3441 tree tmask;
3443 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3444 tmask = force_fit_type (tmask, 0, false, false);
3446 return
3447 tree_int_cst_equal (mask,
3448 const_binop (RSHIFT_EXPR,
3449 const_binop (LSHIFT_EXPR, tmask,
3450 size_int (precision - size),
3451 0),
3452 size_int (precision - size), 0));
3455 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3456 represents the sign bit of EXP's type. If EXP represents a sign
3457 or zero extension, also test VAL against the unextended type.
3458 The return value is the (sub)expression whose sign bit is VAL,
3459 or NULL_TREE otherwise. */
3461 static tree
3462 sign_bit_p (tree exp, tree val)
3464 unsigned HOST_WIDE_INT mask_lo, lo;
3465 HOST_WIDE_INT mask_hi, hi;
3466 int width;
3467 tree t;
3469 /* Tree EXP must have an integral type. */
3470 t = TREE_TYPE (exp);
3471 if (! INTEGRAL_TYPE_P (t))
3472 return NULL_TREE;
3474 /* Tree VAL must be an integer constant. */
3475 if (TREE_CODE (val) != INTEGER_CST
3476 || TREE_CONSTANT_OVERFLOW (val))
3477 return NULL_TREE;
3479 width = TYPE_PRECISION (t);
3480 if (width > HOST_BITS_PER_WIDE_INT)
3482 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3483 lo = 0;
3485 mask_hi = ((unsigned HOST_WIDE_INT) -1
3486 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3487 mask_lo = -1;
3489 else
3491 hi = 0;
3492 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3494 mask_hi = 0;
3495 mask_lo = ((unsigned HOST_WIDE_INT) -1
3496 >> (HOST_BITS_PER_WIDE_INT - width));
3499 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3500 treat VAL as if it were unsigned. */
3501 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3502 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3503 return exp;
3505 /* Handle extension from a narrower type. */
3506 if (TREE_CODE (exp) == NOP_EXPR
3507 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3508 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3510 return NULL_TREE;
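/* Illustrative sketch, not part of fold-const.c: the sign bit of a
   width-W type is 1 << (W - 1), and testing it is equivalent to a
   signed "< 0" comparison -- the fold this helper enables.  Checked
   exhaustively for an 8-bit type, assuming two's complement: */

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int i;
  for (i = -128; i <= 127; i++)
    {
      uint8_t u = (uint8_t) i;
      assert (((u & 0x80u) != 0) == (i < 0));
    }
  return 0;
}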
3513 /* Subroutine for fold_truthop: determine if an operand is simple enough
3514 to be evaluated unconditionally. */
3516 static int
3517 simple_operand_p (tree exp)
3519 /* Strip any conversions that don't change the machine mode. */
3520 STRIP_NOPS (exp);
3522 return (CONSTANT_CLASS_P (exp)
3523 || TREE_CODE (exp) == SSA_NAME
3524 || (DECL_P (exp)
3525 && ! TREE_ADDRESSABLE (exp)
3526 && ! TREE_THIS_VOLATILE (exp)
3527 && ! DECL_NONLOCAL (exp)
3528 /* Don't regard global variables as simple. They may be
3529 allocated in ways unknown to the compiler (shared memory,
3530 #pragma weak, etc). */
3531 && ! TREE_PUBLIC (exp)
3532 && ! DECL_EXTERNAL (exp)
3533 /* Loading a static variable is unduly expensive, but global
3534 registers aren't expensive. */
3535 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3538 /* The following functions are subroutines to fold_range_test and allow it to
3539 try to change a logical combination of comparisons into a range test.
3541 For example, both
3542 X == 2 || X == 3 || X == 4 || X == 5
3543 and
3544 X >= 2 && X <= 5
3545 are converted to
3546 (unsigned) (X - 2) <= 3
3548 We describe each set of comparisons as being either inside or outside
3549 a range, using a variable named like IN_P, and then describe the
3550 range with a lower and upper bound. If one of the bounds is omitted,
3551 it represents either the highest or lowest value of the type.
3553 In the comments below, we represent a range by two numbers in brackets
3554 preceded by a "+" to designate being inside that range, or a "-" to
3555 designate being outside that range, so the condition can be inverted by
3556 flipping the prefix. An omitted bound is represented by a "-". For
3557 example, "- [-, 10]" means being outside the range starting at the lowest
3558 possible value and ending at 10, in other words, being greater than 10.
3559 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3560 always false.
3562 We set up things so that the missing bounds are handled in a consistent
3563 manner so neither a missing bound nor "true" and "false" need to be
3564 handled using a special case. */
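/* Illustrative sketch, not part of fold-const.c: the canonical
   range-test rewrite described above.  Membership in an inclusive
   range [LO, HI] becomes a single unsigned compare,
   (unsigned) (X - LO) <= HI - LO, because values below LO wrap
   around to large unsigned numbers: */

#include <assert.h>

int
main (void)
{
  int x;
  for (x = -20; x <= 20; x++)
    {
      int direct = (x == 2 || x == 3 || x == 4 || x == 5);
      int range = (unsigned) (x - 2) <= 3u;
      assert (direct == range);
    }
  return 0;
}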
3566 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3567 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3568 and UPPER1_P are nonzero if the respective argument is an upper bound
3569 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3570 must be specified for a comparison. ARG1 will be converted to ARG0's
3571 type if both are specified. */
3573 static tree
3574 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3575 tree arg1, int upper1_p)
3577 tree tem;
3578 int result;
3579 int sgn0, sgn1;
3581 /* If neither arg represents infinity, do the normal operation.
3582 Else, if not a comparison, return infinity. Else handle the special
3583 comparison rules. Note that most of the cases below won't occur, but
3584 are handled for consistency. */
3586 if (arg0 != 0 && arg1 != 0)
3588 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3589 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3590 STRIP_NOPS (tem);
3591 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3594 if (TREE_CODE_CLASS (code) != tcc_comparison)
3595 return 0;
3597 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3598 for neither. In real maths, we cannot assume open ended ranges are
3599 the same. But, this is computer arithmetic, where numbers are finite.
3600 We can therefore make the transformation of any unbounded range with
3601 the value Z, Z being greater than any representable number. This permits
3602 us to treat unbounded ranges as equal. */
3603 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3604 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3605 switch (code)
3607 case EQ_EXPR:
3608 result = sgn0 == sgn1;
3609 break;
3610 case NE_EXPR:
3611 result = sgn0 != sgn1;
3612 break;
3613 case LT_EXPR:
3614 result = sgn0 < sgn1;
3615 break;
3616 case LE_EXPR:
3617 result = sgn0 <= sgn1;
3618 break;
3619 case GT_EXPR:
3620 result = sgn0 > sgn1;
3621 break;
3622 case GE_EXPR:
3623 result = sgn0 >= sgn1;
3624 break;
3625 default:
3626 gcc_unreachable ();
3629 return constant_boolean_node (result, type);
3632 /* Given EXP, a logical expression, set the range it is testing into
3633 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3634 actually being tested. *PLOW and *PHIGH will be made of the same type
3635 as the returned expression. If EXP is not a comparison, we will most
3636 likely not be returning a useful value and range. */
3638 static tree
3639 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3641 enum tree_code code;
3642 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3643 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3644 int in_p, n_in_p;
3645 tree low, high, n_low, n_high;
3647 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3648 and see if we can refine the range. Some of the cases below may not
3649 happen, but it doesn't seem worth worrying about this. We "continue"
3650 the outer loop when we've changed something; otherwise we "break"
3651 the switch, which will "break" the while. */
3653 in_p = 0;
3654 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3656 while (1)
3658 code = TREE_CODE (exp);
3659 exp_type = TREE_TYPE (exp);
3661 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3663 if (TREE_CODE_LENGTH (code) > 0)
3664 arg0 = TREE_OPERAND (exp, 0);
3665 if (TREE_CODE_CLASS (code) == tcc_comparison
3666 || TREE_CODE_CLASS (code) == tcc_unary
3667 || TREE_CODE_CLASS (code) == tcc_binary)
3668 arg0_type = TREE_TYPE (arg0);
3669 if (TREE_CODE_CLASS (code) == tcc_binary
3670 || TREE_CODE_CLASS (code) == tcc_comparison
3671 || (TREE_CODE_CLASS (code) == tcc_expression
3672 && TREE_CODE_LENGTH (code) > 1))
3673 arg1 = TREE_OPERAND (exp, 1);
3676 switch (code)
3678 case TRUTH_NOT_EXPR:
3679 in_p = ! in_p, exp = arg0;
3680 continue;
3682 case EQ_EXPR: case NE_EXPR:
3683 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3684 /* We can only do something if the range is testing for zero
3685 and if the second operand is an integer constant. Note that
3686 saying something is "in" the range we make is done by
3687 complementing IN_P, since the initial range is set up as being
3688 not equal to zero; "out" is leaving it alone.
3689 if (low == 0 || high == 0
3690 || ! integer_zerop (low) || ! integer_zerop (high)
3691 || TREE_CODE (arg1) != INTEGER_CST)
3692 break;
3694 switch (code)
3696 case NE_EXPR: /* - [c, c] */
3697 low = high = arg1;
3698 break;
3699 case EQ_EXPR: /* + [c, c] */
3700 in_p = ! in_p, low = high = arg1;
3701 break;
3702 case GT_EXPR: /* - [-, c] */
3703 low = 0, high = arg1;
3704 break;
3705 case GE_EXPR: /* + [c, -] */
3706 in_p = ! in_p, low = arg1, high = 0;
3707 break;
3708 case LT_EXPR: /* - [c, -] */
3709 low = arg1, high = 0;
3710 break;
3711 case LE_EXPR: /* + [-, c] */
3712 in_p = ! in_p, low = 0, high = arg1;
3713 break;
3714 default:
3715 gcc_unreachable ();
3718 /* If this is an unsigned comparison, we also know that EXP is
3719 greater than or equal to zero. We base the range tests we make
3720 on that fact, so we record it here so we can parse existing
3721 range tests. We test arg0_type since often the return type
3722 of, e.g. EQ_EXPR, is boolean. */
3723 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3725 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3726 in_p, low, high, 1,
3727 fold_convert (arg0_type, integer_zero_node),
3728 NULL_TREE))
3729 break;
3731 in_p = n_in_p, low = n_low, high = n_high;
3733 /* If the high bound is missing, but we have a nonzero low
3734 bound, reverse the range so it goes from zero to the low bound
3735 minus 1. */
3736 if (high == 0 && low && ! integer_zerop (low))
3738 in_p = ! in_p;
3739 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3740 integer_one_node, 0);
3741 low = fold_convert (arg0_type, integer_zero_node);
3745 exp = arg0;
3746 continue;
3748 case NEGATE_EXPR:
3749 /* (-x) IN [a,b] -> x in [-b, -a] */
3750 n_low = range_binop (MINUS_EXPR, exp_type,
3751 fold_convert (exp_type, integer_zero_node),
3752 0, high, 1);
3753 n_high = range_binop (MINUS_EXPR, exp_type,
3754 fold_convert (exp_type, integer_zero_node),
3755 0, low, 0);
3756 low = n_low, high = n_high;
3757 exp = arg0;
3758 continue;
3760 case BIT_NOT_EXPR:
3761 /* ~ X -> -X - 1 */
3762 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3763 fold_convert (exp_type, integer_one_node));
3764 continue;
3766 case PLUS_EXPR: case MINUS_EXPR:
3767 if (TREE_CODE (arg1) != INTEGER_CST)
3768 break;
3770 /* If EXP is signed, any overflow in the computation is undefined,
3771 so we don't worry about it so long as our computations on
3772 the bounds don't overflow. For unsigned, overflow is defined
3773 and this is exactly the right thing. */
3774 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3775 arg0_type, low, 0, arg1, 0);
3776 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3777 arg0_type, high, 1, arg1, 0);
3778 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3779 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3780 break;
3782 /* Check for an unsigned range which has wrapped around the maximum
3783 value thus making n_high < n_low, and normalize it. */
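/* For example (illustrative, assuming an 8-bit unsigned type):
   X + 10 in + [5, 250] gives n_low = 251 and n_high = 240;
   normalizing produces the equivalent range - [241, 250].  */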
3784 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3786 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3787 integer_one_node, 0);
3788 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3789 integer_one_node, 0);
3791 /* If the range is of the form +/- [ x+1, x ], we won't
3792 be able to normalize it. But then, it represents the
3793 whole range or the empty set, so make it
3794 +/- [ -, - ]. */
3795 if (tree_int_cst_equal (n_low, low)
3796 && tree_int_cst_equal (n_high, high))
3797 low = high = 0;
3798 else
3799 in_p = ! in_p;
3801 else
3802 low = n_low, high = n_high;
3804 exp = arg0;
3805 continue;
3807 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3808 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3809 break;
3811 if (! INTEGRAL_TYPE_P (arg0_type)
3812 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3813 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3814 break;
3816 n_low = low, n_high = high;
3818 if (n_low != 0)
3819 n_low = fold_convert (arg0_type, n_low);
3821 if (n_high != 0)
3822 n_high = fold_convert (arg0_type, n_high);
3825 /* If we're converting arg0 from an unsigned type to exp's
3826 signed type, we will be doing the comparison as unsigned.
3827 The tests above have already verified that LOW and HIGH
3828 are both positive.
3830 So we have to ensure that we will handle large unsigned
3831 values the same way that the current signed bounds treat
3832 negative values. */
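/* For example (illustrative): if ARG0 is unsigned int and EXP is
   signed int, ARG0 values above INT_MAX appear negative through EXP,
   so bounds computed on EXP cannot simply be reused for ARG0.  */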
3834 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3836 tree high_positive;
3837 tree equiv_type = lang_hooks.types.type_for_mode
3838 (TYPE_MODE (arg0_type), 1);
3840 /* A range without an upper bound is, naturally, unbounded.
3841 Since convert would have cropped a very large value, use
3842 the max value for the destination type. */
3843 high_positive
3844 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3845 : TYPE_MAX_VALUE (arg0_type);
3847 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3848 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3849 fold_convert (arg0_type,
3850 high_positive),
3851 fold_convert (arg0_type,
3852 integer_one_node));
3854 /* If the low bound is specified, "and" the range with the
3855 range for which the original unsigned value will be
3856 positive. */
3857 if (low != 0)
3859 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3860 1, n_low, n_high, 1,
3861 fold_convert (arg0_type,
3862 integer_zero_node),
3863 high_positive))
3864 break;
3866 in_p = (n_in_p == in_p);
3868 else
3870 /* Otherwise, "or" the range with the range of the input
3871 that will be interpreted as negative. */
3872 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3873 0, n_low, n_high, 1,
3874 fold_convert (arg0_type,
3875 integer_zero_node),
3876 high_positive))
3877 break;
3879 in_p = (in_p != n_in_p);
3883 exp = arg0;
3884 low = n_low, high = n_high;
3885 continue;
3887 default:
3888 break;
3891 break;
3894 /* If EXP is a constant, we can evaluate whether this is true or false. */
3895 if (TREE_CODE (exp) == INTEGER_CST)
3897 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3898 exp, 0, low, 0))
3899 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3900 exp, 1, high, 1)));
3901 low = high = 0;
3902 exp = 0;
3905 *pin_p = in_p, *plow = low, *phigh = high;
3906 return exp;
3909 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3910 type, TYPE, return an expression to test if EXP is in (or out of, depending
3911 on IN_P) the range. Return 0 if the test couldn't be created. */
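/* For example (illustrative): for a signed EXP, the range + [3, 10]
   can be lowered to roughly (unsigned) (EXP - 3) <= 7, a single
   comparison.  */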
3913 static tree
3914 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3916 tree etype = TREE_TYPE (exp);
3917 tree value;
3919 if (! in_p)
3921 value = build_range_check (type, exp, 1, low, high);
3922 if (value != 0)
3923 return invert_truthvalue (value);
3925 return 0;
3928 if (low == 0 && high == 0)
3929 return fold_convert (type, integer_one_node);
3931 if (low == 0)
3932 return fold_build2 (LE_EXPR, type, exp, high);
3934 if (high == 0)
3935 return fold_build2 (GE_EXPR, type, exp, low);
3937 if (operand_equal_p (low, high, 0))
3938 return fold_build2 (EQ_EXPR, type, exp, low);
3940 if (integer_zerop (low))
3942 if (! TYPE_UNSIGNED (etype))
3944 etype = lang_hooks.types.unsigned_type (etype);
3945 high = fold_convert (etype, high);
3946 exp = fold_convert (etype, exp);
3948 return build_range_check (type, exp, 1, 0, high);
3951 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3952 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3954 unsigned HOST_WIDE_INT lo;
3955 HOST_WIDE_INT hi;
3956 int prec;
3958 prec = TYPE_PRECISION (etype);
3959 if (prec <= HOST_BITS_PER_WIDE_INT)
3961 hi = 0;
3962 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3964 else
3966 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3967 lo = (unsigned HOST_WIDE_INT) -1;
3970 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3972 if (TYPE_UNSIGNED (etype))
3974 etype = lang_hooks.types.signed_type (etype);
3975 exp = fold_convert (etype, exp);
3977 return fold_build2 (GT_EXPR, type, exp,
3978 fold_convert (etype, integer_zero_node));
3982 value = const_binop (MINUS_EXPR, high, low, 0);
3983 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3985 tree utype, minv, maxv;
3987 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3988 for the type in question, as we rely on this here. */
3989 switch (TREE_CODE (etype))
3991 case INTEGER_TYPE:
3992 case ENUMERAL_TYPE:
3993 case CHAR_TYPE:
3994 utype = lang_hooks.types.unsigned_type (etype);
3995 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3996 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3997 integer_one_node, 1);
3998 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3999 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4000 minv, 1, maxv, 1)))
4002 etype = utype;
4003 high = fold_convert (etype, high);
4004 low = fold_convert (etype, low);
4005 exp = fold_convert (etype, exp);
4006 value = const_binop (MINUS_EXPR, high, low, 0);
4008 break;
4009 default:
4010 break;
4014 if (value != 0 && ! TREE_OVERFLOW (value))
4015 return build_range_check (type,
4016 fold_build2 (MINUS_EXPR, etype, exp, low),
4017 1, fold_convert (etype, integer_zero_node),
4018 value);
4020 return 0;
4023 /* Given two ranges, see if we can merge them into one. Return 1 if we
4024 can, 0 if we can't. Set the output range into the specified parameters. */
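/* For example (illustrative): the two "out" ranges - [0, 4] and
   - [5, 9] are adjacent, so they merge into the single range
   - [0, 9].  */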
4026 static int
4027 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4028 tree high0, int in1_p, tree low1, tree high1)
4030 int no_overlap;
4031 int subset;
4032 int temp;
4033 tree tem;
4034 int in_p;
4035 tree low, high;
4036 int lowequal = ((low0 == 0 && low1 == 0)
4037 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4038 low0, 0, low1, 0)));
4039 int highequal = ((high0 == 0 && high1 == 0)
4040 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4041 high0, 1, high1, 1)));
4043 /* Make range 0 be the range that starts first, or ends last if they
4044 start at the same value. Swap them if that isn't the case. */
4045 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4046 low0, 0, low1, 0))
4047 || (lowequal
4048 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4049 high1, 1, high0, 1))))
4051 temp = in0_p, in0_p = in1_p, in1_p = temp;
4052 tem = low0, low0 = low1, low1 = tem;
4053 tem = high0, high0 = high1, high1 = tem;
4056 /* Now flag two cases, whether the ranges are disjoint or whether the
4057 second range is totally subsumed in the first. Note that the tests
4058 below are simplified by the ones above. */
4059 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4060 high0, 1, low1, 0));
4061 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4062 high1, 1, high0, 1));
4064 /* We now have four cases, depending on whether we are including or
4065 excluding the two ranges. */
4066 if (in0_p && in1_p)
4068 /* If they don't overlap, the result is false. If the second range
4069 is a subset it is the result. Otherwise, the range is from the start
4070 of the second to the end of the first. */
4071 if (no_overlap)
4072 in_p = 0, low = high = 0;
4073 else if (subset)
4074 in_p = 1, low = low1, high = high1;
4075 else
4076 in_p = 1, low = low1, high = high0;
4079 else if (in0_p && ! in1_p)
4081 /* If they don't overlap, the result is the first range. If they are
4082 equal, the result is false. If the second range is a subset of the
4083 first, and the ranges begin at the same place, we go from just after
4084 the end of the first range to the end of the second. If the second
4085 range is not a subset of the first, or if it is a subset and both
4086 ranges end at the same place, the range starts at the start of the
4087 first range and ends just before the second range.
4088 Otherwise, we can't describe this as a single range. */
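/* For example (illustrative): + [0, 10] combined with the excluded
   range - [0, 5] begins at the same place and [0, 5] is a subset,
   so the result is + [6, 10].  */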
4089 if (no_overlap)
4090 in_p = 1, low = low0, high = high0;
4091 else if (lowequal && highequal)
4092 in_p = 0, low = high = 0;
4093 else if (subset && lowequal)
4095 in_p = 1, high = high0;
4096 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4097 integer_one_node, 0);
4099 else if (! subset || highequal)
4101 in_p = 1, low = low0;
4102 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4103 integer_one_node, 0);
4105 else
4106 return 0;
4109 else if (! in0_p && in1_p)
4111 /* If they don't overlap, the result is the second range. If the second
4112 is a subset of the first, the result is false. Otherwise,
4113 the range starts just after the first range and ends at the
4114 end of the second. */
4115 if (no_overlap)
4116 in_p = 1, low = low1, high = high1;
4117 else if (subset || highequal)
4118 in_p = 0, low = high = 0;
4119 else
4121 in_p = 1, high = high1;
4122 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4123 integer_one_node, 0);
4127 else
4129 /* The case where we are excluding both ranges. Here the complex case
4130 is if they don't overlap. In that case, the only time we have a
4131 range is if they are adjacent. If the second is a subset of the
4132 first, the result is the first. Otherwise, the range to exclude
4133 starts at the beginning of the first range and ends at the end of the
4134 second. */
4135 if (no_overlap)
4137 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4138 range_binop (PLUS_EXPR, NULL_TREE,
4139 high0, 1,
4140 integer_one_node, 1),
4141 1, low1, 0)))
4142 in_p = 0, low = low0, high = high1;
4143 else
4145 /* Canonicalize - [min, x] into - [-, x]. */
4146 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4147 switch (TREE_CODE (TREE_TYPE (low0)))
4149 case ENUMERAL_TYPE:
4150 if (TYPE_PRECISION (TREE_TYPE (low0))
4151 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4152 break;
4153 /* FALLTHROUGH */
4154 case INTEGER_TYPE:
4155 case CHAR_TYPE:
4156 if (tree_int_cst_equal (low0,
4157 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4158 low0 = 0;
4159 break;
4160 case POINTER_TYPE:
4161 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4162 && integer_zerop (low0))
4163 low0 = 0;
4164 break;
4165 default:
4166 break;
4169 /* Canonicalize - [x, max] into - [x, -]. */
4170 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4171 switch (TREE_CODE (TREE_TYPE (high1)))
4173 case ENUMERAL_TYPE:
4174 if (TYPE_PRECISION (TREE_TYPE (high1))
4175 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4176 break;
4177 /* FALLTHROUGH */
4178 case INTEGER_TYPE:
4179 case CHAR_TYPE:
4180 if (tree_int_cst_equal (high1,
4181 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4182 high1 = 0;
4183 break;
4184 case POINTER_TYPE:
4185 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4186 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4187 high1, 1,
4188 integer_one_node, 1)))
4189 high1 = 0;
4190 break;
4191 default:
4192 break;
4195 /* The ranges might also be adjacent between the maximum and
4196 minimum values of the given type. For
4197 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4198 return + [x + 1, y - 1]. */
4199 if (low0 == 0 && high1 == 0)
4201 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4202 integer_one_node, 1);
4203 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4204 integer_one_node, 0);
4205 if (low == 0 || high == 0)
4206 return 0;
4208 in_p = 1;
4210 else
4211 return 0;
4214 else if (subset)
4215 in_p = 0, low = low0, high = high0;
4216 else
4217 in_p = 0, low = low0, high = high1;
4220 *pin_p = in_p, *plow = low, *phigh = high;
4221 return 1;
4225 /* Subroutine of fold, looking inside expressions of the form
4226 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4227 of the COND_EXPR. This function is also used to optimize
4228 A op B ? C : A, by reversing the comparison first.
4230 Return a folded expression whose code is not a COND_EXPR
4231 anymore, or NULL_TREE if no folding opportunity is found. */
4233 static tree
4234 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4236 enum tree_code comp_code = TREE_CODE (arg0);
4237 tree arg00 = TREE_OPERAND (arg0, 0);
4238 tree arg01 = TREE_OPERAND (arg0, 1);
4239 tree arg1_type = TREE_TYPE (arg1);
4240 tree tem;
4242 STRIP_NOPS (arg1);
4243 STRIP_NOPS (arg2);
4245 /* If we have A op 0 ? A : -A, consider applying the following
4246 transformations:
4248 A == 0? A : -A same as -A
4249 A != 0? A : -A same as A
4250 A >= 0? A : -A same as abs (A)
4251 A > 0? A : -A same as abs (A)
4252 A <= 0? A : -A same as -abs (A)
4253 A < 0? A : -A same as -abs (A)
4255 None of these transformations work for modes with signed
4256 zeros. If A is +/-0, the first two transformations will
4257 change the sign of the result (from +0 to -0, or vice
4258 versa). The last four will fix the sign of the result,
4259 even though the original expressions could be positive or
4260 negative, depending on the sign of A.
4262 Note that all these transformations are correct if A is
4263 NaN, since the two alternatives (A and -A) are also NaNs. */
4264 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4265 ? real_zerop (arg01)
4266 : integer_zerop (arg01))
4267 && ((TREE_CODE (arg2) == NEGATE_EXPR
4268 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4269 /* In the case that A is of the form X-Y, '-A' (arg2) may
4270 have already been folded to Y-X, check for that. */
4271 || (TREE_CODE (arg1) == MINUS_EXPR
4272 && TREE_CODE (arg2) == MINUS_EXPR
4273 && operand_equal_p (TREE_OPERAND (arg1, 0),
4274 TREE_OPERAND (arg2, 1), 0)
4275 && operand_equal_p (TREE_OPERAND (arg1, 1),
4276 TREE_OPERAND (arg2, 0), 0))))
4277 switch (comp_code)
4279 case EQ_EXPR:
4280 case UNEQ_EXPR:
4281 tem = fold_convert (arg1_type, arg1);
4282 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4283 case NE_EXPR:
4284 case LTGT_EXPR:
4285 return pedantic_non_lvalue (fold_convert (type, arg1));
4286 case UNGE_EXPR:
4287 case UNGT_EXPR:
4288 if (flag_trapping_math)
4289 break;
4290 /* Fall through. */
4291 case GE_EXPR:
4292 case GT_EXPR:
4293 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4294 arg1 = fold_convert (lang_hooks.types.signed_type
4295 (TREE_TYPE (arg1)), arg1);
4296 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4297 return pedantic_non_lvalue (fold_convert (type, tem));
4298 case UNLE_EXPR:
4299 case UNLT_EXPR:
4300 if (flag_trapping_math)
4301 break;
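/* Fall through. */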
4302 case LE_EXPR:
4303 case LT_EXPR:
4304 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4305 arg1 = fold_convert (lang_hooks.types.signed_type
4306 (TREE_TYPE (arg1)), arg1);
4307 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4308 return negate_expr (fold_convert (type, tem));
4309 default:
4310 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4311 break;
4314 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4315 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4316 both transformations are correct when A is NaN: A != 0
4317 is then true, and A == 0 is false. */
4319 if (integer_zerop (arg01) && integer_zerop (arg2))
4321 if (comp_code == NE_EXPR)
4322 return pedantic_non_lvalue (fold_convert (type, arg1));
4323 else if (comp_code == EQ_EXPR)
4324 return fold_convert (type, integer_zero_node);
4327 /* Try some transformations of A op B ? A : B.
4329 A == B? A : B same as B
4330 A != B? A : B same as A
4331 A >= B? A : B same as max (A, B)
4332 A > B? A : B same as max (B, A)
4333 A <= B? A : B same as min (A, B)
4334 A < B? A : B same as min (B, A)
4336 As above, these transformations don't work in the presence
4337 of signed zeros. For example, if A and B are zeros of
4338 opposite sign, the first two transformations will change
4339 the sign of the result. In the last four, the original
4340 expressions give different results for (A=+0, B=-0) and
4341 (A=-0, B=+0), but the transformed expressions do not.
4343 The first two transformations are correct if either A or B
4344 is a NaN. In the first transformation, the condition will
4345 be false, and B will indeed be chosen. In the case of the
4346 second transformation, the condition A != B will be true,
4347 and A will be chosen.
4349 The conversions to max() and min() are not correct if B is
4350 a number and A is not. The conditions in the original
4351 expressions will be false, so all four give B. The min()
4352 and max() versions would give a NaN instead. */
4353 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4354 /* Avoid these transformations if the COND_EXPR may be used
4355 as an lvalue in the C++ front-end. PR c++/19199. */
4356 && (in_gimple_form
4357 || strcmp (lang_hooks.name, "GNU C++") != 0
4358 || ! maybe_lvalue_p (arg1)
4359 || ! maybe_lvalue_p (arg2)))
4361 tree comp_op0 = arg00;
4362 tree comp_op1 = arg01;
4363 tree comp_type = TREE_TYPE (comp_op0);
4365 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4366 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4368 comp_type = type;
4369 comp_op0 = arg1;
4370 comp_op1 = arg2;
4373 switch (comp_code)
4375 case EQ_EXPR:
4376 return pedantic_non_lvalue (fold_convert (type, arg2));
4377 case NE_EXPR:
4378 return pedantic_non_lvalue (fold_convert (type, arg1));
4379 case LE_EXPR:
4380 case LT_EXPR:
4381 case UNLE_EXPR:
4382 case UNLT_EXPR:
4383 /* In C++ a ?: expression can be an lvalue, so put the
4384 operand which will be used if they are equal first
4385 so that we can convert this back to the
4386 corresponding COND_EXPR. */
4387 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4389 comp_op0 = fold_convert (comp_type, comp_op0);
4390 comp_op1 = fold_convert (comp_type, comp_op1);
4391 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4392 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4393 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4394 return pedantic_non_lvalue (fold_convert (type, tem));
4396 break;
4397 case GE_EXPR:
4398 case GT_EXPR:
4399 case UNGE_EXPR:
4400 case UNGT_EXPR:
4401 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4403 comp_op0 = fold_convert (comp_type, comp_op0);
4404 comp_op1 = fold_convert (comp_type, comp_op1);
4405 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4406 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4407 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4408 return pedantic_non_lvalue (fold_convert (type, tem));
4410 break;
4411 case UNEQ_EXPR:
4412 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4413 return pedantic_non_lvalue (fold_convert (type, arg2));
4414 break;
4415 case LTGT_EXPR:
4416 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4417 return pedantic_non_lvalue (fold_convert (type, arg1));
4418 break;
4419 default:
4420 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4421 break;
4425 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4426 we might still be able to simplify this. For example,
4427 if C1 is one less or one more than C2, this might have started
4428 out as a MIN or MAX and been transformed by this function.
4429 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
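/* For example (illustrative): "A < 4 ? A : 3" has C1 == C2 + 1, so
   the LT_EXPR case below rewrites it as MIN (A, 3).  */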
4431 if (INTEGRAL_TYPE_P (type)
4432 && TREE_CODE (arg01) == INTEGER_CST
4433 && TREE_CODE (arg2) == INTEGER_CST)
4434 switch (comp_code)
4436 case EQ_EXPR:
4437 /* We can replace A with C1 in this case. */
4438 arg1 = fold_convert (type, arg01);
4439 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4441 case LT_EXPR:
4442 /* If C1 is C2 + 1, this is min(A, C2). */
4443 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4444 OEP_ONLY_CONST)
4445 && operand_equal_p (arg01,
4446 const_binop (PLUS_EXPR, arg2,
4447 integer_one_node, 0),
4448 OEP_ONLY_CONST))
4449 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4450 type, arg1, arg2));
4451 break;
4453 case LE_EXPR:
4454 /* If C1 is C2 - 1, this is min(A, C2). */
4455 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4456 OEP_ONLY_CONST)
4457 && operand_equal_p (arg01,
4458 const_binop (MINUS_EXPR, arg2,
4459 integer_one_node, 0),
4460 OEP_ONLY_CONST))
4461 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4462 type, arg1, arg2));
4463 break;
4465 case GT_EXPR:
4466 /* If C1 is C2 - 1, this is max(A, C2). */
4467 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4468 OEP_ONLY_CONST)
4469 && operand_equal_p (arg01,
4470 const_binop (MINUS_EXPR, arg2,
4471 integer_one_node, 0),
4472 OEP_ONLY_CONST))
4473 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4474 type, arg1, arg2));
4475 break;
4477 case GE_EXPR:
4478 /* If C1 is C2 + 1, this is max(A, C2). */
4479 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4480 OEP_ONLY_CONST)
4481 && operand_equal_p (arg01,
4482 const_binop (PLUS_EXPR, arg2,
4483 integer_one_node, 0),
4484 OEP_ONLY_CONST))
4485 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4486 type, arg1, arg2));
4487 break;
4488 case NE_EXPR:
4489 break;
4490 default:
4491 gcc_unreachable ();
4494 return NULL_TREE;
4499 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4500 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4501 #endif
4503 /* EXP is some logical combination of boolean tests. See if we can
4504 merge it into some range test. Return the new tree if so. */
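/* For example (illustrative): "ch >= '0' && ch <= '9'" yields two
   ranges that merge_ranges combines into + ['0', '9'], which
   build_range_check then lowers to one unsigned comparison.  */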
4506 static tree
4507 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4509 int or_op = (code == TRUTH_ORIF_EXPR
4510 || code == TRUTH_OR_EXPR);
4511 int in0_p, in1_p, in_p;
4512 tree low0, low1, low, high0, high1, high;
4513 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4514 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4515 tree tem;
4517 /* If this is an OR operation, invert both sides; we will invert
4518 again at the end. */
4519 if (or_op)
4520 in0_p = ! in0_p, in1_p = ! in1_p;
4522 /* If both expressions are the same, if we can merge the ranges, and we
4523 can build the range test, return it or it inverted. If one of the
4524 ranges is always true or always false, consider it to be the same
4525 expression as the other. */
4526 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4527 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4528 in1_p, low1, high1)
4529 && 0 != (tem = (build_range_check (type,
4530 lhs != 0 ? lhs
4531 : rhs != 0 ? rhs : integer_zero_node,
4532 in_p, low, high))))
4533 return or_op ? invert_truthvalue (tem) : tem;
4535 /* On machines where the branch cost is expensive, if this is a
4536 short-circuited branch and the underlying object on both sides
4537 is the same, make a non-short-circuit operation. */
4538 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4539 && lhs != 0 && rhs != 0
4540 && (code == TRUTH_ANDIF_EXPR
4541 || code == TRUTH_ORIF_EXPR)
4542 && operand_equal_p (lhs, rhs, 0))
4544 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4545 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4546 which cases we can't do this. */
4547 if (simple_operand_p (lhs))
4548 return build2 (code == TRUTH_ANDIF_EXPR
4549 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4550 type, op0, op1);
4552 else if (lang_hooks.decls.global_bindings_p () == 0
4553 && ! CONTAINS_PLACEHOLDER_P (lhs))
4555 tree common = save_expr (lhs);
4557 if (0 != (lhs = build_range_check (type, common,
4558 or_op ? ! in0_p : in0_p,
4559 low0, high0))
4560 && (0 != (rhs = build_range_check (type, common,
4561 or_op ? ! in1_p : in1_p,
4562 low1, high1))))
4563 return build2 (code == TRUTH_ANDIF_EXPR
4564 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4565 type, lhs, rhs);
4569 return 0;
4572 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4573 bit value. Arrange things so the extra bits will be set to zero if and
4574 only if C is sign-extended to its full width. If MASK is nonzero,
4575 it is an INTEGER_CST that should be AND'ed with the extra bits. */
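/* For example (illustrative, P == 4 in an 8-bit mode): the
   sign-extended constant 0xfa becomes 0x0a, while the zero-extended
   0x0a becomes 0xfa, so the extra bits end up zero exactly when C
   was sign-extended.  */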
4577 static tree
4578 unextend (tree c, int p, int unsignedp, tree mask)
4580 tree type = TREE_TYPE (c);
4581 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4582 tree temp;
4584 if (p == modesize || unsignedp)
4585 return c;
4587 /* We work by getting just the sign bit into the low-order bit, then
4588 into the high-order bit, then sign-extend. We then XOR that value
4589 with C. */
4590 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4591 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4593 /* We must use a signed type in order to get an arithmetic right shift.
4594 However, we must also avoid introducing accidental overflows, so that
4595 a subsequent call to integer_zerop will work. Hence we must
4596 do the type conversion here. At this point, the constant is either
4597 zero or one, and the conversion to a signed type can never overflow.
4598 We could get an overflow if this conversion is done anywhere else. */
4599 if (TYPE_UNSIGNED (type))
4600 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4602 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4603 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4604 if (mask != 0)
4605 temp = const_binop (BIT_AND_EXPR, temp,
4606 fold_convert (TREE_TYPE (c), mask), 0);
4607 /* If necessary, convert the type back to match the type of C. */
4608 if (TYPE_UNSIGNED (type))
4609 temp = fold_convert (type, temp);
4611 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4614 /* Find ways of folding logical expressions of LHS and RHS:
4615 Try to merge two comparisons to the same innermost item.
4616 Look for range tests like "ch >= '0' && ch <= '9'".
4617 Look for combinations of simple terms on machines with expensive branches
4618 and evaluate the RHS unconditionally.
4620 For example, if we have p->a == 2 && p->b == 4 and we can make an
4621 object large enough to span both A and B, we can do this with a comparison
4622 against the object ANDed with a mask.
4624 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4625 operations to do this with one comparison.
4627 We check for both normal comparisons and the BIT_AND_EXPRs made by
4628 this function and the one above.
4630 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4631 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4633 TRUTH_TYPE is the type of the logical operation, and LHS and RHS are its
4634 two operands.
4636 We return the simplified tree or 0 if no optimization is possible. */
4638 static tree
4639 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4641 /* If this is the "or" of two comparisons, we can do something if
4642 the comparisons are NE_EXPR. If this is the "and", we can do something
4643 if the comparisons are EQ_EXPR. I.e.,
4644 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4646 WANTED_CODE is this operation code. For single bit fields, we can
4647 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4648 comparison for one-bit fields. */
4650 enum tree_code wanted_code;
4651 enum tree_code lcode, rcode;
4652 tree ll_arg, lr_arg, rl_arg, rr_arg;
4653 tree ll_inner, lr_inner, rl_inner, rr_inner;
4654 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4655 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4656 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4657 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4658 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4659 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4660 enum machine_mode lnmode, rnmode;
4661 tree ll_mask, lr_mask, rl_mask, rr_mask;
4662 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4663 tree l_const, r_const;
4664 tree lntype, rntype, result;
4665 int first_bit, end_bit;
4666 int volatilep;
4668 /* Start by getting the comparison codes. Fail if anything is volatile.
4669 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4670 it were surrounded with a NE_EXPR. */
4672 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4673 return 0;
4675 lcode = TREE_CODE (lhs);
4676 rcode = TREE_CODE (rhs);
4678 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4680 lhs = build2 (NE_EXPR, truth_type, lhs,
4681 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4682 lcode = NE_EXPR;
4685 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4687 rhs = build2 (NE_EXPR, truth_type, rhs,
4688 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4689 rcode = NE_EXPR;
4692 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4693 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4694 return 0;
4696 ll_arg = TREE_OPERAND (lhs, 0);
4697 lr_arg = TREE_OPERAND (lhs, 1);
4698 rl_arg = TREE_OPERAND (rhs, 0);
4699 rr_arg = TREE_OPERAND (rhs, 1);
4701 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4702 if (simple_operand_p (ll_arg)
4703 && simple_operand_p (lr_arg))
4705 tree result;
4706 if (operand_equal_p (ll_arg, rl_arg, 0)
4707 && operand_equal_p (lr_arg, rr_arg, 0))
4709 result = combine_comparisons (code, lcode, rcode,
4710 truth_type, ll_arg, lr_arg);
4711 if (result)
4712 return result;
4714 else if (operand_equal_p (ll_arg, rr_arg, 0)
4715 && operand_equal_p (lr_arg, rl_arg, 0))
4717 result = combine_comparisons (code, lcode,
4718 swap_tree_comparison (rcode),
4719 truth_type, ll_arg, lr_arg);
4720 if (result)
4721 return result;
4725 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4726 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4728 /* If the RHS can be evaluated unconditionally and its operands are
4729 simple, it wins to evaluate the RHS unconditionally on machines
4730 with expensive branches. In this case, this isn't a comparison
4731 that can be merged. Avoid doing this if the RHS is a floating-point
4732 comparison since those can trap. */
4734 if (BRANCH_COST >= 2
4735 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4736 && simple_operand_p (rl_arg)
4737 && simple_operand_p (rr_arg))
4739 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4740 if (code == TRUTH_OR_EXPR
4741 && lcode == NE_EXPR && integer_zerop (lr_arg)
4742 && rcode == NE_EXPR && integer_zerop (rr_arg)
4743 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4744 return build2 (NE_EXPR, truth_type,
4745 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4746 ll_arg, rl_arg),
4747 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4749 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4750 if (code == TRUTH_AND_EXPR
4751 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4752 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4753 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4754 return build2 (EQ_EXPR, truth_type,
4755 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4756 ll_arg, rl_arg),
4757 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4759 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4760 return build2 (code, truth_type, lhs, rhs);
4763 /* See if the comparisons can be merged. Then get all the parameters for
4764 each side. */
4766 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4767 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4768 return 0;
4770 volatilep = 0;
4771 ll_inner = decode_field_reference (ll_arg,
4772 &ll_bitsize, &ll_bitpos, &ll_mode,
4773 &ll_unsignedp, &volatilep, &ll_mask,
4774 &ll_and_mask);
4775 lr_inner = decode_field_reference (lr_arg,
4776 &lr_bitsize, &lr_bitpos, &lr_mode,
4777 &lr_unsignedp, &volatilep, &lr_mask,
4778 &lr_and_mask);
4779 rl_inner = decode_field_reference (rl_arg,
4780 &rl_bitsize, &rl_bitpos, &rl_mode,
4781 &rl_unsignedp, &volatilep, &rl_mask,
4782 &rl_and_mask);
4783 rr_inner = decode_field_reference (rr_arg,
4784 &rr_bitsize, &rr_bitpos, &rr_mode,
4785 &rr_unsignedp, &volatilep, &rr_mask,
4786 &rr_and_mask);
4788 /* The inner operation on the lhs of each comparison must be the
4789 same if we are to be able to do anything.
4790 Then see if we have constants. If not, the same must be true for
4791 the rhs's. */
4792 if (volatilep || ll_inner == 0 || rl_inner == 0
4793 || ! operand_equal_p (ll_inner, rl_inner, 0))
4794 return 0;
4796 if (TREE_CODE (lr_arg) == INTEGER_CST
4797 && TREE_CODE (rr_arg) == INTEGER_CST)
4798 l_const = lr_arg, r_const = rr_arg;
4799 else if (lr_inner == 0 || rr_inner == 0
4800 || ! operand_equal_p (lr_inner, rr_inner, 0))
4801 return 0;
4802 else
4803 l_const = r_const = 0;
4805 /* If either comparison code is not correct for our logical operation,
4806 fail. However, we can convert a one-bit comparison against zero into
4807 the opposite comparison against that bit being set in the field. */
4809 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4810 if (lcode != wanted_code)
4812 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4814 /* Make the left operand unsigned, since we are only interested
4815 in the value of one bit. Otherwise we are doing the wrong
4816 thing below. */
4817 ll_unsignedp = 1;
4818 l_const = ll_mask;
4820 else
4821 return 0;
4824 /* This is analogous to the code for l_const above. */
4825 if (rcode != wanted_code)
4827 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4829 rl_unsignedp = 1;
4830 r_const = rl_mask;
4832 else
4833 return 0;
4836 /* After this point all optimizations will generate bit-field
4837 references, which we might not want. */
4838 if (! lang_hooks.can_use_bit_fields_p ())
4839 return 0;
4841 /* See if we can find a mode that contains both fields being compared on
4842 the left. If we can't, fail. Otherwise, update all constants and masks
4843 to be relative to a field of that size. */
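/* For example (illustrative): fields at bit positions [0, 8) and
   [8, 16) give first_bit = 0 and end_bit = 16, so a 16-bit mode can
   cover both, with lnbitpos = 0.  */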
4844 first_bit = MIN (ll_bitpos, rl_bitpos);
4845 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4846 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4847 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4848 volatilep);
4849 if (lnmode == VOIDmode)
4850 return 0;
4852 lnbitsize = GET_MODE_BITSIZE (lnmode);
4853 lnbitpos = first_bit & ~ (lnbitsize - 1);
4854 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4855 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4857 if (BYTES_BIG_ENDIAN)
4859 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4860 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4863 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4864 size_int (xll_bitpos), 0);
4865 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4866 size_int (xrl_bitpos), 0);
4868 if (l_const)
4870 l_const = fold_convert (lntype, l_const);
4871 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4872 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4873 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4874 fold_build1 (BIT_NOT_EXPR,
4875 lntype, ll_mask),
4876 0)))
4878 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4880 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4883 if (r_const)
4885 r_const = fold_convert (lntype, r_const);
4886 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4887 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4888 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4889 fold_build1 (BIT_NOT_EXPR,
4890 lntype, rl_mask),
4891 0)))
4893 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4895 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4899 /* If the right sides are not constant, do the same for them. Also,
4900 disallow this optimization if a size or signedness mismatch occurs
4901 between the left and right sides. */
4902 if (l_const == 0)
4904 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4905 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4906 /* Make sure the two fields on the right
4907 correspond to the left without being swapped. */
4908 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4909 return 0;
4911 first_bit = MIN (lr_bitpos, rr_bitpos);
4912 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4913 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4914 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4915 volatilep);
4916 if (rnmode == VOIDmode)
4917 return 0;
4919 rnbitsize = GET_MODE_BITSIZE (rnmode);
4920 rnbitpos = first_bit & ~ (rnbitsize - 1);
4921 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4922 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4924 if (BYTES_BIG_ENDIAN)
4926 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4927 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4930 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4931 size_int (xlr_bitpos), 0);
4932 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4933 size_int (xrr_bitpos), 0);
4935 /* Make a mask that corresponds to both fields being compared.
4936 Do this for both items being compared. If the operands are the
4937 same size and the bits being compared are in the same position
4938 then we can do this by masking both and comparing the masked
4939 results. */
4940 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4941 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4942 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4944 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4945 ll_unsignedp || rl_unsignedp);
4946 if (! all_ones_mask_p (ll_mask, lnbitsize))
4947 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4949 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4950 lr_unsignedp || rr_unsignedp);
4951 if (! all_ones_mask_p (lr_mask, rnbitsize))
4952 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4954 return build2 (wanted_code, truth_type, lhs, rhs);
4957 /* There is still another way we can do something: If both pairs of
4958 fields being compared are adjacent, we may be able to make a wider
4959 field containing them both.
4961 Note that we still must mask the lhs/rhs expressions. Furthermore,
4962 the mask must be shifted to account for the shift done by
4963 make_bit_field_ref. */
4964 if ((ll_bitsize + ll_bitpos == rl_bitpos
4965 && lr_bitsize + lr_bitpos == rr_bitpos)
4966 || (ll_bitpos == rl_bitpos + rl_bitsize
4967 && lr_bitpos == rr_bitpos + rr_bitsize))
4969 tree type;
4971 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4972 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4973 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4974 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4976 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4977 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4978 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4979 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4981 /* Convert to the smaller type before masking out unwanted bits. */
4982 type = lntype;
4983 if (lntype != rntype)
4985 if (lnbitsize > rnbitsize)
4987 lhs = fold_convert (rntype, lhs);
4988 ll_mask = fold_convert (rntype, ll_mask);
4989 type = rntype;
4991 else if (lnbitsize < rnbitsize)
4993 rhs = fold_convert (lntype, rhs);
4994 lr_mask = fold_convert (lntype, lr_mask);
4995 type = lntype;
4999 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5000 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5002 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5003 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5005 return build2 (wanted_code, truth_type, lhs, rhs);
5008 return 0;
5011 /* Handle the case of comparisons with constants. If there is something in
5012 common between the masks, those bits of the constants must be the same.
5013 If not, the condition is always false. Test for this to avoid generating
5014 incorrect code below. */
5015 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5016 if (! integer_zerop (result)
5017 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5018 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5020 if (wanted_code == NE_EXPR)
5022 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5023 return constant_boolean_node (true, truth_type);
5025 else
5027 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5028 return constant_boolean_node (false, truth_type);
5032 /* Construct the expression we will return. First get the component
5033 reference we will make. Unless the mask is all ones the width of
5034 that field, perform the mask operation. Then compare with the
5035 merged constant. */
5036 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5037 ll_unsignedp || rl_unsignedp);
5039 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5040 if (! all_ones_mask_p (ll_mask, lnbitsize))
5041 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5043 return build2 (wanted_code, truth_type, result,
5044 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5047 /* Optimize a comparison (CODE, with operands OP0 and OP1) of a
5048 MIN_EXPR or MAX_EXPR against a constant. */
5050 static tree
5051 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5053 tree arg0 = op0;
5054 enum tree_code op_code;
5055 tree comp_const = op1;
5056 tree minmax_const;
5057 int consts_equal, consts_lt;
5058 tree inner;
5060 STRIP_SIGN_NOPS (arg0);
5062 op_code = TREE_CODE (arg0);
5063 minmax_const = TREE_OPERAND (arg0, 1);
5064 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5065 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5066 inner = TREE_OPERAND (arg0, 0);
5068 /* If something does not permit us to optimize, return the original tree. */
5069 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5070 || TREE_CODE (comp_const) != INTEGER_CST
5071 || TREE_CONSTANT_OVERFLOW (comp_const)
5072 || TREE_CODE (minmax_const) != INTEGER_CST
5073 || TREE_CONSTANT_OVERFLOW (minmax_const))
5074 return NULL_TREE;
5076 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5077 and GT_EXPR, doing the rest with recursive calls using logical
5078 simplifications. */
5079 switch (code)
5081 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5083 /* FIXME: We should be able to invert code without building a
5084 scratch tree node, but doing so would require us to
5085 duplicate a part of invert_truthvalue here. */
5086 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5087 tem = optimize_minmax_comparison (TREE_CODE (tem),
5088 TREE_TYPE (tem),
5089 TREE_OPERAND (tem, 0),
5090 TREE_OPERAND (tem, 1));
5091 return invert_truthvalue (tem);
5094 case GE_EXPR:
5095 return
5096 fold_build2 (TRUTH_ORIF_EXPR, type,
5097 optimize_minmax_comparison
5098 (EQ_EXPR, type, arg0, comp_const),
5099 optimize_minmax_comparison
5100 (GT_EXPR, type, arg0, comp_const));
5102 case EQ_EXPR:
5103 if (op_code == MAX_EXPR && consts_equal)
5104 /* MAX (X, 0) == 0 -> X <= 0 */
5105 return fold_build2 (LE_EXPR, type, inner, comp_const);
5107 else if (op_code == MAX_EXPR && consts_lt)
5108 /* MAX (X, 0) == 5 -> X == 5 */
5109 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5111 else if (op_code == MAX_EXPR)
5112 /* MAX (X, 0) == -1 -> false */
5113 return omit_one_operand (type, integer_zero_node, inner);
5115 else if (consts_equal)
5116 /* MIN (X, 0) == 0 -> X >= 0 */
5117 return fold_build2 (GE_EXPR, type, inner, comp_const);
5119 else if (consts_lt)
5120 /* MIN (X, 0) == 5 -> false */
5121 return omit_one_operand (type, integer_zero_node, inner);
5123 else
5124 /* MIN (X, 0) == -1 -> X == -1 */
5125 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5127 case GT_EXPR:
5128 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5129 /* MAX (X, 0) > 0 -> X > 0
5130 MAX (X, 0) > 5 -> X > 5 */
5131 return fold_build2 (GT_EXPR, type, inner, comp_const);
5133 else if (op_code == MAX_EXPR)
5134 /* MAX (X, 0) > -1 -> true */
5135 return omit_one_operand (type, integer_one_node, inner);
5137 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5138 /* MIN (X, 0) > 0 -> false
5139 MIN (X, 0) > 5 -> false */
5140 return omit_one_operand (type, integer_zero_node, inner);
5142 else
5143 /* MIN (X, 0) > -1 -> X > -1 */
5144 return fold_build2 (GT_EXPR, type, inner, comp_const);
5146 default:
5147 return NULL_TREE;
5151 /* T is an integer expression that is multiplied or divided by, or
5152 taken modulo, a constant C (CODE says which operation and what kind
5153 of divide or modulus). See if we can eliminate that operation by folding it with
5154 other operations already in T. WIDE_TYPE, if non-null, is a type that
5155 should be used for the computation if wider than our type.
5157 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5158 (X * 2) + (Y * 4). We must, however, be assured that either the original
5159 expression would not overflow or that overflow is undefined for the type
5160 in the language in question.
5162 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5163 the machine has a multiply-accumulate insn or that this is part of an
5164 addressing calculation.
5166 If we return a non-null expression, it is an equivalent form of the
5167 original computation, but need not be in the original type. */
5169 static tree
5170 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5172 /* To avoid exponential search depth, refuse to allow recursion past
5173 three levels. Beyond that (1) it's highly unlikely that we'll find
5174 something interesting and (2) we've probably processed it before
5175 when we built the inner expression. */
5177 static int depth;
5178 tree ret;
5180 if (depth > 3)
5181 return NULL;
5183 depth++;
5184 ret = extract_muldiv_1 (t, c, code, wide_type);
5185 depth--;
5187 return ret;
5190 static tree
5191 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5193 tree type = TREE_TYPE (t);
5194 enum tree_code tcode = TREE_CODE (t);
5195 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5196 > GET_MODE_SIZE (TYPE_MODE (type)))
5197 ? wide_type : type);
5198 tree t1, t2;
5199 int same_p = tcode == code;
5200 tree op0 = NULL_TREE, op1 = NULL_TREE;
5202 /* Don't deal with constants of zero here; they confuse the code below. */
5203 if (integer_zerop (c))
5204 return NULL_TREE;
5206 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5207 op0 = TREE_OPERAND (t, 0);
5209 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5210 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5212 /* Note that we need not handle conditional operations here since fold
5213 already handles those cases. So just do arithmetic here. */
5214 switch (tcode)
5216 case INTEGER_CST:
5217 /* For a constant, we can always simplify if we are a multiply
5218 or (for divide and modulus) if it is a multiple of our constant. */
5219 if (code == MULT_EXPR
5220 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5221 return const_binop (code, fold_convert (ctype, t),
5222 fold_convert (ctype, c), 0);
5223 break;
5225 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5226 /* If op0 is an expression ... */
5227 if ((COMPARISON_CLASS_P (op0)
5228 || UNARY_CLASS_P (op0)
5229 || BINARY_CLASS_P (op0)
5230 || EXPRESSION_CLASS_P (op0))
5231 /* ... and is unsigned, and its type is smaller than ctype,
5232 then we cannot pass through as widening. */
5233 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5234 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5235 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5236 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5237 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5238 /* ... or this is a truncation (t is narrower than op0),
5239 then we cannot pass through this narrowing. */
5240 || (GET_MODE_SIZE (TYPE_MODE (type))
5241 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5242 /* ... or signedness changes for division or modulus,
5243 then we cannot pass through this conversion. */
5244 || (code != MULT_EXPR
5245 && (TYPE_UNSIGNED (ctype)
5246 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5247 break;
5249 /* Pass the constant down and see if we can make a simplification. If
5250 we can, replace this expression with the inner simplification for
5251 possible later conversion to our or some other type. */
5252 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5253 && TREE_CODE (t2) == INTEGER_CST
5254 && ! TREE_CONSTANT_OVERFLOW (t2)
5255 && (0 != (t1 = extract_muldiv (op0, t2, code,
5256 code == MULT_EXPR
5257 ? ctype : NULL_TREE))))
5258 return t1;
5259 break;
5261 case ABS_EXPR:
5262 /* If widening the type changes it from signed to unsigned, then we
5263 must avoid building ABS_EXPR itself as unsigned. */
5264 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5266 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5267 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5269 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5270 return fold_convert (ctype, t1);
5272 break;
5274 /* FALLTHROUGH */
5275 case NEGATE_EXPR:
5276 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5277 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5278 break;
5280 case MIN_EXPR: case MAX_EXPR:
5281 /* If widening the type changes the signedness, then we can't perform
5282 this optimization as that changes the result. */
5283 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5284 break;
5286 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5287 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5288 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5290 if (tree_int_cst_sgn (c) < 0)
5291 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5293 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5294 fold_convert (ctype, t2));
5296 break;
5298 case LSHIFT_EXPR: case RSHIFT_EXPR:
5299 /* If the second operand is constant, this is a multiplication
5300 or floor division by a power of two, so we can treat it that
5301 way unless the multiplier or divisor overflows. Signed
5302 left-shift overflow is implementation-defined rather than
5303 undefined in C90, so do not convert signed left shift into
5304 multiplication. */
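/* For example (illustrative): an unsigned X << 3 is rewritten as
   X * 8, and X >> 2 as the floor division X / 4, before recursing.  */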
5305 if (TREE_CODE (op1) == INTEGER_CST
5306 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5307 /* const_binop may not detect overflow correctly,
5308 so check for it explicitly here. */
5309 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5310 && TREE_INT_CST_HIGH (op1) == 0
5311 && 0 != (t1 = fold_convert (ctype,
5312 const_binop (LSHIFT_EXPR,
5313 size_one_node,
5314 op1, 0)))
5315 && ! TREE_OVERFLOW (t1))
5316 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5317 ? MULT_EXPR : FLOOR_DIV_EXPR,
5318 ctype, fold_convert (ctype, op0), t1),
5319 c, code, wide_type);
5320 break;
5322 case PLUS_EXPR: case MINUS_EXPR:
5323 /* See if we can eliminate the operation on both sides. If we can, we
5324 can return a new PLUS or MINUS. If we can't, the only remaining
5325 cases where we can do anything are if the second operand is a
5326 constant. */
5327 t1 = extract_muldiv (op0, c, code, wide_type);
5328 t2 = extract_muldiv (op1, c, code, wide_type);
5329 if (t1 != 0 && t2 != 0
5330 && (code == MULT_EXPR
5331 /* If not multiplication, we can only do this if both operands
5332 are divisible by c. */
5333 || (multiple_of_p (ctype, op0, c)
5334 && multiple_of_p (ctype, op1, c))))
5335 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5336 fold_convert (ctype, t2));
5338 /* If this was a subtraction, negate OP1 and set it to be an addition.
5339 This simplifies the logic below. */
5340 if (tcode == MINUS_EXPR)
5341 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5343 if (TREE_CODE (op1) != INTEGER_CST)
5344 break;
5346 /* If either OP1 or C are negative, this optimization is not safe for
5347 some of the division and remainder types while for others we need
5348 to change the code. */
5349 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5351 if (code == CEIL_DIV_EXPR)
5352 code = FLOOR_DIV_EXPR;
5353 else if (code == FLOOR_DIV_EXPR)
5354 code = CEIL_DIV_EXPR;
5355 else if (code != MULT_EXPR
5356 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5357 break;
5360 /* If it's a multiply, or a division/modulus where OP1 is a multiple
5361 of our constant, do the operation and verify it doesn't overflow. */
5362 if (code == MULT_EXPR
5363 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5365 op1 = const_binop (code, fold_convert (ctype, op1),
5366 fold_convert (ctype, c), 0);
5367 /* We allow the constant to overflow with wrapping semantics. */
5368 if (op1 == 0
5369 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5370 break;
5372 else
5373 break;
5375 /* If we have an unsigned type that is not a sizetype, we cannot widen
5376 the operation since it will change the result if the original
5377 computation overflowed. */
5378 if (TYPE_UNSIGNED (ctype)
5379 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5380 && ctype != type)
5381 break;
5383 /* If we were able to eliminate our operation from the first side,
5384 apply our operation to the second side and reform the PLUS. */
5385 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5386 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5388 /* The last case is if we are a multiply. In that case, we can
5389 apply the distributive law to commute the multiply and addition
5390 if the multiplication of the constants doesn't overflow. */
5391 if (code == MULT_EXPR)
5392 return fold_build2 (tcode, ctype,
5393 fold_build2 (code, ctype,
5394 fold_convert (ctype, op0),
5395 fold_convert (ctype, c)),
5396 op1);
5398 break;
5400 case MULT_EXPR:
5401 /* We have a special case here if we are doing something like
5402 (C * 8) % 4 since we know that's zero. */
5403 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5404 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5405 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5406 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5407 return omit_one_operand (type, integer_zero_node, op0);
5409 /* ... fall through ... */
5411 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5412 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5413 /* If we can extract our operation from the LHS, do so and return a
5414 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5415 do something only if the second operand is a constant. */
5416 if (same_p
5417 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5418 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5419 fold_convert (ctype, op1));
5420 else if (tcode == MULT_EXPR && code == MULT_EXPR
5421 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5422 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5423 fold_convert (ctype, t1));
5424 else if (TREE_CODE (op1) != INTEGER_CST)
5425 return 0;
5427 /* If these are the same operation types, we can associate them
5428 assuming no overflow. */
5429 if (tcode == code
5430 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5431 fold_convert (ctype, c), 0))
5432 && ! TREE_OVERFLOW (t1))
5433 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5435 /* If these operations "cancel" each other, we have the main
5436 optimizations of this pass, which occur when either constant is a
5437 multiple of the other, in which case we replace this with either an
5438 operation of CODE or TCODE.
5440 If we have an unsigned type that is not a sizetype, we cannot do
5441 this since it will change the result if the original computation
5442 overflowed. */
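/* For example (illustrative): with TCODE == MULT_EXPR, (X * 8) / 4
   becomes X * 2, while (X * 4) / 8 becomes X / 2.  */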
5443 if ((! TYPE_UNSIGNED (ctype)
5444 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5445 && ! flag_wrapv
5446 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5447 || (tcode == MULT_EXPR
5448 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5449 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5451 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5452 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5453 fold_convert (ctype,
5454 const_binop (TRUNC_DIV_EXPR,
5455 op1, c, 0)));
5456 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5457 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5458 fold_convert (ctype,
5459 const_binop (TRUNC_DIV_EXPR,
5460 c, op1, 0)));
5462 break;
5464 default:
5465 break;
5468 return 0;
5471 /* Return a node which has the indicated constant VALUE (either 0 or
5472 1), and is of the indicated TYPE. */
5474 tree
5475 constant_boolean_node (int value, tree type)
5477 if (type == integer_type_node)
5478 return value ? integer_one_node : integer_zero_node;
5479 else if (type == boolean_type_node)
5480 return value ? boolean_true_node : boolean_false_node;
5481 else
5482 return build_int_cst (type, value);
5486 /* Return true if EXPR looks like an ARRAY_REF and set *BASE and
5487 *OFFSET to the appropriate trees. If there is no offset,
5488 *OFFSET is set to NULL_TREE. */
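/* Illustrative examples of the two forms handled below: for a tree
   of the form `&x + i' this sets *BASE to `&x' and *OFFSET to `i';
   for `&a[i]' it sets *BASE to `&a' and *OFFSET to `i'; and for a
   plain `&x' it sets *BASE to `&x' and *OFFSET to NULL_TREE.  */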
5490 static bool
5491 extract_array_ref (tree expr, tree *base, tree *offset)
5493 /* We have to be careful when stripping nops, as with a different
5494 base type the meaning of the offset can change. */
5495 tree inner_expr = expr;
5496 STRIP_NOPS (inner_expr);
5497 /* One canonical form is a PLUS_EXPR with the first
5498 argument being an ADDR_EXPR with a possible NOP_EXPR
5499 attached. */
5500 if (TREE_CODE (expr) == PLUS_EXPR)
5502 tree op0 = TREE_OPERAND (expr, 0);
5503 STRIP_NOPS (op0);
5504 if (TREE_CODE (op0) == ADDR_EXPR)
5506 *base = TREE_OPERAND (expr, 0);
5507 *offset = TREE_OPERAND (expr, 1);
5508 return true;
5511 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5512 which we transform into an ADDR_EXPR with appropriate
5513 offset. For other arguments to the ADDR_EXPR we assume
5514 zero offset and as such do not care about the ADDR_EXPR
5515 type and strip possible nops from it. */
5516 else if (TREE_CODE (inner_expr) == ADDR_EXPR)
5518 tree op0 = TREE_OPERAND (inner_expr, 0);
5519 if (TREE_CODE (op0) == ARRAY_REF)
5521 *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
5522 *offset = TREE_OPERAND (op0, 1);
5524 else
5526 *base = inner_expr;
5527 *offset = NULL_TREE;
5529 return true;
5532 return false;
5536 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5537 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5538 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5539 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5540 COND is the first argument to CODE; otherwise (as in the example
5541 given here), it is the second argument. TYPE is the type of the
5542 original expression. Return NULL_TREE if no simplification is
5543 possible. */
5545 static tree
5546 fold_binary_op_with_conditional_arg (enum tree_code code,
5547 tree type, tree op0, tree op1,
5548 tree cond, tree arg, int cond_first_p)
5550 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5551 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5552 tree test, true_value, false_value;
5553 tree lhs = NULL_TREE;
5554 tree rhs = NULL_TREE;
5556 /* This transformation is only worthwhile if we don't have to wrap
5557 ARG in a SAVE_EXPR, and the operation can be simplified on at least
5558 one of the branches once it's pushed inside the COND_EXPR. */
5559 if (!TREE_CONSTANT (arg))
5560 return NULL_TREE;
5562 if (TREE_CODE (cond) == COND_EXPR)
5564 test = TREE_OPERAND (cond, 0);
5565 true_value = TREE_OPERAND (cond, 1);
5566 false_value = TREE_OPERAND (cond, 2);
5567 /* If this operand throws an exception, then it does not make
5568 sense to try to perform a logical or arithmetic operation
5569 involving it. */
5570 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5571 lhs = true_value;
5572 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5573 rhs = false_value;
5575 else
5577 tree testtype = TREE_TYPE (cond);
5578 test = cond;
5579 true_value = constant_boolean_node (true, testtype);
5580 false_value = constant_boolean_node (false, testtype);
5583 arg = fold_convert (arg_type, arg);
5584 if (lhs == 0)
5586 true_value = fold_convert (cond_type, true_value);
5587 if (cond_first_p)
5588 lhs = fold_build2 (code, type, true_value, arg);
5589 else
5590 lhs = fold_build2 (code, type, arg, true_value);
5592 if (rhs == 0)
5594 false_value = fold_convert (cond_type, false_value);
5595 if (cond_first_p)
5596 rhs = fold_build2 (code, type, false_value, arg);
5597 else
5598 rhs = fold_build2 (code, type, arg, false_value);
5601 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5602 return fold_convert (type, test);
5606 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5608 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5609 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5610 ADDEND is the same as X.
5612 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5613 and finite. The problematic cases are when X is zero, and its mode
5614 has signed zeros. In the case of rounding towards -infinity,
5615 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5616 modes, X + 0 is not the same as X because -0 + 0 is 0. */
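/* For example, when signed zeros must be honored and the rounding
   mode is the default round-to-nearest, X - 0.0 still folds to X
   (this function returns true with NEGATE set), but X + 0.0 does
   not, since (-0.0) + 0.0 is +0.0 rather than -0.0.  */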
5618 static bool
5619 fold_real_zero_addition_p (tree type, tree addend, int negate)
5621 if (!real_zerop (addend))
5622 return false;
5624 /* Don't allow the fold with -fsignaling-nans. */
5625 if (HONOR_SNANS (TYPE_MODE (type)))
5626 return false;
5628 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5629 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5630 return true;
5632 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5633 if (TREE_CODE (addend) == REAL_CST
5634 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5635 negate = !negate;
5637 /* The mode has signed zeros, and we have to honor their sign.
5638 In this situation, there is only one case we can return true for.
5639 X - 0 is the same as X unless rounding towards -infinity is
5640 supported. */
5641 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5644 /* Subroutine of fold() that checks comparisons of built-in math
5645 functions against real constants.
5647 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5648 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5649 is the type of the result and ARG0 and ARG1 are the operands of the
5650 comparison. ARG1 must be a TREE_REAL_CST.
5652 The function returns the constant folded tree if a simplification
5653 can be made, and NULL_TREE otherwise. */
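/* For instance, sqrt(x) > 2.0 is simplified to x > 4.0 below, and
   sqrt(x) < -1.0 folds to constant false, since sqrt never returns
   a negative value.  */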
5655 static tree
5656 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5657 tree type, tree arg0, tree arg1)
5659 REAL_VALUE_TYPE c;
5661 if (BUILTIN_SQRT_P (fcode))
5663 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5664 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5666 c = TREE_REAL_CST (arg1);
5667 if (REAL_VALUE_NEGATIVE (c))
5669 /* sqrt(x) < y is always false, if y is negative. */
5670 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5671 return omit_one_operand (type, integer_zero_node, arg);
5673 /* sqrt(x) > y is always true, if y is negative and we
5674 don't care about NaNs, i.e. negative values of x. */
5675 if (code == NE_EXPR || !HONOR_NANS (mode))
5676 return omit_one_operand (type, integer_one_node, arg);
5678 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5679 return fold_build2 (GE_EXPR, type, arg,
5680 build_real (TREE_TYPE (arg), dconst0));
5682 else if (code == GT_EXPR || code == GE_EXPR)
5684 REAL_VALUE_TYPE c2;
5686 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5687 real_convert (&c2, mode, &c2);
5689 if (REAL_VALUE_ISINF (c2))
5691 /* sqrt(x) > y is x == +Inf, when y is very large. */
5692 if (HONOR_INFINITIES (mode))
5693 return fold_build2 (EQ_EXPR, type, arg,
5694 build_real (TREE_TYPE (arg), c2));
5696 /* sqrt(x) > y is always false, when y is very large
5697 and we don't care about infinities. */
5698 return omit_one_operand (type, integer_zero_node, arg);
5701 /* sqrt(x) > c is the same as x > c*c. */
5702 return fold_build2 (code, type, arg,
5703 build_real (TREE_TYPE (arg), c2));
5705 else if (code == LT_EXPR || code == LE_EXPR)
5707 REAL_VALUE_TYPE c2;
5709 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5710 real_convert (&c2, mode, &c2);
5712 if (REAL_VALUE_ISINF (c2))
5714 /* sqrt(x) < y is always true, when y is a very large
5715 value and we don't care about NaNs or Infinities. */
5716 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5717 return omit_one_operand (type, integer_one_node, arg);
5719 /* sqrt(x) < y is x != +Inf when y is very large and we
5720 don't care about NaNs. */
5721 if (! HONOR_NANS (mode))
5722 return fold_build2 (NE_EXPR, type, arg,
5723 build_real (TREE_TYPE (arg), c2));
5725 /* sqrt(x) < y is x >= 0 when y is very large and we
5726 don't care about Infinities. */
5727 if (! HONOR_INFINITIES (mode))
5728 return fold_build2 (GE_EXPR, type, arg,
5729 build_real (TREE_TYPE (arg), dconst0));
5731 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5732 if (lang_hooks.decls.global_bindings_p () != 0
5733 || CONTAINS_PLACEHOLDER_P (arg))
5734 return NULL_TREE;
5736 arg = save_expr (arg);
5737 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5738 fold_build2 (GE_EXPR, type, arg,
5739 build_real (TREE_TYPE (arg),
5740 dconst0)),
5741 fold_build2 (NE_EXPR, type, arg,
5742 build_real (TREE_TYPE (arg),
5743 c2)));
5746 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5747 if (! HONOR_NANS (mode))
5748 return fold_build2 (code, type, arg,
5749 build_real (TREE_TYPE (arg), c2));
5751 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5752 if (lang_hooks.decls.global_bindings_p () == 0
5753 && ! CONTAINS_PLACEHOLDER_P (arg))
5755 arg = save_expr (arg);
5756 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5757 fold_build2 (GE_EXPR, type, arg,
5758 build_real (TREE_TYPE (arg),
5759 dconst0)),
5760 fold_build2 (code, type, arg,
5761 build_real (TREE_TYPE (arg),
5762 c2)));
5767 return NULL_TREE;
5770 /* Subroutine of fold() that optimizes comparisons against Infinities,
5771 either +Inf or -Inf.
5773 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5774 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5775 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5777 The function returns the constant folded tree if a simplification
5778 can be made, and NULL_TREE otherwise. */
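/* For example, for double operands x < +Inf becomes x <= DBL_MAX
   and x >= +Inf becomes x > DBL_MAX; comparisons against -Inf are
   handled by first swapping the sense of the comparison.  */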
5780 static tree
5781 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5783 enum machine_mode mode;
5784 REAL_VALUE_TYPE max;
5785 tree temp;
5786 bool neg;
5788 mode = TYPE_MODE (TREE_TYPE (arg0));
5790 /* For negative infinity swap the sense of the comparison. */
5791 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5792 if (neg)
5793 code = swap_tree_comparison (code);
5795 switch (code)
5797 case GT_EXPR:
5798 /* x > +Inf is always false, if we ignore sNaNs. */
5799 if (HONOR_SNANS (mode))
5800 return NULL_TREE;
5801 return omit_one_operand (type, integer_zero_node, arg0);
5803 case LE_EXPR:
5804 /* x <= +Inf is always true, if we don't care about NaNs. */
5805 if (! HONOR_NANS (mode))
5806 return omit_one_operand (type, integer_one_node, arg0);
5808 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5809 if (lang_hooks.decls.global_bindings_p () == 0
5810 && ! CONTAINS_PLACEHOLDER_P (arg0))
5812 arg0 = save_expr (arg0);
5813 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5815 break;
5817 case EQ_EXPR:
5818 case GE_EXPR:
5819 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5820 real_maxval (&max, neg, mode);
5821 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5822 arg0, build_real (TREE_TYPE (arg0), max));
5824 case LT_EXPR:
5825 /* x < +Inf is always equal to x <= DBL_MAX. */
5826 real_maxval (&max, neg, mode);
5827 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5828 arg0, build_real (TREE_TYPE (arg0), max));
5830 case NE_EXPR:
5831 /* x != +Inf is always equal to !(x > DBL_MAX). */
5832 real_maxval (&max, neg, mode);
5833 if (! HONOR_NANS (mode))
5834 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5835 arg0, build_real (TREE_TYPE (arg0), max));
5837 /* The transformation below creates non-gimple code and thus is
5838 not appropriate if we are in gimple form. */
5839 if (in_gimple_form)
5840 return NULL_TREE;
5842 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5843 arg0, build_real (TREE_TYPE (arg0), max));
5844 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5846 default:
5847 break;
5850 return NULL_TREE;
5853 /* Subroutine of fold() that optimizes comparisons of a division by
5854 a nonzero integer constant against an integer constant, i.e.
5855 X/C1 op C2.
5857 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5858 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5859 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5861 The function returns the constant folded tree if a simplification
5862 can be made, and NULL_TREE otherwise. */
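/* A worked example: for unsigned X, the test X / 4 == 3 holds for
   exactly the values 12 through 15, so it is rewritten below as the
   range check 12 <= X && X <= 15 via build_range_check.  */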
5864 static tree
5865 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5867 tree prod, tmp, hi, lo;
5868 tree arg00 = TREE_OPERAND (arg0, 0);
5869 tree arg01 = TREE_OPERAND (arg0, 1);
5870 unsigned HOST_WIDE_INT lpart;
5871 HOST_WIDE_INT hpart;
5872 int overflow;
5874 /* We have to do this the hard way to detect unsigned overflow.
5875 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5876 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5877 TREE_INT_CST_HIGH (arg01),
5878 TREE_INT_CST_LOW (arg1),
5879 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5880 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5881 prod = force_fit_type (prod, -1, overflow, false);
5883 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5885 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5886 lo = prod;
5888 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5889 overflow = add_double (TREE_INT_CST_LOW (prod),
5890 TREE_INT_CST_HIGH (prod),
5891 TREE_INT_CST_LOW (tmp),
5892 TREE_INT_CST_HIGH (tmp),
5893 &lpart, &hpart);
5894 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5895 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5896 TREE_CONSTANT_OVERFLOW (prod));
5898 else if (tree_int_cst_sgn (arg01) >= 0)
5900 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5901 switch (tree_int_cst_sgn (arg1))
5903 case -1:
5904 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5905 hi = prod;
5906 break;
5908 case 0:
5909 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5910 hi = tmp;
5911 break;
5913 case 1:
5914 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5915 lo = prod;
5916 break;
5918 default:
5919 gcc_unreachable ();
5922 else
5924 /* A negative divisor reverses the relational operators. */
5925 code = swap_tree_comparison (code);
5927 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5928 switch (tree_int_cst_sgn (arg1))
5930 case -1:
5931 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5932 lo = prod;
5933 break;
5935 case 0:
5936 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5937 lo = tmp;
5938 break;
5940 case 1:
5941 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5942 hi = prod;
5943 break;
5945 default:
5946 gcc_unreachable ();
5950 switch (code)
5952 case EQ_EXPR:
5953 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5954 return omit_one_operand (type, integer_zero_node, arg00);
5955 if (TREE_OVERFLOW (hi))
5956 return fold_build2 (GE_EXPR, type, arg00, lo);
5957 if (TREE_OVERFLOW (lo))
5958 return fold_build2 (LE_EXPR, type, arg00, hi);
5959 return build_range_check (type, arg00, 1, lo, hi);
5961 case NE_EXPR:
5962 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5963 return omit_one_operand (type, integer_one_node, arg00);
5964 if (TREE_OVERFLOW (hi))
5965 return fold_build2 (LT_EXPR, type, arg00, lo);
5966 if (TREE_OVERFLOW (lo))
5967 return fold_build2 (GT_EXPR, type, arg00, hi);
5968 return build_range_check (type, arg00, 0, lo, hi);
5970 case LT_EXPR:
5971 if (TREE_OVERFLOW (lo))
5972 return omit_one_operand (type, integer_zero_node, arg00);
5973 return fold_build2 (LT_EXPR, type, arg00, lo);
5975 case LE_EXPR:
5976 if (TREE_OVERFLOW (hi))
5977 return omit_one_operand (type, integer_one_node, arg00);
5978 return fold_build2 (LE_EXPR, type, arg00, hi);
5980 case GT_EXPR:
5981 if (TREE_OVERFLOW (hi))
5982 return omit_one_operand (type, integer_zero_node, arg00);
5983 return fold_build2 (GT_EXPR, type, arg00, hi);
5985 case GE_EXPR:
5986 if (TREE_OVERFLOW (lo))
5987 return omit_one_operand (type, integer_one_node, arg00);
5988 return fold_build2 (GE_EXPR, type, arg00, lo);
5990 default:
5991 break;
5994 return NULL_TREE;
5998 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5999 equality/inequality test, then return a simplified form of the test
6000 using a sign test. Otherwise return NULL. RESULT_TYPE is the desired
6001 result type. */
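/* E.g. for an unsigned char X, (X & 0x80) != 0 tests the sign bit
   of the corresponding signed type and becomes (signed char) X < 0,
   while (X & 0x80) == 0 becomes (signed char) X >= 0.  */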
6003 static tree
6004 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6005 tree result_type)
6007 /* If this is testing a single bit, we can optimize the test. */
6008 if ((code == NE_EXPR || code == EQ_EXPR)
6009 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6010 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6012 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6013 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6014 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6016 if (arg00 != NULL_TREE
6017 /* This is only a win if casting to a signed type is cheap,
6018 i.e. when arg00's type is not a partial mode. */
6019 && TYPE_PRECISION (TREE_TYPE (arg00))
6020 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6022 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6023 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6024 result_type, fold_convert (stype, arg00),
6025 fold_convert (stype, integer_zero_node));
6029 return NULL_TREE;
6032 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6033 equality/inequality test, then return a simplified form of
6034 the test using shifts and logical operations. Otherwise return
6035 NULL. RESULT_TYPE is the desired result type. */
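/* For example, (X & 8) != 0 becomes ((X >> 3) & 1) and (X & 8) == 0
   becomes (((X >> 3) ^ 1) & 1), with the result then converted to
   RESULT_TYPE.  */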
6037 tree
6038 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6039 tree result_type)
6041 /* If this is testing a single bit, we can optimize the test. */
6042 if ((code == NE_EXPR || code == EQ_EXPR)
6043 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6044 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6046 tree inner = TREE_OPERAND (arg0, 0);
6047 tree type = TREE_TYPE (arg0);
6048 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6049 enum machine_mode operand_mode = TYPE_MODE (type);
6050 int ops_unsigned;
6051 tree signed_type, unsigned_type, intermediate_type;
6052 tree tem;
6054 /* First, see if we can fold the single bit test into a sign-bit
6055 test. */
6056 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6057 result_type);
6058 if (tem)
6059 return tem;
6061 /* Otherwise we have (A & C) != 0 where C is a single bit,
6062 convert that into ((A >> C2) & 1), where C2 = log2(C).
6063 Similarly for (A & C) == 0. */
6065 /* If INNER is a right shift of a constant and it plus BITNUM does
6066 not overflow, adjust BITNUM and INNER. */
6067 if (TREE_CODE (inner) == RSHIFT_EXPR
6068 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6069 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6070 && bitnum < TYPE_PRECISION (type)
6071 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6072 bitnum - TYPE_PRECISION (type)))
6074 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6075 inner = TREE_OPERAND (inner, 0);
6078 /* If we are going to be able to omit the AND below, we must do our
6079 operations as unsigned. If we must use the AND, we have a choice.
6080 Normally unsigned is faster, but for some machines signed is. */
6081 #ifdef LOAD_EXTEND_OP
6082 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6083 && !flag_syntax_only) ? 0 : 1;
6084 #else
6085 ops_unsigned = 1;
6086 #endif
6088 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6089 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6090 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6091 inner = fold_convert (intermediate_type, inner);
6093 if (bitnum != 0)
6094 inner = build2 (RSHIFT_EXPR, intermediate_type,
6095 inner, size_int (bitnum));
6097 if (code == EQ_EXPR)
6098 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6099 inner, integer_one_node);
6101 /* Put the AND last so it can combine with more things. */
6102 inner = build2 (BIT_AND_EXPR, intermediate_type,
6103 inner, integer_one_node);
6105 /* Make sure to return the proper type. */
6106 inner = fold_convert (result_type, inner);
6108 return inner;
6110 return NULL_TREE;
6113 /* Check whether we are allowed to reorder operands ARG0 and ARG1,
6114 such that the evaluation of ARG1 occurs before ARG0. */
6116 static bool
6117 reorder_operands_p (tree arg0, tree arg1)
6119 if (! flag_evaluation_order)
6120 return true;
6121 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6122 return true;
6123 return ! TREE_SIDE_EFFECTS (arg0)
6124 && ! TREE_SIDE_EFFECTS (arg1);
6127 /* Test whether it is preferable to swap two operands, ARG0 and
6128 ARG1, for example because ARG0 is an integer constant and ARG1
6129 isn't. If REORDER is true, only recommend swapping if we can
6130 evaluate the operands in reverse order. */
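/* For example, fold_binary uses this predicate to canonicalize
   1 + X as X + 1, so that constants appear as the second operand
   of commutative operations.  */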
6132 bool
6133 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6135 STRIP_SIGN_NOPS (arg0);
6136 STRIP_SIGN_NOPS (arg1);
6138 if (TREE_CODE (arg1) == INTEGER_CST)
6139 return 0;
6140 if (TREE_CODE (arg0) == INTEGER_CST)
6141 return 1;
6143 if (TREE_CODE (arg1) == REAL_CST)
6144 return 0;
6145 if (TREE_CODE (arg0) == REAL_CST)
6146 return 1;
6148 if (TREE_CODE (arg1) == COMPLEX_CST)
6149 return 0;
6150 if (TREE_CODE (arg0) == COMPLEX_CST)
6151 return 1;
6153 if (TREE_CONSTANT (arg1))
6154 return 0;
6155 if (TREE_CONSTANT (arg0))
6156 return 1;
6158 if (optimize_size)
6159 return 0;
6161 if (reorder && flag_evaluation_order
6162 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6163 return 0;
6165 if (DECL_P (arg1))
6166 return 0;
6167 if (DECL_P (arg0))
6168 return 1;
6170 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6171 for commutative and comparison operators. Ensuring a canonical
6172 form allows the optimizers to find additional redundancies without
6173 having to explicitly check for both orderings. */
6174 if (TREE_CODE (arg0) == SSA_NAME
6175 && TREE_CODE (arg1) == SSA_NAME
6176 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6177 return 1;
6179 return 0;
6182 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6183 ARG0 is extended to a wider type. */
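/* For instance, if C has type unsigned char, (int) C == 1000 folds
   to constant false because 1000 does not fit in unsigned char,
   while (int) C == 100 is performed as C == 100 directly in the
   narrower type.  */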
6185 static tree
6186 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6188 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6189 tree arg1_unw;
6190 tree shorter_type, outer_type;
6191 tree min, max;
6192 bool above, below;
6194 if (arg0_unw == arg0)
6195 return NULL_TREE;
6196 shorter_type = TREE_TYPE (arg0_unw);
6198 #ifdef HAVE_canonicalize_funcptr_for_compare
6199 /* Disable this optimization if we're casting a function pointer
6200 type on targets that require function pointer canonicalization. */
6201 if (HAVE_canonicalize_funcptr_for_compare
6202 && TREE_CODE (shorter_type) == POINTER_TYPE
6203 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6204 return NULL_TREE;
6205 #endif
6207 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6208 return NULL_TREE;
6210 arg1_unw = get_unwidened (arg1, shorter_type);
6211 if (!arg1_unw)
6212 return NULL_TREE;
6214 /* If possible, express the comparison in the shorter mode. */
6215 if ((code == EQ_EXPR || code == NE_EXPR
6216 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6217 && (TREE_TYPE (arg1_unw) == shorter_type
6218 || (TREE_CODE (arg1_unw) == INTEGER_CST
6219 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6220 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6221 && int_fits_type_p (arg1_unw, shorter_type))))
6222 return fold_build2 (code, type, arg0_unw,
6223 fold_convert (shorter_type, arg1_unw));
6225 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6226 return NULL_TREE;
6228 /* If we are comparing with an integer that does not fit into the range
6229 of the shorter type, the result is known. */
6230 outer_type = TREE_TYPE (arg1_unw);
6231 min = lower_bound_in_type (outer_type, shorter_type);
6232 max = upper_bound_in_type (outer_type, shorter_type);
6234 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6235 max, arg1_unw));
6236 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6237 arg1_unw, min));
6239 switch (code)
6241 case EQ_EXPR:
6242 if (above || below)
6243 return omit_one_operand (type, integer_zero_node, arg0);
6244 break;
6246 case NE_EXPR:
6247 if (above || below)
6248 return omit_one_operand (type, integer_one_node, arg0);
6249 break;
6251 case LT_EXPR:
6252 case LE_EXPR:
6253 if (above)
6254 return omit_one_operand (type, integer_one_node, arg0);
6255 else if (below)
6256 return omit_one_operand (type, integer_zero_node, arg0);
6258 case GT_EXPR:
6259 case GE_EXPR:
6260 if (above)
6261 return omit_one_operand (type, integer_zero_node, arg0);
6262 else if (below)
6263 return omit_one_operand (type, integer_one_node, arg0);
6265 default:
6266 break;
6269 return NULL_TREE;
6272 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6273 ARG0 just the signedness is changed. */
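/* E.g. for int X, the test (unsigned int) X == 5U is folded to
   X == 5; ordering comparisons such as (unsigned int) X < 5U are
   left alone, since changing the signedness would change their
   result.  */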
6275 static tree
6276 fold_sign_changed_comparison (enum tree_code code, tree type,
6277 tree arg0, tree arg1)
6279 tree arg0_inner, tmp;
6280 tree inner_type, outer_type;
6282 if (TREE_CODE (arg0) != NOP_EXPR
6283 && TREE_CODE (arg0) != CONVERT_EXPR)
6284 return NULL_TREE;
6286 outer_type = TREE_TYPE (arg0);
6287 arg0_inner = TREE_OPERAND (arg0, 0);
6288 inner_type = TREE_TYPE (arg0_inner);
6290 #ifdef HAVE_canonicalize_funcptr_for_compare
6291 /* Disable this optimization if we're casting a function pointer
6292 type on targets that require function pointer canonicalization. */
6293 if (HAVE_canonicalize_funcptr_for_compare
6294 && TREE_CODE (inner_type) == POINTER_TYPE
6295 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6296 return NULL_TREE;
6297 #endif
6299 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6300 return NULL_TREE;
6302 if (TREE_CODE (arg1) != INTEGER_CST
6303 && !((TREE_CODE (arg1) == NOP_EXPR
6304 || TREE_CODE (arg1) == CONVERT_EXPR)
6305 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6306 return NULL_TREE;
6308 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6309 && code != NE_EXPR
6310 && code != EQ_EXPR)
6311 return NULL_TREE;
6313 if (TREE_CODE (arg1) == INTEGER_CST)
6315 tmp = build_int_cst_wide (inner_type,
6316 TREE_INT_CST_LOW (arg1),
6317 TREE_INT_CST_HIGH (arg1));
6318 arg1 = force_fit_type (tmp, 0,
6319 TREE_OVERFLOW (arg1),
6320 TREE_CONSTANT_OVERFLOW (arg1));
6322 else
6323 arg1 = fold_convert (inner_type, arg1);
6325 return fold_build2 (code, type, arg0_inner, arg1);
6328 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6329 the step of the array. Reconstructs s and delta in the case of s * delta
6330 being an integer constant (and thus already folded).
6331 ADDR is the address. OP1 is the multiplicative expression.
6332 If the function succeeds, the new address expression is returned. Otherwise
6333 NULL_TREE is returned. */
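/* As a sketch: for an array of 4-byte elements, &a[i] + j * 4
   becomes &a[i + j], and &a[i] + 8 becomes &a[i + 2], because the
   constant offset is an exact multiple of the element size.  */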
6335 static tree
6336 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6338 tree s, delta, step;
6339 tree ref = TREE_OPERAND (addr, 0), pref;
6340 tree ret, pos;
6341 tree itype;
6343 /* Canonicalize op1 into a possibly non-constant delta
6344 and an INTEGER_CST s. */
6345 if (TREE_CODE (op1) == MULT_EXPR)
6347 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6349 STRIP_NOPS (arg0);
6350 STRIP_NOPS (arg1);
6352 if (TREE_CODE (arg0) == INTEGER_CST)
6354 s = arg0;
6355 delta = arg1;
6357 else if (TREE_CODE (arg1) == INTEGER_CST)
6359 s = arg1;
6360 delta = arg0;
6362 else
6363 return NULL_TREE;
6365 else if (TREE_CODE (op1) == INTEGER_CST)
6367 delta = op1;
6368 s = NULL_TREE;
6370 else
6372 /* Pretend the multiplication is delta * 1. */
6373 delta = op1;
6374 s = integer_one_node;
6377 for (;; ref = TREE_OPERAND (ref, 0))
6379 if (TREE_CODE (ref) == ARRAY_REF)
6381 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6382 if (! itype)
6383 continue;
6385 step = array_ref_element_size (ref);
6386 if (TREE_CODE (step) != INTEGER_CST)
6387 continue;
6389 if (s)
6391 if (! tree_int_cst_equal (step, s))
6392 continue;
6394 else
6396 /* Check whether delta is a multiple of the step. */
6397 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6398 if (! tmp)
6399 continue;
6400 delta = tmp;
6403 break;
6406 if (!handled_component_p (ref))
6407 return NULL_TREE;
6410 /* We found a suitable array reference. Copy everything up to it
6411 and replace the index. */
6413 pref = TREE_OPERAND (addr, 0);
6414 ret = copy_node (pref);
6415 pos = ret;
6417 while (pref != ref)
6419 pref = TREE_OPERAND (pref, 0);
6420 TREE_OPERAND (pos, 0) = copy_node (pref);
6421 pos = TREE_OPERAND (pos, 0);
6424 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6425 fold_convert (itype,
6426 TREE_OPERAND (pos, 1)),
6427 fold_convert (itype, delta));
6429 return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6433 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6434 means A >= Y && A != MAX, but in this case we know that
6435 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6437 static tree
6438 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6440 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6442 if (TREE_CODE (bound) == LT_EXPR)
6443 a = TREE_OPERAND (bound, 0);
6444 else if (TREE_CODE (bound) == GT_EXPR)
6445 a = TREE_OPERAND (bound, 1);
6446 else
6447 return NULL_TREE;
6449 typea = TREE_TYPE (a);
6450 if (!INTEGRAL_TYPE_P (typea)
6451 && !POINTER_TYPE_P (typea))
6452 return NULL_TREE;
6454 if (TREE_CODE (ineq) == LT_EXPR)
6456 a1 = TREE_OPERAND (ineq, 1);
6457 y = TREE_OPERAND (ineq, 0);
6459 else if (TREE_CODE (ineq) == GT_EXPR)
6461 a1 = TREE_OPERAND (ineq, 0);
6462 y = TREE_OPERAND (ineq, 1);
6464 else
6465 return NULL_TREE;
6467 if (TREE_TYPE (a1) != typea)
6468 return NULL_TREE;
6470 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6471 if (!integer_onep (diff))
6472 return NULL_TREE;
6474 return fold_build2 (GE_EXPR, type, a, y);
6477 /* Fold complex addition when both components are accessible by parts.
6478 Return non-null if successful. CODE should be PLUS_EXPR for addition,
6479 or MINUS_EXPR for subtraction. */
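/* For example, COMPLEX_EXPR <a, b> + COMPLEX_EXPR <c, d> folds
   componentwise to COMPLEX_EXPR <a + c, b + d>, and similarly for
   MINUS_EXPR.  */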
6481 static tree
6482 fold_complex_add (tree type, tree ac, tree bc, enum tree_code code)
6484 tree ar, ai, br, bi, rr, ri, inner_type;
6486 if (TREE_CODE (ac) == COMPLEX_EXPR)
6487 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6488 else if (TREE_CODE (ac) == COMPLEX_CST)
6489 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6490 else
6491 return NULL;
6493 if (TREE_CODE (bc) == COMPLEX_EXPR)
6494 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6495 else if (TREE_CODE (bc) == COMPLEX_CST)
6496 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6497 else
6498 return NULL;
6500 inner_type = TREE_TYPE (type);
6502 rr = fold_build2 (code, inner_type, ar, br);
6503 ri = fold_build2 (code, inner_type, ai, bi);
6505 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6508 /* Perform some simplifications of complex multiplication when one or more
6509 of the components are constants or zeros. Return non-null if successful. */
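/* For instance, multiplying by the imaginary unit (AI == 0, BR == 0,
   BI == 1) yields COMPLEX_EXPR <0, AR>, and multiplying two purely
   imaginary values (AR == 0, BR == 0) yields
   COMPLEX_EXPR <-(AI * BI), 0>.  */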
6511 tree
6512 fold_complex_mult_parts (tree type, tree ar, tree ai, tree br, tree bi)
6514 tree rr, ri, inner_type, zero;
6515 bool ar0, ai0, br0, bi0, bi1;
6517 inner_type = TREE_TYPE (type);
6518 zero = NULL;
6520 if (SCALAR_FLOAT_TYPE_P (inner_type))
6522 ar0 = ai0 = br0 = bi0 = bi1 = false;
6524 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6526 if (TREE_CODE (ar) == REAL_CST
6527 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6528 ar0 = true, zero = ar;
6530 if (TREE_CODE (ai) == REAL_CST
6531 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6532 ai0 = true, zero = ai;
6534 if (TREE_CODE (br) == REAL_CST
6535 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6536 br0 = true, zero = br;
6538 if (TREE_CODE (bi) == REAL_CST)
6540 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6541 bi0 = true, zero = bi;
6542 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6543 bi1 = true;
6546 else
6548 ar0 = integer_zerop (ar);
6549 if (ar0)
6550 zero = ar;
6551 ai0 = integer_zerop (ai);
6552 if (ai0)
6553 zero = ai;
6554 br0 = integer_zerop (br);
6555 if (br0)
6556 zero = br;
6557 bi0 = integer_zerop (bi);
6558 if (bi0)
6560 zero = bi;
6561 bi1 = false;
6563 else
6564 bi1 = integer_onep (bi);
6567 /* We won't optimize anything below unless something is zero. */
6568 if (zero == NULL)
6569 return NULL;
6571 if (ai0 && br0 && bi1)
6573 rr = zero;
6574 ri = ar;
6576 else if (ai0 && bi0)
6578 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6579 ri = zero;
6581 else if (ai0 && br0)
6583 rr = zero;
6584 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6586 else if (ar0 && bi0)
6588 rr = zero;
6589 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6591 else if (ar0 && br0)
6593 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6594 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6595 ri = zero;
6597 else if (bi0)
6599 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6600 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6602 else if (ai0)
6604 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6605 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6607 else if (br0)
6609 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6610 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6611 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6613 else if (ar0)
6615 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6616 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6617 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6619 else
6620 return NULL;
6622 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6625 static tree
6626 fold_complex_mult (tree type, tree ac, tree bc)
6628 tree ar, ai, br, bi;
6630 if (TREE_CODE (ac) == COMPLEX_EXPR)
6631 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6632 else if (TREE_CODE (ac) == COMPLEX_CST)
6633 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6634 else
6635 return NULL;
6637 if (TREE_CODE (bc) == COMPLEX_EXPR)
6638 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6639 else if (TREE_CODE (bc) == COMPLEX_CST)
6640 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6641 else
6642 return NULL;
6644 return fold_complex_mult_parts (type, ar, ai, br, bi);
6647 /* Perform some simplifications of complex division when one or more of
6648 the components are constants or zeros. Return non-null if successful. */
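/* For instance, dividing two purely real values (AI == 0, BI == 0)
   yields COMPLEX_EXPR <AR / BR, 0>, and dividing a purely real value
   by a purely imaginary one (AI == 0, BR == 0) yields
   COMPLEX_EXPR <0, -(AR / BI)>.  */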
6650 tree
6651 fold_complex_div_parts (tree type, tree ar, tree ai, tree br, tree bi,
6652 enum tree_code code)
6654 tree rr, ri, inner_type, zero;
6655 bool ar0, ai0, br0, bi0, bi1;
6657 inner_type = TREE_TYPE (type);
6658 zero = NULL;
6660 if (SCALAR_FLOAT_TYPE_P (inner_type))
6662 ar0 = ai0 = br0 = bi0 = bi1 = false;
6664 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6666 if (TREE_CODE (ar) == REAL_CST
6667 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6668 ar0 = true, zero = ar;
6670 if (TREE_CODE (ai) == REAL_CST
6671 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6672 ai0 = true, zero = ai;
6674 if (TREE_CODE (br) == REAL_CST
6675 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6676 br0 = true, zero = br;
6678 if (TREE_CODE (bi) == REAL_CST)
6680 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6681 bi0 = true, zero = bi;
6682 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6683 bi1 = true;
6686 else
6688 ar0 = integer_zerop (ar);
6689 if (ar0)
6690 zero = ar;
6691 ai0 = integer_zerop (ai);
6692 if (ai0)
6693 zero = ai;
6694 br0 = integer_zerop (br);
6695 if (br0)
6696 zero = br;
6697 bi0 = integer_zerop (bi);
6698 if (bi0)
6700 zero = bi;
6701 bi1 = false;
6703 else
6704 bi1 = integer_onep (bi);
6707 /* We won't optimize anything below unless something is zero. */
6708 if (zero == NULL)
6709 return NULL;
6711 if (ai0 && bi0)
6713 rr = fold_build2 (code, inner_type, ar, br);
6714 ri = zero;
6716 else if (ai0 && br0)
6718 rr = zero;
6719 ri = fold_build2 (code, inner_type, ar, bi);
6720 ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
6722 else if (ar0 && bi0)
6724 rr = zero;
6725 ri = fold_build2 (code, inner_type, ai, br);
6727 else if (ar0 && br0)
6729 rr = fold_build2 (code, inner_type, ai, bi);
6730 ri = zero;
6732 else if (bi0)
6734 rr = fold_build2 (code, inner_type, ar, br);
6735 ri = fold_build2 (code, inner_type, ai, br);
6737 else if (br0)
6739 rr = fold_build2 (code, inner_type, ai, bi);
6740 ri = fold_build2 (code, inner_type, ar, bi);
6741 ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
6743 else
6744 return NULL;
6746 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6749 static tree
6750 fold_complex_div (tree type, tree ac, tree bc, enum tree_code code)
6752 tree ar, ai, br, bi;
6754 if (TREE_CODE (ac) == COMPLEX_EXPR)
6755 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6756 else if (TREE_CODE (ac) == COMPLEX_CST)
6757 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6758 else
6759 return NULL;
6761 if (TREE_CODE (bc) == COMPLEX_EXPR)
6762 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6763 else if (TREE_CODE (bc) == COMPLEX_CST)
6764 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6765 else
6766 return NULL;
6768 return fold_complex_div_parts (type, ar, ai, br, bi, code);
6771 /* Fold a unary expression of code CODE and type TYPE with operand
6772 OP0. Return the folded expression if folding is successful.
6773 Otherwise, return NULL_TREE. */
6775 tree
6776 fold_unary (enum tree_code code, tree type, tree op0)
6778 tree tem;
6779 tree arg0;
6780 enum tree_code_class kind = TREE_CODE_CLASS (code);
6782 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6783 && TREE_CODE_LENGTH (code) == 1);
6785 arg0 = op0;
6786 if (arg0)
6788 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6790 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6791 STRIP_SIGN_NOPS (arg0);
6793 else
6795 /* Strip any conversions that don't change the mode. This
6796 is safe for every expression, except for a comparison
6797 expression because its signedness is derived from its
6798 operands.
6800 Note that this is done as an internal manipulation within
6801 the constant folder, in order to find the simplest
6802 representation of the arguments so that their form can be
6803 studied. In any case, the appropriate type conversions
6804 should be put back in the tree that will get out of the
6805 constant folder. */
6806 STRIP_NOPS (arg0);
6810 if (TREE_CODE_CLASS (code) == tcc_unary)
6812 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6813 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6814 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6815 else if (TREE_CODE (arg0) == COND_EXPR)
6817 tree arg01 = TREE_OPERAND (arg0, 1);
6818 tree arg02 = TREE_OPERAND (arg0, 2);
6819 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6820 arg01 = fold_build1 (code, type, arg01);
6821 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6822 arg02 = fold_build1 (code, type, arg02);
6823 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6824 arg01, arg02);
6826 /* If this was a conversion, and all we did was to move it
6827 inside the COND_EXPR, bring it back out. But leave it if
6828 it is a conversion from integer to integer and the
6829 result precision is no wider than a word since such a
6830 conversion is cheap and may be optimized away by combine,
6831 while it couldn't if it were outside the COND_EXPR. Then return
6832 so we don't get into an infinite recursion loop taking the
6833 conversion out and then back in. */
6835 if ((code == NOP_EXPR || code == CONVERT_EXPR
6836 || code == NON_LVALUE_EXPR)
6837 && TREE_CODE (tem) == COND_EXPR
6838 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6839 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6840 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6841 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6842 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6843 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6844 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6845 && (INTEGRAL_TYPE_P
6846 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6847 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6848 || flag_syntax_only))
6849 tem = build1 (code, type,
6850 build3 (COND_EXPR,
6851 TREE_TYPE (TREE_OPERAND
6852 (TREE_OPERAND (tem, 1), 0)),
6853 TREE_OPERAND (tem, 0),
6854 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6855 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6856 return tem;
6858 else if (COMPARISON_CLASS_P (arg0))
6860 if (TREE_CODE (type) == BOOLEAN_TYPE)
6862 arg0 = copy_node (arg0);
6863 TREE_TYPE (arg0) = type;
6864 return arg0;
6866 else if (TREE_CODE (type) != INTEGER_TYPE)
6867 return fold_build3 (COND_EXPR, type, arg0,
6868 fold_build1 (code, type,
6869 integer_one_node),
6870 fold_build1 (code, type,
6871 integer_zero_node));
6875 switch (code)
6877 case NOP_EXPR:
6878 case FLOAT_EXPR:
6879 case CONVERT_EXPR:
6880 case FIX_TRUNC_EXPR:
6881 case FIX_CEIL_EXPR:
6882 case FIX_FLOOR_EXPR:
6883 case FIX_ROUND_EXPR:
6884 if (TREE_TYPE (op0) == type)
6885 return op0;
6887 /* Handle cases of two conversions in a row. */
6888 if (TREE_CODE (op0) == NOP_EXPR
6889 || TREE_CODE (op0) == CONVERT_EXPR)
6891 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6892 tree inter_type = TREE_TYPE (op0);
6893 int inside_int = INTEGRAL_TYPE_P (inside_type);
6894 int inside_ptr = POINTER_TYPE_P (inside_type);
6895 int inside_float = FLOAT_TYPE_P (inside_type);
6896 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6897 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6898 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6899 int inter_int = INTEGRAL_TYPE_P (inter_type);
6900 int inter_ptr = POINTER_TYPE_P (inter_type);
6901 int inter_float = FLOAT_TYPE_P (inter_type);
6902 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6903 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6904 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6905 int final_int = INTEGRAL_TYPE_P (type);
6906 int final_ptr = POINTER_TYPE_P (type);
6907 int final_float = FLOAT_TYPE_P (type);
6908 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6909 unsigned int final_prec = TYPE_PRECISION (type);
6910 int final_unsignedp = TYPE_UNSIGNED (type);
6912 /* In addition to the cases of two conversions in a row
6913 handled below, if we are converting something to its own
6914 type via an object of identical or wider precision, neither
6915 conversion is needed. */
6916 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6917 && ((inter_int && final_int) || (inter_float && final_float))
6918 && inter_prec >= final_prec)
6919 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6921 /* Likewise, if the intermediate and final types are either both
6922 float or both integer, we don't need the middle conversion if
6923 it is wider than the final type and doesn't change the signedness
6924 (for integers). Avoid this if the final type is a pointer
6925 since then we sometimes need the inner conversion. Likewise if
6926 the outer has a precision not equal to the size of its mode. */
6927 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6928 || (inter_float && inside_float)
6929 || (inter_vec && inside_vec))
6930 && inter_prec >= inside_prec
6931 && (inter_float || inter_vec
6932 || inter_unsignedp == inside_unsignedp)
6933 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6934 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6935 && ! final_ptr
6936 && (! final_vec || inter_prec == inside_prec))
6937 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6939 /* If we have a sign-extension of a zero-extended value, we can
6940 replace that by a single zero-extension. */
6941 if (inside_int && inter_int && final_int
6942 && inside_prec < inter_prec && inter_prec < final_prec
6943 && inside_unsignedp && !inter_unsignedp)
6944 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6946 /* Two conversions in a row are not needed unless:
6947 - some conversion is floating-point (overstrict for now), or
6948 - some conversion is a vector (overstrict for now), or
6949 - the intermediate type is narrower than both initial and
6950 final, or
6951 - the intermediate type and innermost type differ in signedness,
6952 and the outermost type is wider than the intermediate, or
6953 - the initial type is a pointer type and the precisions of the
6954 intermediate and final types differ, or
6955 - the final type is a pointer type and the precisions of the
6956 initial and intermediate types differ. */
6957 if (! inside_float && ! inter_float && ! final_float
6958 && ! inside_vec && ! inter_vec && ! final_vec
6959 && (inter_prec > inside_prec || inter_prec > final_prec)
6960 && ! (inside_int && inter_int
6961 && inter_unsignedp != inside_unsignedp
6962 && inter_prec < final_prec)
6963 && ((inter_unsignedp && inter_prec > inside_prec)
6964 == (final_unsignedp && final_prec > inter_prec))
6965 && ! (inside_ptr && inter_prec != final_prec)
6966 && ! (final_ptr && inside_prec != inter_prec)
6967 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6968 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6969 && ! final_ptr)
6970 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6973 if (TREE_CODE (op0) == MODIFY_EXPR
6974 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6975 /* Detect assigning a bitfield. */
6976 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6977 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6979 /* Don't leave an assignment inside a conversion
6980 unless assigning a bitfield. */
6981 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6982 /* First do the assignment, then return converted constant. */
6983 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6984 TREE_NO_WARNING (tem) = 1;
6985 TREE_USED (tem) = 1;
6986 return tem;
6989 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6990 constant (if x has signed type, the sign bit cannot be set
6991 in c). This folds extension into the BIT_AND_EXPR. */
6992 if (INTEGRAL_TYPE_P (type)
6993 && TREE_CODE (type) != BOOLEAN_TYPE
6994 && TREE_CODE (op0) == BIT_AND_EXPR
6995 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6997 tree and = op0;
6998 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6999 int change = 0;
7001 if (TYPE_UNSIGNED (TREE_TYPE (and))
7002 || (TYPE_PRECISION (type)
7003 <= TYPE_PRECISION (TREE_TYPE (and))))
7004 change = 1;
7005 else if (TYPE_PRECISION (TREE_TYPE (and1))
7006 <= HOST_BITS_PER_WIDE_INT
7007 && host_integerp (and1, 1))
7009 unsigned HOST_WIDE_INT cst;
7011 cst = tree_low_cst (and1, 1);
7012 cst &= (HOST_WIDE_INT) -1
7013 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7014 change = (cst == 0);
7015 #ifdef LOAD_EXTEND_OP
7016 if (change
7017 && !flag_syntax_only
7018 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7019 == ZERO_EXTEND))
7021 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7022 and0 = fold_convert (uns, and0);
7023 and1 = fold_convert (uns, and1);
7025 #endif
7027 if (change)
7029 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7030 TREE_INT_CST_HIGH (and1));
7031 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7032 TREE_CONSTANT_OVERFLOW (and1));
7033 return fold_build2 (BIT_AND_EXPR, type,
7034 fold_convert (type, and0), tem);
7038 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7039 T2 being pointers to types of the same size. */
7040 if (POINTER_TYPE_P (type)
7041 && BINARY_CLASS_P (arg0)
7042 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7043 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7045 tree arg00 = TREE_OPERAND (arg0, 0);
7046 tree t0 = type;
7047 tree t1 = TREE_TYPE (arg00);
7048 tree tt0 = TREE_TYPE (t0);
7049 tree tt1 = TREE_TYPE (t1);
7050 tree s0 = TYPE_SIZE (tt0);
7051 tree s1 = TYPE_SIZE (tt1);
7053 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7054 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7055 TREE_OPERAND (arg0, 1));
7058 tem = fold_convert_const (code, type, arg0);
7059 return tem ? tem : NULL_TREE;
7061 case VIEW_CONVERT_EXPR:
7062 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7063 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7064 return NULL_TREE;
7066 case NEGATE_EXPR:
7067 if (negate_expr_p (arg0))
7068 return fold_convert (type, negate_expr (arg0));
7069 /* Convert - (~A) to A + 1. */
7070 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
7071 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
7072 build_int_cst (type, 1));
7073 return NULL_TREE;
7075 case ABS_EXPR:
7076 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7077 return fold_abs_const (arg0, type);
7078 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7079 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7080 /* Convert fabs((double)float) into (double)fabsf(float). */
7081 else if (TREE_CODE (arg0) == NOP_EXPR
7082 && TREE_CODE (type) == REAL_TYPE)
7084 tree targ0 = strip_float_extensions (arg0);
7085 if (targ0 != arg0)
7086 return fold_convert (type, fold_build1 (ABS_EXPR,
7087 TREE_TYPE (targ0),
7088 targ0));
7090 else if (tree_expr_nonnegative_p (arg0))
7091 return arg0;
7093 /* Strip sign ops from argument. */
7094 if (TREE_CODE (type) == REAL_TYPE)
7096 tem = fold_strip_sign_ops (arg0);
7097 if (tem)
7098 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7100 return NULL_TREE;
7102 case CONJ_EXPR:
7103 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7104 return fold_convert (type, arg0);
7105 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7106 return build2 (COMPLEX_EXPR, type,
7107 TREE_OPERAND (arg0, 0),
7108 negate_expr (TREE_OPERAND (arg0, 1)));
7109 else if (TREE_CODE (arg0) == COMPLEX_CST)
7110 return build_complex (type, TREE_REALPART (arg0),
7111 negate_expr (TREE_IMAGPART (arg0)));
7112 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7113 return fold_build2 (TREE_CODE (arg0), type,
7114 fold_build1 (CONJ_EXPR, type,
7115 TREE_OPERAND (arg0, 0)),
7116 fold_build1 (CONJ_EXPR, type,
7117 TREE_OPERAND (arg0, 1)));
7118 else if (TREE_CODE (arg0) == CONJ_EXPR)
7119 return TREE_OPERAND (arg0, 0);
7120 return NULL_TREE;
7122 case BIT_NOT_EXPR:
7123 if (TREE_CODE (arg0) == INTEGER_CST)
7124 return fold_not_const (arg0, type);
7125 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7126 return TREE_OPERAND (arg0, 0);
7127 /* Convert ~ (-A) to A - 1. */
7128 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7129 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7130 build_int_cst (type, 1));
7131 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7132 else if (INTEGRAL_TYPE_P (type)
7133 && ((TREE_CODE (arg0) == MINUS_EXPR
7134 && integer_onep (TREE_OPERAND (arg0, 1)))
7135 || (TREE_CODE (arg0) == PLUS_EXPR
7136 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7137 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7138 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7139 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7140 && (tem = fold_unary (BIT_NOT_EXPR, type,
7141 fold_convert (type,
7142 TREE_OPERAND (arg0, 0)))))
7143 return fold_build2 (BIT_XOR_EXPR, type, tem,
7144 fold_convert (type, TREE_OPERAND (arg0, 1)));
7145 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7146 && (tem = fold_unary (BIT_NOT_EXPR, type,
7147 fold_convert (type,
7148 TREE_OPERAND (arg0, 1)))))
7149 return fold_build2 (BIT_XOR_EXPR, type,
7150 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7152 return NULL_TREE;
7154 case TRUTH_NOT_EXPR:
7155 /* The argument to invert_truthvalue must have Boolean type. */
7156 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7157 arg0 = fold_convert (boolean_type_node, arg0);
7159 /* Note that the operand of this must be an int
7160 and its values must be 0 or 1.
7161 ("true" is a fixed value perhaps depending on the language,
7162 but we don't handle values other than 1 correctly yet.) */
7163 tem = invert_truthvalue (arg0);
7164 /* Avoid infinite recursion. */
7165 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7166 return NULL_TREE;
7167 return fold_convert (type, tem);
7169 case REALPART_EXPR:
7170 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7171 return NULL_TREE;
7172 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7173 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7174 TREE_OPERAND (arg0, 1));
7175 else if (TREE_CODE (arg0) == COMPLEX_CST)
7176 return TREE_REALPART (arg0);
7177 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7178 return fold_build2 (TREE_CODE (arg0), type,
7179 fold_build1 (REALPART_EXPR, type,
7180 TREE_OPERAND (arg0, 0)),
7181 fold_build1 (REALPART_EXPR, type,
7182 TREE_OPERAND (arg0, 1)));
7183 return NULL_TREE;
7185 case IMAGPART_EXPR:
7186 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7187 return fold_convert (type, integer_zero_node);
7188 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7189 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7190 TREE_OPERAND (arg0, 0));
7191 else if (TREE_CODE (arg0) == COMPLEX_CST)
7192 return TREE_IMAGPART (arg0);
7193 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7194 return fold_build2 (TREE_CODE (arg0), type,
7195 fold_build1 (IMAGPART_EXPR, type,
7196 TREE_OPERAND (arg0, 0)),
7197 fold_build1 (IMAGPART_EXPR, type,
7198 TREE_OPERAND (arg0, 1)));
7199 return NULL_TREE;
7201 default:
7202 return NULL_TREE;
7203 } /* switch (code) */
7206 /* Fold a binary expression of code CODE and type TYPE with operands
7207 OP0 and OP1. Return the folded expression if folding is
7208 successful. Otherwise, return NULL_TREE. */
7210 tree
7211 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7213 tree t1 = NULL_TREE;
7214 tree tem;
7215 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7216 enum tree_code_class kind = TREE_CODE_CLASS (code);
7218 /* WINS will be nonzero when the switch is done
7219 if all operands are constant. */
7220 int wins = 1;
7222 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7223 && TREE_CODE_LENGTH (code) == 2);
7225 arg0 = op0;
7226 arg1 = op1;
7228 if (arg0)
7230 tree subop;
7232 /* Strip any conversions that don't change the mode. This is
7233 safe for every expression, except for a comparison expression
7234 because its signedness is derived from its operands. So, in
7235 the latter case, only strip conversions that don't change the
7236 signedness.
7238 Note that this is done as an internal manipulation within the
7239 constant folder, in order to find the simplest representation
7240 of the arguments so that their form can be studied. In any
7241 case, the appropriate type conversions should be put back in
7242 the tree that will get out of the constant folder. */
7243 if (kind == tcc_comparison)
7244 STRIP_SIGN_NOPS (arg0);
7245 else
7246 STRIP_NOPS (arg0);
7248 if (TREE_CODE (arg0) == COMPLEX_CST)
7249 subop = TREE_REALPART (arg0);
7250 else
7251 subop = arg0;
7253 if (TREE_CODE (subop) != INTEGER_CST
7254 && TREE_CODE (subop) != REAL_CST)
7255 /* Note that TREE_CONSTANT isn't enough:
7256 static var addresses are constant but we can't
7257 do arithmetic on them. */
7258 wins = 0;
7261 if (arg1)
7263 tree subop;
7265 /* Strip any conversions that don't change the mode. This is
7266 safe for every expression, except for a comparison expression
7267 because its signedness is derived from its operands. So, in
7268 the latter case, only strip conversions that don't change the
7269 signedness.
7271 Note that this is done as an internal manipulation within the
7272 constant folder, in order to find the simplest representation
7273 of the arguments so that their form can be studied. In any
7274 case, the appropriate type conversions should be put back in
7275 the tree that will get out of the constant folder. */
7276 if (kind == tcc_comparison)
7277 STRIP_SIGN_NOPS (arg1);
7278 else
7279 STRIP_NOPS (arg1);
7281 if (TREE_CODE (arg1) == COMPLEX_CST)
7282 subop = TREE_REALPART (arg1);
7283 else
7284 subop = arg1;
7286 if (TREE_CODE (subop) != INTEGER_CST
7287 && TREE_CODE (subop) != REAL_CST)
7288 /* Note that TREE_CONSTANT isn't enough:
7289 static var addresses are constant but we can't
7290 do arithmetic on them. */
7291 wins = 0;
7294 /* If this is a commutative operation, and ARG0 is a constant, move it
7295 to ARG1 to reduce the number of tests below. */
7296 if (commutative_tree_code (code)
7297 && tree_swap_operands_p (arg0, arg1, true))
7298 return fold_build2 (code, type, op1, op0);
7300 /* Now WINS is set as described above,
7301 ARG0 is the stripped form of OP0,
7302 and ARG1 is the stripped form of OP1.
7304 First check for cases where an arithmetic operation is applied to a
7305 compound, conditional, or comparison operation. Push the arithmetic
7306 operation inside the compound or conditional to see if any folding
7307 can then be done. Convert comparison to conditional for this purpose.
7308 This also optimizes non-constant cases that used to be done in
7309 expand_expr.
7311 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7312 one of the operands is a truth value and the other is a truth value or a
7313 BIT_AND_EXPR with the constant 1. In that case, the
7314 code below would make the expression more complex. Change it to a
7315 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7316 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7318 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7319 || code == EQ_EXPR || code == NE_EXPR)
7320 && ((truth_value_p (TREE_CODE (arg0))
7321 && (truth_value_p (TREE_CODE (arg1))
7322 || (TREE_CODE (arg1) == BIT_AND_EXPR
7323 && integer_onep (TREE_OPERAND (arg1, 1)))))
7324 || (truth_value_p (TREE_CODE (arg1))
7325 && (truth_value_p (TREE_CODE (arg0))
7326 || (TREE_CODE (arg0) == BIT_AND_EXPR
7327 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7329 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7330 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7331 : TRUTH_XOR_EXPR,
7332 boolean_type_node,
7333 fold_convert (boolean_type_node, arg0),
7334 fold_convert (boolean_type_node, arg1));
7336 if (code == EQ_EXPR)
7337 tem = invert_truthvalue (tem);
7339 return fold_convert (type, tem);
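/* Annotation (illustrative sketch, not part of the original source):
   for boolean-valued operands the conversion above rewrites a bitwise
   operator as the corresponding logical one.  A hypothetical example:

       int f (int a, int b, int c, int d)
       {
         return (a < b) & (c < d);
       }

   is folded as the TRUTH_AND_EXPR (a < b) && (c < d), so the
   logical-operator simplifications further below can apply; an
   EQ_EXPR of two truth values becomes an inverted TRUTH_XOR_EXPR.  */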
7342 if (TREE_CODE_CLASS (code) == tcc_comparison
7343 && TREE_CODE (arg0) == COMPOUND_EXPR)
7344 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7345 fold_build2 (code, type, TREE_OPERAND (arg0, 1), arg1));
7346 else if (TREE_CODE_CLASS (code) == tcc_comparison
7347 && TREE_CODE (arg1) == COMPOUND_EXPR)
7348 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7349 fold_build2 (code, type, arg0, TREE_OPERAND (arg1, 1)));
7350 else if (TREE_CODE_CLASS (code) == tcc_binary
7351 || TREE_CODE_CLASS (code) == tcc_comparison)
7353 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7354 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7355 fold_build2 (code, type, TREE_OPERAND (arg0, 1),
7356 arg1));
7357 if (TREE_CODE (arg1) == COMPOUND_EXPR
7358 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7359 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7360 fold_build2 (code, type,
7361 arg0, TREE_OPERAND (arg1, 1)));
7363 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7365 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7366 arg0, arg1,
7367 /*cond_first_p=*/1);
7368 if (tem != NULL_TREE)
7369 return tem;
7372 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7374 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7375 arg1, arg0,
7376 /*cond_first_p=*/0);
7377 if (tem != NULL_TREE)
7378 return tem;
7382 switch (code)
7384 case PLUS_EXPR:
7385 /* A + (-B) -> A - B */
7386 if (TREE_CODE (arg1) == NEGATE_EXPR)
7387 return fold_build2 (MINUS_EXPR, type,
7388 fold_convert (type, arg0),
7389 fold_convert (type, TREE_OPERAND (arg1, 0)));
7390 /* (-A) + B -> B - A */
7391 if (TREE_CODE (arg0) == NEGATE_EXPR
7392 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7393 return fold_build2 (MINUS_EXPR, type,
7394 fold_convert (type, arg1),
7395 fold_convert (type, TREE_OPERAND (arg0, 0)));
7396 /* Convert ~A + 1 to -A. */
7397 if (INTEGRAL_TYPE_P (type)
7398 && TREE_CODE (arg0) == BIT_NOT_EXPR
7399 && integer_onep (arg1))
7400 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
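/* Annotation (sketch, not from the original source): the three PLUS_EXPR
   folds above perform, at the source level,

       a + (-b)  ==>  a - b
       (-a) + b  ==>  b - a
       ~a + 1    ==>  -a        (two's complement identity)

   e.g. a hypothetical  int f (int a) { return ~a + 1; }  folds to -a.  */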
7402 if (TREE_CODE (type) == COMPLEX_TYPE)
7404 tem = fold_complex_add (type, arg0, arg1, PLUS_EXPR);
7405 if (tem)
7406 return tem;
7409 if (! FLOAT_TYPE_P (type))
7411 if (integer_zerop (arg1))
7412 return non_lvalue (fold_convert (type, arg0));
7414 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7415 with a constant, and the two constants have no bits in common,
7416 we should treat this as a BIT_IOR_EXPR since this may produce more
7417 simplifications. */
7418 if (TREE_CODE (arg0) == BIT_AND_EXPR
7419 && TREE_CODE (arg1) == BIT_AND_EXPR
7420 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7421 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7422 && integer_zerop (const_binop (BIT_AND_EXPR,
7423 TREE_OPERAND (arg0, 1),
7424 TREE_OPERAND (arg1, 1), 0)))
7426 code = BIT_IOR_EXPR;
7427 goto bit_ior;
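/* Annotation (hedged example, not part of the original source): when the
   two AND masks share no bits, the addition cannot carry and + agrees
   with |, so a hypothetical

       unsigned f (unsigned x, unsigned y)
       {
         return (x & 0xf0) + (y & 0x0f);
       }

   is re-dispatched through the BIT_IOR_EXPR case as
   (x & 0xf0) | (y & 0x0f), which may enable further folds.  */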
7430 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7431 (plus (plus (mult) (mult)) (foo)) so that we can
7432 take advantage of the factoring cases below. */
7433 if (((TREE_CODE (arg0) == PLUS_EXPR
7434 || TREE_CODE (arg0) == MINUS_EXPR)
7435 && TREE_CODE (arg1) == MULT_EXPR)
7436 || ((TREE_CODE (arg1) == PLUS_EXPR
7437 || TREE_CODE (arg1) == MINUS_EXPR)
7438 && TREE_CODE (arg0) == MULT_EXPR))
7440 tree parg0, parg1, parg, marg;
7441 enum tree_code pcode;
7443 if (TREE_CODE (arg1) == MULT_EXPR)
7444 parg = arg0, marg = arg1;
7445 else
7446 parg = arg1, marg = arg0;
7447 pcode = TREE_CODE (parg);
7448 parg0 = TREE_OPERAND (parg, 0);
7449 parg1 = TREE_OPERAND (parg, 1);
7450 STRIP_NOPS (parg0);
7451 STRIP_NOPS (parg1);
7453 if (TREE_CODE (parg0) == MULT_EXPR
7454 && TREE_CODE (parg1) != MULT_EXPR)
7455 return fold_build2 (pcode, type,
7456 fold_build2 (PLUS_EXPR, type,
7457 fold_convert (type, parg0),
7458 fold_convert (type, marg)),
7459 fold_convert (type, parg1));
7460 if (TREE_CODE (parg0) != MULT_EXPR
7461 && TREE_CODE (parg1) == MULT_EXPR)
7462 return fold_build2 (PLUS_EXPR, type,
7463 fold_convert (type, parg0),
7464 fold_build2 (pcode, type,
7465 fold_convert (type, marg),
7466 fold_convert (type,
7467 parg1)));
7470 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7472 tree arg00, arg01, arg10, arg11;
7473 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7475 /* (A * C) + (B * C) -> (A+B) * C.
7476 We are most concerned about the case where C is a constant,
7477 but other combinations show up during loop reduction. Since
7478 it is not difficult, try all four possibilities. */
7480 arg00 = TREE_OPERAND (arg0, 0);
7481 arg01 = TREE_OPERAND (arg0, 1);
7482 arg10 = TREE_OPERAND (arg1, 0);
7483 arg11 = TREE_OPERAND (arg1, 1);
7484 same = NULL_TREE;
7486 if (operand_equal_p (arg01, arg11, 0))
7487 same = arg01, alt0 = arg00, alt1 = arg10;
7488 else if (operand_equal_p (arg00, arg10, 0))
7489 same = arg00, alt0 = arg01, alt1 = arg11;
7490 else if (operand_equal_p (arg00, arg11, 0))
7491 same = arg00, alt0 = arg01, alt1 = arg10;
7492 else if (operand_equal_p (arg01, arg10, 0))
7493 same = arg01, alt0 = arg00, alt1 = arg11;
7495 /* No identical multiplicands; see if we can find a common
7496 power-of-two factor in non-power-of-two multiplies. This
7497 can help in multi-dimensional array access. */
7498 else if (TREE_CODE (arg01) == INTEGER_CST
7499 && TREE_CODE (arg11) == INTEGER_CST
7500 && TREE_INT_CST_HIGH (arg01) == 0
7501 && TREE_INT_CST_HIGH (arg11) == 0)
7503 HOST_WIDE_INT int01, int11, tmp;
7504 int01 = TREE_INT_CST_LOW (arg01);
7505 int11 = TREE_INT_CST_LOW (arg11);
7507 /* Move min of absolute values to int11. */
7508 if ((int01 >= 0 ? int01 : -int01)
7509 < (int11 >= 0 ? int11 : -int11))
7511 tmp = int01, int01 = int11, int11 = tmp;
7512 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7513 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7516 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7518 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7519 build_int_cst (NULL_TREE,
7520 int01 / int11));
7521 alt1 = arg10;
7522 same = arg11;
7526 if (same)
7527 return fold_build2 (MULT_EXPR, type,
7528 fold_build2 (PLUS_EXPR, type,
7529 fold_convert (type, alt0),
7530 fold_convert (type, alt1)),
7531 fold_convert (type, same));
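/* Annotation (sketch, not in the original source): the factoring above
   rewrites a hypothetical

       int f (int a, int b, int c) { return a * c + b * c; }

   as (a + b) * c, and the power-of-two branch turns
   a * 12 + b * 4 into (a * 3 + b) * 4, a pattern typical of
   multi-dimensional array indexing.  */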
7534 Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
7535 of the array. The loop optimizer sometimes produces this type of
7536 expression. */
7537 if (TREE_CODE (arg0) == ADDR_EXPR)
7539 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7540 if (tem)
7541 return fold_convert (type, fold (tem));
7543 else if (TREE_CODE (arg1) == ADDR_EXPR)
7545 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7546 if (tem)
7547 return fold_convert (type, fold (tem));
7550 else
7552 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7553 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7554 return non_lvalue (fold_convert (type, arg0));
7556 /* Likewise if the operands are reversed. */
7557 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7558 return non_lvalue (fold_convert (type, arg1));
7560 /* Convert X + -C into X - C. */
7561 if (TREE_CODE (arg1) == REAL_CST
7562 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7564 tem = fold_negate_const (arg1, type);
7565 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7566 return fold_build2 (MINUS_EXPR, type,
7567 fold_convert (type, arg0),
7568 fold_convert (type, tem));
7571 if (flag_unsafe_math_optimizations
7572 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7573 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7574 && (tem = distribute_real_division (code, type, arg0, arg1)))
7575 return tem;
7577 /* Convert x+x into x*2.0. */
7578 if (operand_equal_p (arg0, arg1, 0)
7579 && SCALAR_FLOAT_TYPE_P (type))
7580 return fold_build2 (MULT_EXPR, type, arg0,
7581 build_real (type, dconst2));
7583 /* Convert x*c+x into x*(c+1). */
7584 if (flag_unsafe_math_optimizations
7585 && TREE_CODE (arg0) == MULT_EXPR
7586 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7587 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7588 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7590 REAL_VALUE_TYPE c;
7592 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7593 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7594 return fold_build2 (MULT_EXPR, type, arg1,
7595 build_real (type, c));
7598 /* Convert x+x*c into x*(c+1). */
7599 if (flag_unsafe_math_optimizations
7600 && TREE_CODE (arg1) == MULT_EXPR
7601 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7602 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7603 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7605 REAL_VALUE_TYPE c;
7607 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7608 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7609 return fold_build2 (MULT_EXPR, type, arg0,
7610 build_real (type, c));
7613 /* Convert x*c1+x*c2 into x*(c1+c2). */
7614 if (flag_unsafe_math_optimizations
7615 && TREE_CODE (arg0) == MULT_EXPR
7616 && TREE_CODE (arg1) == MULT_EXPR
7617 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7618 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7619 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7620 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7621 && operand_equal_p (TREE_OPERAND (arg0, 0),
7622 TREE_OPERAND (arg1, 0), 0))
7624 REAL_VALUE_TYPE c1, c2;
7626 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7627 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7628 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7629 return fold_build2 (MULT_EXPR, type,
7630 TREE_OPERAND (arg0, 0),
7631 build_real (type, c1));
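/* Annotation (hedged, not from the original source): with
   -funsafe-math-optimizations the three folds above give, e.g.,

       x * 3.0 + x          ==>  x * 4.0
       x + x * 3.0          ==>  x * 4.0
       x * 2.0 + x * 5.0    ==>  x * 7.0

   They are guarded by the flag because regrouping can change IEEE
   rounding and overflow behavior.  */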
7633 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7634 if (flag_unsafe_math_optimizations
7635 && TREE_CODE (arg1) == PLUS_EXPR
7636 && TREE_CODE (arg0) != MULT_EXPR)
7638 tree tree10 = TREE_OPERAND (arg1, 0);
7639 tree tree11 = TREE_OPERAND (arg1, 1);
7640 if (TREE_CODE (tree11) == MULT_EXPR
7641 && TREE_CODE (tree10) == MULT_EXPR)
7643 tree tree0;
7644 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7645 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7648 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7649 if (flag_unsafe_math_optimizations
7650 && TREE_CODE (arg0) == PLUS_EXPR
7651 && TREE_CODE (arg1) != MULT_EXPR)
7653 tree tree00 = TREE_OPERAND (arg0, 0);
7654 tree tree01 = TREE_OPERAND (arg0, 1);
7655 if (TREE_CODE (tree01) == MULT_EXPR
7656 && TREE_CODE (tree00) == MULT_EXPR)
7658 tree tree0;
7659 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7660 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7665 bit_rotate:
7666 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7667 is a rotate of A by C1 bits. */
7668 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7669 is a rotate of A by B bits. */
7671 enum tree_code code0, code1;
7672 code0 = TREE_CODE (arg0);
7673 code1 = TREE_CODE (arg1);
7674 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7675 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7676 && operand_equal_p (TREE_OPERAND (arg0, 0),
7677 TREE_OPERAND (arg1, 0), 0)
7678 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7680 tree tree01, tree11;
7681 enum tree_code code01, code11;
7683 tree01 = TREE_OPERAND (arg0, 1);
7684 tree11 = TREE_OPERAND (arg1, 1);
7685 STRIP_NOPS (tree01);
7686 STRIP_NOPS (tree11);
7687 code01 = TREE_CODE (tree01);
7688 code11 = TREE_CODE (tree11);
7689 if (code01 == INTEGER_CST
7690 && code11 == INTEGER_CST
7691 && TREE_INT_CST_HIGH (tree01) == 0
7692 && TREE_INT_CST_HIGH (tree11) == 0
7693 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7694 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7695 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7696 code0 == LSHIFT_EXPR ? tree01 : tree11);
7697 else if (code11 == MINUS_EXPR)
7699 tree tree110, tree111;
7700 tree110 = TREE_OPERAND (tree11, 0);
7701 tree111 = TREE_OPERAND (tree11, 1);
7702 STRIP_NOPS (tree110);
7703 STRIP_NOPS (tree111);
7704 if (TREE_CODE (tree110) == INTEGER_CST
7705 && 0 == compare_tree_int (tree110,
7706 TYPE_PRECISION
7707 (TREE_TYPE (TREE_OPERAND
7708 (arg0, 0))))
7709 && operand_equal_p (tree01, tree111, 0))
7710 return build2 ((code0 == LSHIFT_EXPR
7711 ? LROTATE_EXPR
7712 : RROTATE_EXPR),
7713 type, TREE_OPERAND (arg0, 0), tree01);
7715 else if (code01 == MINUS_EXPR)
7717 tree tree010, tree011;
7718 tree010 = TREE_OPERAND (tree01, 0);
7719 tree011 = TREE_OPERAND (tree01, 1);
7720 STRIP_NOPS (tree010);
7721 STRIP_NOPS (tree011);
7722 if (TREE_CODE (tree010) == INTEGER_CST
7723 && 0 == compare_tree_int (tree010,
7724 TYPE_PRECISION
7725 (TREE_TYPE (TREE_OPERAND
7726 (arg0, 0))))
7727 && operand_equal_p (tree11, tree011, 0))
7728 return build2 ((code0 != LSHIFT_EXPR
7729 ? LROTATE_EXPR
7730 : RROTATE_EXPR),
7731 type, TREE_OPERAND (arg0, 0), tree11);
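/* Annotation (illustrative sketch, not part of the original source):
   assuming a 32-bit unsigned type, the rotate recognition above turns
   a hypothetical

       unsigned rot3 (unsigned x) { return (x << 3) | (x >> 29); }

   into a single LROTATE_EXPR by 3 (the | form reaches this code via
   the bit_ior path); the MINUS_EXPR branches match the variable-count
   form (x << n) | (x >> (32 - n)).  */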
7736 associate:
7737 /* In most languages, we can't associate operations on floats through
7738 parentheses. Rather than remember where the parentheses were, we
7739 don't associate floats at all, unless the user has specified
7740 -funsafe-math-optimizations. */
7742 if (! wins
7743 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7745 tree var0, con0, lit0, minus_lit0;
7746 tree var1, con1, lit1, minus_lit1;
7748 /* Split both trees into variables, constants, and literals. Then
7749 associate each group together, the constants with literals,
7750 then the result with variables. This increases the chances of
7751 literals being recombined later and of generating relocatable
7752 expressions for the sum of a constant and literal. */
7753 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7754 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7755 code == MINUS_EXPR);
7757 /* Only do something if we found more than two objects. Otherwise,
7758 nothing has changed and we risk infinite recursion. */
7759 if (2 < ((var0 != 0) + (var1 != 0)
7760 + (con0 != 0) + (con1 != 0)
7761 + (lit0 != 0) + (lit1 != 0)
7762 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7764 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7765 if (code == MINUS_EXPR)
7766 code = PLUS_EXPR;
7768 var0 = associate_trees (var0, var1, code, type);
7769 con0 = associate_trees (con0, con1, code, type);
7770 lit0 = associate_trees (lit0, lit1, code, type);
7771 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7773 /* Preserve the MINUS_EXPR if the negative part of the literal is
7774 greater than the positive part. Otherwise, the multiplicative
7775 folding code (i.e. extract_muldiv) may be fooled when
7776 unsigned constants are subtracted, like in the following
7777 example: ((X*2 + 4) - 8U)/2. */
7778 if (minus_lit0 && lit0)
7780 if (TREE_CODE (lit0) == INTEGER_CST
7781 && TREE_CODE (minus_lit0) == INTEGER_CST
7782 && tree_int_cst_lt (lit0, minus_lit0))
7784 minus_lit0 = associate_trees (minus_lit0, lit0,
7785 MINUS_EXPR, type);
7786 lit0 = 0;
7788 else
7790 lit0 = associate_trees (lit0, minus_lit0,
7791 MINUS_EXPR, type);
7792 minus_lit0 = 0;
7795 if (minus_lit0)
7797 if (con0 == 0)
7798 return fold_convert (type,
7799 associate_trees (var0, minus_lit0,
7800 MINUS_EXPR, type));
7801 else
7803 con0 = associate_trees (con0, minus_lit0,
7804 MINUS_EXPR, type);
7805 return fold_convert (type,
7806 associate_trees (var0, con0,
7807 PLUS_EXPR, type));
7811 con0 = associate_trees (con0, lit0, code, type);
7812 return fold_convert (type, associate_trees (var0, con0,
7813 code, type));
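/* Annotation (sketch, not in the original source): split_tree and
   associate_trees recombine mixed sums, e.g. a hypothetical

       int f (int x) { return (x + 1) + 2; }

   splits into the variable x and the literals 1 and 2, which are
   folded together and reattached as x + 3.  */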
7817 binary:
7818 if (wins)
7819 t1 = const_binop (code, arg0, arg1, 0);
7820 if (t1 != NULL_TREE)
7822 /* The return value should always have
7823 the same type as the original expression. */
7824 if (TREE_TYPE (t1) != type)
7825 t1 = fold_convert (type, t1);
7827 return t1;
7829 return NULL_TREE;
7831 case MINUS_EXPR:
7832 /* A - (-B) -> A + B */
7833 if (TREE_CODE (arg1) == NEGATE_EXPR)
7834 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7835 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7836 if (TREE_CODE (arg0) == NEGATE_EXPR
7837 && (FLOAT_TYPE_P (type)
7838 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7839 && negate_expr_p (arg1)
7840 && reorder_operands_p (arg0, arg1))
7841 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7842 TREE_OPERAND (arg0, 0));
7843 /* Convert -A - 1 to ~A. */
7844 if (INTEGRAL_TYPE_P (type)
7845 && TREE_CODE (arg0) == NEGATE_EXPR
7846 && integer_onep (arg1))
7847 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7849 /* Convert -1 - A to ~A. */
7850 if (INTEGRAL_TYPE_P (type)
7851 && integer_all_onesp (arg0))
7852 return fold_build1 (BIT_NOT_EXPR, type, arg1);
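/* Annotation (hedged, not from the original source): the two folds above
   rely on the two's complement identity ~a == -a - 1:

       int f (int a) { return -a - 1; }   folds to   ~a
       int g (int a) { return -1 - a; }   folds to   ~a   */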
7854 if (TREE_CODE (type) == COMPLEX_TYPE)
7856 tem = fold_complex_add (type, arg0, arg1, MINUS_EXPR);
7857 if (tem)
7858 return tem;
7861 if (! FLOAT_TYPE_P (type))
7863 if (! wins && integer_zerop (arg0))
7864 return negate_expr (fold_convert (type, arg1));
7865 if (integer_zerop (arg1))
7866 return non_lvalue (fold_convert (type, arg0));
7868 /* Fold A - (A & B) into ~B & A. */
7869 if (!TREE_SIDE_EFFECTS (arg0)
7870 && TREE_CODE (arg1) == BIT_AND_EXPR)
7872 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7873 return fold_build2 (BIT_AND_EXPR, type,
7874 fold_build1 (BIT_NOT_EXPR, type,
7875 TREE_OPERAND (arg1, 0)),
7876 arg0);
7877 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7878 return fold_build2 (BIT_AND_EXPR, type,
7879 fold_build1 (BIT_NOT_EXPR, type,
7880 TREE_OPERAND (arg1, 1)),
7881 arg0);
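/* Annotation (sketch, not part of the original source): a hypothetical

       int f (int a, int b) { return a - (a & b); }

   folds to ~b & a, i.e. it keeps exactly the bits of A that B does
   not select, with no subtraction left.  */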
7884 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7885 any power of 2 minus 1. */
7886 if (TREE_CODE (arg0) == BIT_AND_EXPR
7887 && TREE_CODE (arg1) == BIT_AND_EXPR
7888 && operand_equal_p (TREE_OPERAND (arg0, 0),
7889 TREE_OPERAND (arg1, 0), 0))
7891 tree mask0 = TREE_OPERAND (arg0, 1);
7892 tree mask1 = TREE_OPERAND (arg1, 1);
7893 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7895 if (operand_equal_p (tem, mask1, 0))
7897 tem = fold_build2 (BIT_XOR_EXPR, type,
7898 TREE_OPERAND (arg0, 0), mask1);
7899 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7904 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7905 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7906 return non_lvalue (fold_convert (type, arg0));
7908 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7909 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7910 (-ARG1 + ARG0) reduces to -ARG1. */
7911 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7912 return negate_expr (fold_convert (type, arg1));
7914 /* Fold &x - &x. This can happen from &x.foo - &x.
7915 This is unsafe for certain floats even in non-IEEE formats.
7916 In IEEE, it is unsafe because it does wrong for NaNs.
7917 Also note that operand_equal_p is always false if an operand
7918 is volatile. */
7920 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7921 && operand_equal_p (arg0, arg1, 0))
7922 return fold_convert (type, integer_zero_node);
7924 /* A - B -> A + (-B) if B is easily negatable. */
7925 if (!wins && negate_expr_p (arg1)
7926 && ((FLOAT_TYPE_P (type)
7927 /* Avoid this transformation if B is a positive REAL_CST. */
7928 && (TREE_CODE (arg1) != REAL_CST
7929 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7930 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7931 return fold_build2 (PLUS_EXPR, type, arg0, negate_expr (arg1));
7933 /* Try folding difference of addresses. */
7935 HOST_WIDE_INT diff;
7937 if ((TREE_CODE (arg0) == ADDR_EXPR
7938 || TREE_CODE (arg1) == ADDR_EXPR)
7939 && ptr_difference_const (arg0, arg1, &diff))
7940 return build_int_cst_type (type, diff);
7943 /* Fold &a[i] - &a[j] to i-j. */
7944 if (TREE_CODE (arg0) == ADDR_EXPR
7945 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7946 && TREE_CODE (arg1) == ADDR_EXPR
7947 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7949 tree aref0 = TREE_OPERAND (arg0, 0);
7950 tree aref1 = TREE_OPERAND (arg1, 0);
7951 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7952 TREE_OPERAND (aref1, 0), 0))
7954 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7955 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7956 tree esz = array_ref_element_size (aref0);
7957 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7958 return fold_build2 (MULT_EXPR, type, diff,
7959 fold_convert (type, esz));
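/* Annotation (hedged sketch, not from the original source): at this
   level pointer subtraction is in bytes, so &a[i] - &a[j] over the
   same array folds to (i - j) scaled by the element size; the
   source-level value i - j appears once the front end's division by
   the element size folds against that product.  */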
7964 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7965 of the array. The loop optimizer sometimes produces this type of
7966 expression. */
7967 if (TREE_CODE (arg0) == ADDR_EXPR)
7969 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7970 if (tem)
7971 return fold_convert (type, fold (tem));
7974 if (flag_unsafe_math_optimizations
7975 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7976 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7977 && (tem = distribute_real_division (code, type, arg0, arg1)))
7978 return tem;
7980 if (TREE_CODE (arg0) == MULT_EXPR
7981 && TREE_CODE (arg1) == MULT_EXPR
7982 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7984 /* (A * C) - (B * C) -> (A-B) * C. */
7985 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7986 TREE_OPERAND (arg1, 1), 0))
7987 return fold_build2 (MULT_EXPR, type,
7988 fold_build2 (MINUS_EXPR, type,
7989 TREE_OPERAND (arg0, 0),
7990 TREE_OPERAND (arg1, 0)),
7991 TREE_OPERAND (arg0, 1));
7992 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7993 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7994 TREE_OPERAND (arg1, 0), 0))
7995 return fold_build2 (MULT_EXPR, type,
7996 TREE_OPERAND (arg0, 0),
7997 fold_build2 (MINUS_EXPR, type,
7998 TREE_OPERAND (arg0, 1),
7999 TREE_OPERAND (arg1, 1)));
8002 goto associate;
8004 case MULT_EXPR:
8005 /* (-A) * (-B) -> A * B */
8006 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8007 return fold_build2 (MULT_EXPR, type,
8008 TREE_OPERAND (arg0, 0),
8009 negate_expr (arg1));
8010 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8011 return fold_build2 (MULT_EXPR, type,
8012 negate_expr (arg0),
8013 TREE_OPERAND (arg1, 0));
8015 if (TREE_CODE (type) == COMPLEX_TYPE)
8017 tem = fold_complex_mult (type, arg0, arg1);
8018 if (tem)
8019 return tem;
8022 if (! FLOAT_TYPE_P (type))
8024 if (integer_zerop (arg1))
8025 return omit_one_operand (type, arg1, arg0);
8026 if (integer_onep (arg1))
8027 return non_lvalue (fold_convert (type, arg0));
8028 /* Transform x * -1 into -x. */
8029 if (integer_all_onesp (arg1))
8030 return fold_convert (type, negate_expr (arg0));
8032 /* (a * (1 << b)) is (a << b) */
8033 if (TREE_CODE (arg1) == LSHIFT_EXPR
8034 && integer_onep (TREE_OPERAND (arg1, 0)))
8035 return fold_build2 (LSHIFT_EXPR, type, arg0,
8036 TREE_OPERAND (arg1, 1));
8037 if (TREE_CODE (arg0) == LSHIFT_EXPR
8038 && integer_onep (TREE_OPERAND (arg0, 0)))
8039 return fold_build2 (LSHIFT_EXPR, type, arg1,
8040 TREE_OPERAND (arg0, 1));
8042 if (TREE_CODE (arg1) == INTEGER_CST
8043 && 0 != (tem = extract_muldiv (op0,
8044 fold_convert (type, arg1),
8045 code, NULL_TREE)))
8046 return fold_convert (type, tem);
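/* Annotation (sketch, not in the original source): the shift folds
   above turn a hypothetical

       int f (int a, int b) { return a * (1 << b); }

   into a << b, and extract_muldiv distributes a constant
   multiplier through nested arithmetic where that is safe.  */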
8049 else
8051 /* Maybe fold x * 0 to 0. The expressions aren't the same
8052 when x is NaN, since x * 0 is also NaN. Nor are they the
8053 same in modes with signed zeros, since multiplying a
8054 negative value by 0 gives -0, not +0. */
8055 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8056 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8057 && real_zerop (arg1))
8058 return omit_one_operand (type, arg1, arg0);
8059 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
8060 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8061 && real_onep (arg1))
8062 return non_lvalue (fold_convert (type, arg0));
8064 /* Transform x * -1.0 into -x. */
8065 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8066 && real_minus_onep (arg1))
8067 return fold_convert (type, negate_expr (arg0));
8069 /* Convert (C1/X)*C2 into (C1*C2)/X. */
8070 if (flag_unsafe_math_optimizations
8071 && TREE_CODE (arg0) == RDIV_EXPR
8072 && TREE_CODE (arg1) == REAL_CST
8073 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
8075 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
8076 arg1, 0);
8077 if (tem)
8078 return fold_build2 (RDIV_EXPR, type, tem,
8079 TREE_OPERAND (arg0, 1));
8082 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
8083 if (operand_equal_p (arg0, arg1, 0))
8085 tree tem = fold_strip_sign_ops (arg0);
8086 if (tem != NULL_TREE)
8088 tem = fold_convert (type, tem);
8089 return fold_build2 (MULT_EXPR, type, tem, tem);
8093 if (flag_unsafe_math_optimizations)
8095 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8096 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8098 /* Optimizations of root(...)*root(...). */
8099 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
8101 tree rootfn, arg, arglist;
8102 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8103 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8105 /* Optimize sqrt(x)*sqrt(x) as x. */
8106 if (BUILTIN_SQRT_P (fcode0)
8107 && operand_equal_p (arg00, arg10, 0)
8108 && ! HONOR_SNANS (TYPE_MODE (type)))
8109 return arg00;
8111 /* Optimize root(x)*root(y) as root(x*y). */
8112 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8113 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8114 arglist = build_tree_list (NULL_TREE, arg);
8115 return build_function_call_expr (rootfn, arglist);
8118 /* Optimize expN(x)*expN(y) as expN(x+y). */
8119 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
8121 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8122 tree arg = fold_build2 (PLUS_EXPR, type,
8123 TREE_VALUE (TREE_OPERAND (arg0, 1)),
8124 TREE_VALUE (TREE_OPERAND (arg1, 1)));
8125 tree arglist = build_tree_list (NULL_TREE, arg);
8126 return build_function_call_expr (expfn, arglist);
8129 /* Optimizations of pow(...)*pow(...). */
8130 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
8131 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
8132 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
8134 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8135 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8136 1)));
8137 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8138 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8139 1)));
8141 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
8142 if (operand_equal_p (arg01, arg11, 0))
8144 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8145 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8146 tree arglist = tree_cons (NULL_TREE, arg,
8147 build_tree_list (NULL_TREE,
8148 arg01));
8149 return build_function_call_expr (powfn, arglist);
8152 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
8153 if (operand_equal_p (arg00, arg10, 0))
8155 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8156 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
8157 tree arglist = tree_cons (NULL_TREE, arg00,
8158 build_tree_list (NULL_TREE,
8159 arg));
8160 return build_function_call_expr (powfn, arglist);
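/* Annotation (hedged, not from the original source): under
   -funsafe-math-optimizations the two pow folds above give

       pow (x, y) * pow (z, y)  ==>  pow (x * z, y)
       pow (x, y) * pow (x, z)  ==>  pow (x, y + z)

   which is not exact for all signs and special values, hence the
   flag.  */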
8164 /* Optimize tan(x)*cos(x) as sin(x). */
8165 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
8166 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
8167 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
8168 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
8169 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
8170 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
8171 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8172 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8174 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
8176 if (sinfn != NULL_TREE)
8177 return build_function_call_expr (sinfn,
8178 TREE_OPERAND (arg0, 1));
8181 /* Optimize x*pow(x,c) as pow(x,c+1). */
8182 if (fcode1 == BUILT_IN_POW
8183 || fcode1 == BUILT_IN_POWF
8184 || fcode1 == BUILT_IN_POWL)
8186 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8187 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8188 1)));
8189 if (TREE_CODE (arg11) == REAL_CST
8190 && ! TREE_CONSTANT_OVERFLOW (arg11)
8191 && operand_equal_p (arg0, arg10, 0))
8193 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8194 REAL_VALUE_TYPE c;
8195 tree arg, arglist;
8197 c = TREE_REAL_CST (arg11);
8198 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8199 arg = build_real (type, c);
8200 arglist = build_tree_list (NULL_TREE, arg);
8201 arglist = tree_cons (NULL_TREE, arg0, arglist);
8202 return build_function_call_expr (powfn, arglist);
8206 /* Optimize pow(x,c)*x as pow(x,c+1). */
8207 if (fcode0 == BUILT_IN_POW
8208 || fcode0 == BUILT_IN_POWF
8209 || fcode0 == BUILT_IN_POWL)
8211 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8212 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8213 1)));
8214 if (TREE_CODE (arg01) == REAL_CST
8215 && ! TREE_CONSTANT_OVERFLOW (arg01)
8216 && operand_equal_p (arg1, arg00, 0))
8218 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8219 REAL_VALUE_TYPE c;
8220 tree arg, arglist;
8222 c = TREE_REAL_CST (arg01);
8223 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8224 arg = build_real (type, c);
8225 arglist = build_tree_list (NULL_TREE, arg);
8226 arglist = tree_cons (NULL_TREE, arg1, arglist);
8227 return build_function_call_expr (powfn, arglist);
8231 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8232 if (! optimize_size
8233 && operand_equal_p (arg0, arg1, 0))
8235 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8237 if (powfn)
8239 tree arg = build_real (type, dconst2);
8240 tree arglist = build_tree_list (NULL_TREE, arg);
8241 arglist = tree_cons (NULL_TREE, arg0, arglist);
8242 return build_function_call_expr (powfn, arglist);
8247 goto associate;
8249 case BIT_IOR_EXPR:
8250 bit_ior:
8251 if (integer_all_onesp (arg1))
8252 return omit_one_operand (type, arg1, arg0);
8253 if (integer_zerop (arg1))
8254 return non_lvalue (fold_convert (type, arg0));
8255 if (operand_equal_p (arg0, arg1, 0))
8256 return non_lvalue (fold_convert (type, arg0));
8258 /* ~X | X is -1. */
8259 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8260 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8262 t1 = build_int_cst (type, -1);
8263 t1 = force_fit_type (t1, 0, false, false);
8264 return omit_one_operand (type, t1, arg1);
8267 /* X | ~X is -1. */
8268 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8269 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8271 t1 = build_int_cst (type, -1);
8272 t1 = force_fit_type (t1, 0, false, false);
8273 return omit_one_operand (type, t1, arg0);
8276 t1 = distribute_bit_expr (code, type, arg0, arg1);
8277 if (t1 != NULL_TREE)
8278 return t1;
8280 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8282 This results in more efficient code for machines without a NAND
8283 instruction. Combine will canonicalize to the first form
8284 which will allow use of NAND instructions provided by the
8285 backend if they exist. */
8286 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8287 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8289 return fold_build1 (BIT_NOT_EXPR, type,
8290 build2 (BIT_AND_EXPR, type,
8291 TREE_OPERAND (arg0, 0),
8292 TREE_OPERAND (arg1, 0)));
8295 /* See if this can be simplified into a rotate first. If that
8296 is unsuccessful continue in the association code. */
8297 goto bit_rotate;
8299 case BIT_XOR_EXPR:
8300 if (integer_zerop (arg1))
8301 return non_lvalue (fold_convert (type, arg0));
8302 if (integer_all_onesp (arg1))
8303 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8304 if (operand_equal_p (arg0, arg1, 0))
8305 return omit_one_operand (type, integer_zero_node, arg0);
8307 /* ~X ^ X is -1. */
8308 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8309 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8311 t1 = build_int_cst (type, -1);
8312 t1 = force_fit_type (t1, 0, false, false);
8313 return omit_one_operand (type, t1, arg1);
8316 /* X ^ ~X is -1. */
8317 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8318 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8320 t1 = build_int_cst (type, -1);
8321 t1 = force_fit_type (t1, 0, false, false);
8322 return omit_one_operand (type, t1, arg0);
8325 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8326 with a constant, and the two constants have no bits in common,
8327 we should treat this as a BIT_IOR_EXPR since this may produce more
8328 simplifications. */
8329 if (TREE_CODE (arg0) == BIT_AND_EXPR
8330 && TREE_CODE (arg1) == BIT_AND_EXPR
8331 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8332 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8333 && integer_zerop (const_binop (BIT_AND_EXPR,
8334 TREE_OPERAND (arg0, 1),
8335 TREE_OPERAND (arg1, 1), 0)))
8337 code = BIT_IOR_EXPR;
8338 goto bit_ior;
8341 /* Convert ~X ^ ~Y to X ^ Y. */
8342 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8343 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8344 return fold_build2 (code, type,
8345 fold_convert (type, TREE_OPERAND (arg0, 0)),
8346 fold_convert (type, TREE_OPERAND (arg1, 0)));
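/* Annotation (sketch, not part of the original source): a hypothetical

       int f (int x, int y) { return ~x ^ ~y; }

   folds to x ^ y, since XOR cancels the two inversions.  */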
8348 /* See if this can be simplified into a rotate first. If that
8349 is unsuccessful continue in the association code. */
8350 goto bit_rotate;
8352 case BIT_AND_EXPR:
8353 if (integer_all_onesp (arg1))
8354 return non_lvalue (fold_convert (type, arg0));
8355 if (integer_zerop (arg1))
8356 return omit_one_operand (type, arg1, arg0);
8357 if (operand_equal_p (arg0, arg1, 0))
8358 return non_lvalue (fold_convert (type, arg0));
8360 /* ~X & X is always zero. */
8361 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8362 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8363 return omit_one_operand (type, integer_zero_node, arg1);
8365 /* X & ~X is always zero. */
8366 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8367 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8368 return omit_one_operand (type, integer_zero_node, arg0);
8370 t1 = distribute_bit_expr (code, type, arg0, arg1);
8371 if (t1 != NULL_TREE)
8372 return t1;
8373 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8374 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8375 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8377 unsigned int prec
8378 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8380 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8381 && (~TREE_INT_CST_LOW (arg1)
8382 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8383 return fold_convert (type, TREE_OPERAND (arg0, 0));
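/* Annotation (hedged example, not from the original source), assuming
   an 8-bit unsigned char:

       int f (unsigned char c) { return (int) c & 0377; }

   folds to plain (int) c, because the mask already covers every bit
   the zero-extended operand can carry.  */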
8386 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8388 This results in more efficient code for machines without a NOR
8389 instruction. Combine will canonicalize to the first form
8390 which will allow use of NOR instructions provided by the
8391 backend if they exist. */
8392 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8393 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8395 return fold_build1 (BIT_NOT_EXPR, type,
8396 build2 (BIT_IOR_EXPR, type,
8397 TREE_OPERAND (arg0, 0),
8398 TREE_OPERAND (arg1, 0)));
8401 goto associate;
8403 case RDIV_EXPR:
8404 /* Don't touch a floating-point divide by zero unless the mode
8405 of the constant can represent infinity. */
8406 if (TREE_CODE (arg1) == REAL_CST
8407 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8408 && real_zerop (arg1))
8409 return NULL_TREE;
8411 /* (-A) / (-B) -> A / B */
8412 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8413 return fold_build2 (RDIV_EXPR, type,
8414 TREE_OPERAND (arg0, 0),
8415 negate_expr (arg1));
8416 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8417 return fold_build2 (RDIV_EXPR, type,
8418 negate_expr (arg0),
8419 TREE_OPERAND (arg1, 0));
8421 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8422 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8423 && real_onep (arg1))
8424 return non_lvalue (fold_convert (type, arg0));
8426 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8427 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8428 && real_minus_onep (arg1))
8429 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8431 /* If ARG1 is a constant, we can convert this to a multiply by the
8432 reciprocal. This does not have the same rounding properties,
8433 so only do this if -funsafe-math-optimizations. We can actually
8434 always safely do it if ARG1 is a power of two, but it's hard to
8435 tell if it is or not in a portable manner. */
8436 if (TREE_CODE (arg1) == REAL_CST)
8438 if (flag_unsafe_math_optimizations
8439 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8440 arg1, 0)))
8441 return fold_build2 (MULT_EXPR, type, arg0, tem);
8442 /* Find the reciprocal if optimizing and the result is exact. */
8443 if (optimize)
8445 REAL_VALUE_TYPE r;
8446 r = TREE_REAL_CST (arg1);
8447 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8449 tem = build_real (type, r);
8450 return fold_build2 (MULT_EXPR, type, arg0, tem);
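/* Annotation (sketch, not in the original source): e.g. a hypothetical

       double f (double x) { return x / 2.0; }

   becomes x * 0.5 even without -funsafe-math-optimizations, since the
   reciprocal of a power of two is exactly representable.  */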
8454 /* Convert A/B/C to A/(B*C). */
8455 if (flag_unsafe_math_optimizations
8456 && TREE_CODE (arg0) == RDIV_EXPR)
8457 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8458 fold_build2 (MULT_EXPR, type,
8459 TREE_OPERAND (arg0, 1), arg1));
8461 /* Convert A/(B/C) to (A/B)*C. */
8462 if (flag_unsafe_math_optimizations
8463 && TREE_CODE (arg1) == RDIV_EXPR)
8464 return fold_build2 (MULT_EXPR, type,
8465 fold_build2 (RDIV_EXPR, type, arg0,
8466 TREE_OPERAND (arg1, 0)),
8467 TREE_OPERAND (arg1, 1));
8469 /* Convert C1/(X*C2) into (C1/C2)/X. */
8470 if (flag_unsafe_math_optimizations
8471 && TREE_CODE (arg1) == MULT_EXPR
8472 && TREE_CODE (arg0) == REAL_CST
8473 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8475 tree tem = const_binop (RDIV_EXPR, arg0,
8476 TREE_OPERAND (arg1, 1), 0);
8477 if (tem)
8478 return fold_build2 (RDIV_EXPR, type, tem,
8479 TREE_OPERAND (arg1, 0));
8482 if (TREE_CODE (type) == COMPLEX_TYPE)
8484 tem = fold_complex_div (type, arg0, arg1, code);
8485 if (tem)
8486 return tem;
8489 if (flag_unsafe_math_optimizations)
8491 enum built_in_function fcode = builtin_mathfn_code (arg1);
8492 /* Optimize x/expN(y) into x*expN(-y). */
8493 if (BUILTIN_EXPONENT_P (fcode))
8495 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8496 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8497 tree arglist = build_tree_list (NULL_TREE,
8498 fold_convert (type, arg));
8499 arg1 = build_function_call_expr (expfn, arglist);
8500 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8503 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8504 if (fcode == BUILT_IN_POW
8505 || fcode == BUILT_IN_POWF
8506 || fcode == BUILT_IN_POWL)
8508 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8509 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8510 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8511 tree neg11 = fold_convert (type, negate_expr (arg11));
8512 tree arglist = tree_cons(NULL_TREE, arg10,
8513 build_tree_list (NULL_TREE, neg11));
8514 arg1 = build_function_call_expr (powfn, arglist);
8515 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8519 if (flag_unsafe_math_optimizations)
8521 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8522 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8524 /* Optimize sin(x)/cos(x) as tan(x). */
8525 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8526 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8527 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8528 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8529 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8531 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8533 if (tanfn != NULL_TREE)
8534 return build_function_call_expr (tanfn,
8535 TREE_OPERAND (arg0, 1));
8538 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8539 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8540 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8541 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8542 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8543 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8545 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8547 if (tanfn != NULL_TREE)
8549 tree tmp = TREE_OPERAND (arg0, 1);
8550 tmp = build_function_call_expr (tanfn, tmp);
8551 return fold_build2 (RDIV_EXPR, type,
8552 build_real (type, dconst1), tmp);
8556 /* Optimize pow(x,c)/x as pow(x,c-1). */
8557 if (fcode0 == BUILT_IN_POW
8558 || fcode0 == BUILT_IN_POWF
8559 || fcode0 == BUILT_IN_POWL)
8561 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8562 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8563 if (TREE_CODE (arg01) == REAL_CST
8564 && ! TREE_CONSTANT_OVERFLOW (arg01)
8565 && operand_equal_p (arg1, arg00, 0))
8567 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8568 REAL_VALUE_TYPE c;
8569 tree arg, arglist;
8571 c = TREE_REAL_CST (arg01);
8572 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8573 arg = build_real (type, c);
8574 arglist = build_tree_list (NULL_TREE, arg);
8575 arglist = tree_cons (NULL_TREE, arg1, arglist);
8576 return build_function_call_expr (powfn, arglist);
8580 goto binary;
8582 case TRUNC_DIV_EXPR:
8583 case ROUND_DIV_EXPR:
8584 case FLOOR_DIV_EXPR:
8585 case CEIL_DIV_EXPR:
8586 case EXACT_DIV_EXPR:
8587 if (integer_onep (arg1))
8588 return non_lvalue (fold_convert (type, arg0));
8589 if (integer_zerop (arg1))
8590 return NULL_TREE;
8591 /* X / -1 is -X. */
8592 if (!TYPE_UNSIGNED (type)
8593 && TREE_CODE (arg1) == INTEGER_CST
8594 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8595 && TREE_INT_CST_HIGH (arg1) == -1)
8596 return fold_convert (type, negate_expr (arg0));
8598 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8599 operation, EXACT_DIV_EXPR.
8601 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8602 At one time others generated faster code, but it's not clear whether they
8603 still do after the last round of changes to the DIV code in expmed.c. */
8604 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8605 && multiple_of_p (type, arg0, arg1))
8606 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8608 if (TREE_CODE (arg1) == INTEGER_CST
8609 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8610 return fold_convert (type, tem);
8612 if (TREE_CODE (type) == COMPLEX_TYPE)
8614 tem = fold_complex_div (type, arg0, arg1, code);
8615 if (tem)
8616 return tem;
8618 goto binary;
8620 case CEIL_MOD_EXPR:
8621 case FLOOR_MOD_EXPR:
8622 case ROUND_MOD_EXPR:
8623 case TRUNC_MOD_EXPR:
8624 /* X % 1 is always zero, but be sure to preserve any side
8625 effects in X. */
8626 if (integer_onep (arg1))
8627 return omit_one_operand (type, integer_zero_node, arg0);
8629 /* For X % 0, return X % 0 unchanged so that we can get the
8630 proper warnings and errors. */
8631 if (integer_zerop (arg1))
8632 return NULL_TREE;
8634 /* 0 % X is always zero, but be sure to preserve any side
8635 effects in X. Place this after checking for X == 0. */
8636 if (integer_zerop (arg0))
8637 return omit_one_operand (type, integer_zero_node, arg1);
8639 /* X % -1 is zero. */
8640 if (!TYPE_UNSIGNED (type)
8641 && TREE_CODE (arg1) == INTEGER_CST
8642 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8643 && TREE_INT_CST_HIGH (arg1) == -1)
8644 return omit_one_operand (type, integer_zero_node, arg0);
8646 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
8647 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
8648 if (code == TRUNC_MOD_EXPR
8649 && TYPE_UNSIGNED (type)
8650 && integer_pow2p (arg1))
8652 unsigned HOST_WIDE_INT high, low;
8653 tree mask;
8654 int l;
8656 l = tree_log2 (arg1);
8657 if (l >= HOST_BITS_PER_WIDE_INT)
8659 high = ((unsigned HOST_WIDE_INT) 1
8660 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8661 low = -1;
8663 else
8665 high = 0;
8666 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8669 mask = build_int_cst_wide (type, low, high);
8670 return fold_build2 (BIT_AND_EXPR, type,
8671 fold_convert (type, arg0), mask);
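/* Annotation (hedged, not from the original source): a hypothetical

       unsigned f (unsigned x) { return x % 8; }

   folds to x & 7.  This holds only for unsigned types; a signed
   remainder must preserve the sign of the dividend.  */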
8674 /* X % -C is the same as X % C. */
8675 if (code == TRUNC_MOD_EXPR
8676 && !TYPE_UNSIGNED (type)
8677 && TREE_CODE (arg1) == INTEGER_CST
8678 && !TREE_CONSTANT_OVERFLOW (arg1)
8679 && TREE_INT_CST_HIGH (arg1) < 0
8680 && !flag_trapv
8681 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8682 && !sign_bit_p (arg1, arg1))
8683 return fold_build2 (code, type, fold_convert (type, arg0),
8684 fold_convert (type, negate_expr (arg1)));
8686 /* X % -Y is the same as X % Y. */
8687 if (code == TRUNC_MOD_EXPR
8688 && !TYPE_UNSIGNED (type)
8689 && TREE_CODE (arg1) == NEGATE_EXPR
8690 && !flag_trapv)
8691 return fold_build2 (code, type, fold_convert (type, arg0),
8692 fold_convert (type, TREE_OPERAND (arg1, 0)));
8694 if (TREE_CODE (arg1) == INTEGER_CST
8695 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8696 return fold_convert (type, tem);
8698 goto binary;
8700 case LROTATE_EXPR:
8701 case RROTATE_EXPR:
8702 if (integer_all_onesp (arg0))
8703 return omit_one_operand (type, arg0, arg1);
8704 goto shift;
8706 case RSHIFT_EXPR:
8707 /* Optimize -1 >> x for arithmetic right shifts. */
8708 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8709 return omit_one_operand (type, arg0, arg1);
8710 /* ... fall through ... */
8712 case LSHIFT_EXPR:
8713 shift:
8714 if (integer_zerop (arg1))
8715 return non_lvalue (fold_convert (type, arg0));
8716 if (integer_zerop (arg0))
8717 return omit_one_operand (type, arg0, arg1);
8719 /* Since a negative shift count is not well-defined,
8720 don't try to compute it in the compiler. */
8721 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8722 return NULL_TREE;
8723 /* Rewrite an LROTATE_EXPR by a constant into an
8724 RROTATE_EXPR by a new constant. */
8725 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8727 tree tem = build_int_cst (NULL_TREE,
8728 GET_MODE_BITSIZE (TYPE_MODE (type)));
8729 tem = fold_convert (TREE_TYPE (arg1), tem);
8730 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8731 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8734 /* If we have a rotate of a bit operation with the rotate count and
8735 the second operand of the bit operation both constant,
8736 permute the two operations. */
8737 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8738 && (TREE_CODE (arg0) == BIT_AND_EXPR
8739 || TREE_CODE (arg0) == BIT_IOR_EXPR
8740 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8741 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8742 return fold_build2 (TREE_CODE (arg0), type,
8743 fold_build2 (code, type,
8744 TREE_OPERAND (arg0, 0), arg1),
8745 fold_build2 (code, type,
8746 TREE_OPERAND (arg0, 1), arg1));
8748 /* Two consecutive rotates adding up to the width of the mode can
8749 be ignored. */
8750 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8751 && TREE_CODE (arg0) == RROTATE_EXPR
8752 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8753 && TREE_INT_CST_HIGH (arg1) == 0
8754 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8755 && ((TREE_INT_CST_LOW (arg1)
8756 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8757 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8758 return TREE_OPERAND (arg0, 0);
8760 goto binary;
8762 case MIN_EXPR:
8763 if (operand_equal_p (arg0, arg1, 0))
8764 return omit_one_operand (type, arg0, arg1);
8765 if (INTEGRAL_TYPE_P (type)
8766 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8767 return omit_one_operand (type, arg1, arg0);
8768 goto associate;
8770 case MAX_EXPR:
8771 if (operand_equal_p (arg0, arg1, 0))
8772 return omit_one_operand (type, arg0, arg1);
8773 if (INTEGRAL_TYPE_P (type)
8774 && TYPE_MAX_VALUE (type)
8775 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8776 return omit_one_operand (type, arg1, arg0);
8777 goto associate;
8779 case TRUTH_ANDIF_EXPR:
8780 /* Note that the operands of this must be ints
8781 and their values must be 0 or 1.
8782 ("true" is a fixed value perhaps depending on the language.) */
8783 /* If first arg is constant zero, return it. */
8784 if (integer_zerop (arg0))
8785 return fold_convert (type, arg0);
8786 case TRUTH_AND_EXPR:
8787 /* If either arg is constant true, drop it. */
8788 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8789 return non_lvalue (fold_convert (type, arg1));
8790 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8791 /* Preserve sequence points. */
8792 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8793 return non_lvalue (fold_convert (type, arg0));
8794 /* If second arg is constant zero, result is zero, but first arg
8795 must be evaluated. */
8796 if (integer_zerop (arg1))
8797 return omit_one_operand (type, arg1, arg0);
8798 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8799 case will be handled here. */
8800 if (integer_zerop (arg0))
8801 return omit_one_operand (type, arg0, arg1);
8803 /* !X && X is always false. */
8804 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8805 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8806 return omit_one_operand (type, integer_zero_node, arg1);
8807 /* X && !X is always false. */
8808 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8809 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8810 return omit_one_operand (type, integer_zero_node, arg0);
8812 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8813 means A >= Y && A != MAX, but in this case we know that
8814 A < X <= MAX. */
8816 if (!TREE_SIDE_EFFECTS (arg0)
8817 && !TREE_SIDE_EFFECTS (arg1))
8819 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8820 if (tem)
8821 return fold_build2 (code, type, tem, arg1);
8823 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8824 if (tem)
8825 return fold_build2 (code, type, arg0, tem);
8828 truth_andor:
8829 /* We only do these simplifications if we are optimizing. */
8830 if (!optimize)
8831 return NULL_TREE;
8833 /* Check for things like (A || B) && (A || C). We can convert this
8834 to A || (B && C). Note that either operator can be any of the four
8835 truth and/or operations and the transformation will still be
8836 valid. Also note that we only care about order for the
8837 ANDIF and ORIF operators. If B contains side effects, this
8838 might change the truth-value of A. */
8839 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8840 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8841 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8842 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8843 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8844 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8846 tree a00 = TREE_OPERAND (arg0, 0);
8847 tree a01 = TREE_OPERAND (arg0, 1);
8848 tree a10 = TREE_OPERAND (arg1, 0);
8849 tree a11 = TREE_OPERAND (arg1, 1);
8850 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8851 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8852 && (code == TRUTH_AND_EXPR
8853 || code == TRUTH_OR_EXPR));
8855 if (operand_equal_p (a00, a10, 0))
8856 return fold_build2 (TREE_CODE (arg0), type, a00,
8857 fold_build2 (code, type, a01, a11));
8858 else if (commutative && operand_equal_p (a00, a11, 0))
8859 return fold_build2 (TREE_CODE (arg0), type, a00,
8860 fold_build2 (code, type, a01, a10));
8861 else if (commutative && operand_equal_p (a01, a10, 0))
8862 return fold_build2 (TREE_CODE (arg0), type, a01,
8863 fold_build2 (code, type, a00, a11));
8865 /* This case is tricky because we must either have commutative
8866 operators or else A10 must not have side-effects. */
8868 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8869 && operand_equal_p (a01, a11, 0))
8870 return fold_build2 (TREE_CODE (arg0), type,
8871 fold_build2 (code, type, a00, a10),
8872 a01);
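/* Annotation (sketch, not part of the original source): a hypothetical

       int f (int a, int b, int c) { return (a || b) && (a || c); }

   folds to a || (b && c), testing A only once; the commutative
   variants are restricted to TRUTH_AND/TRUTH_OR, where evaluation
   order is free.  */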
8875 /* See if we can build a range comparison. */
8876 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8877 return tem;
8879 /* Check for the possibility of merging component references. If our
8880 lhs is another similar operation, try to merge its rhs with our
8881 rhs. Then try to merge our lhs and rhs. */
8882 if (TREE_CODE (arg0) == code
8883 && 0 != (tem = fold_truthop (code, type,
8884 TREE_OPERAND (arg0, 1), arg1)))
8885 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8887 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8888 return tem;
8890 return NULL_TREE;
8892 case TRUTH_ORIF_EXPR:
8893 /* Note that the operands of this must be ints
8894 and their values must be 0 or 1.
8895 ("true" is a fixed value perhaps depending on the language.) */
8896 /* If first arg is constant true, return it. */
8897 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8898 return fold_convert (type, arg0);
8899 case TRUTH_OR_EXPR:
8900 /* If either arg is constant zero, drop it. */
8901 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8902 return non_lvalue (fold_convert (type, arg1));
8903 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8904 /* Preserve sequence points. */
8905 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8906 return non_lvalue (fold_convert (type, arg0));
8907 /* If second arg is constant true, result is true, but we must
8908 evaluate first arg. */
8909 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8910 return omit_one_operand (type, arg1, arg0);
8911 /* Likewise for first arg, but note this only occurs here for
8912 TRUTH_OR_EXPR. */
8913 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8914 return omit_one_operand (type, arg0, arg1);
8916 /* !X || X is always true. */
8917 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8918 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8919 return omit_one_operand (type, integer_one_node, arg1);
8920 /* X || !X is always true. */
8921 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8922 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8923 return omit_one_operand (type, integer_one_node, arg0);
8925 goto truth_andor;
8927 case TRUTH_XOR_EXPR:
8928 /* If the second arg is constant zero, drop it. */
8929 if (integer_zerop (arg1))
8930 return non_lvalue (fold_convert (type, arg0));
8931 /* If the second arg is constant true, this is a logical inversion. */
8932 if (integer_onep (arg1))
8934 /* Only call invert_truthvalue if operand is a truth value. */
8935 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8936 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8937 else
8938 tem = invert_truthvalue (arg0);
8939 return non_lvalue (fold_convert (type, tem));
8941 /* Identical arguments cancel to zero. */
8942 if (operand_equal_p (arg0, arg1, 0))
8943 return omit_one_operand (type, integer_zero_node, arg0);
8945 /* !X ^ X is always true. */
8946 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8947 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8948 return omit_one_operand (type, integer_one_node, arg1);
8950 /* X ^ !X is always true. */
8951 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8952 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8953 return omit_one_operand (type, integer_one_node, arg0);
8955 return NULL_TREE;
8957 case EQ_EXPR:
8958 case NE_EXPR:
8959 case LT_EXPR:
8960 case GT_EXPR:
8961 case LE_EXPR:
8962 case GE_EXPR:
8963 /* If one arg is a real or integer constant, put it last. */
8964 if (tree_swap_operands_p (arg0, arg1, true))
8965 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8967 /* bool_var != 0 becomes bool_var. */
8968 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8969 && code == NE_EXPR)
8970 return non_lvalue (fold_convert (type, arg0));
8972 /* bool_var == 1 becomes bool_var. */
8973 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8974 && code == EQ_EXPR)
8975 return non_lvalue (fold_convert (type, arg0));
8977 /* If this is an equality comparison of the address of a non-weak
8978 object against zero, then we know the result. */
8979 if ((code == EQ_EXPR || code == NE_EXPR)
8980 && TREE_CODE (arg0) == ADDR_EXPR
8981 && DECL_P (TREE_OPERAND (arg0, 0))
8982 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8983 && integer_zerop (arg1))
8984 return constant_boolean_node (code != EQ_EXPR, type);
8986 /* If this is an equality comparison of the address of two non-weak,
8987 unaliased symbols neither of which are extern (since we do not
8988 have access to attributes for externs), then we know the result. */
8989 if ((code == EQ_EXPR || code == NE_EXPR)
8990 && TREE_CODE (arg0) == ADDR_EXPR
8991 && DECL_P (TREE_OPERAND (arg0, 0))
8992 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8993 && ! lookup_attribute ("alias",
8994 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8995 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8996 && TREE_CODE (arg1) == ADDR_EXPR
8997 && DECL_P (TREE_OPERAND (arg1, 0))
8998 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8999 && ! lookup_attribute ("alias",
9000 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
9001 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
9002 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
9003 ? code == EQ_EXPR : code != EQ_EXPR,
9004 type);
9006 /* If this is a comparison of two exprs that look like an
9007 ARRAY_REF of the same object, then we can fold this to a
9008 comparison of the two offsets. */
9009 if (TREE_CODE_CLASS (code) == tcc_comparison)
9011 tree base0, offset0, base1, offset1;
9013 if (extract_array_ref (arg0, &base0, &offset0)
9014 && extract_array_ref (arg1, &base1, &offset1)
9015 && operand_equal_p (base0, base1, 0))
9017 if (offset0 == NULL_TREE
9018 && offset1 == NULL_TREE)
9020 offset0 = integer_zero_node;
9021 offset1 = integer_zero_node;
9023 else if (offset0 == NULL_TREE)
9024 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
9025 else if (offset1 == NULL_TREE)
9026 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
9028 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
9029 return fold_build2 (code, type, offset0, offset1);
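/* For instance, &a[i] CMP &a[j] reduces to i CMP j here, provided
   both sides refer to the same base object and the two offsets
   have the same type.  */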
9033 /* Transform comparisons of the form X +- C CMP X. */
9034 if ((code != EQ_EXPR && code != NE_EXPR)
9035 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9036 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9037 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9038 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
9039 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9040 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9041 && !(flag_wrapv || flag_trapv))))
9043 tree arg01 = TREE_OPERAND (arg0, 1);
9044 enum tree_code code0 = TREE_CODE (arg0);
9045 int is_positive;
9047 if (TREE_CODE (arg01) == REAL_CST)
9048 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
9049 else
9050 is_positive = tree_int_cst_sgn (arg01);
9052 /* (X - c) > X becomes false. */
9053 if (code == GT_EXPR
9054 && ((code0 == MINUS_EXPR && is_positive >= 0)
9055 || (code0 == PLUS_EXPR && is_positive <= 0)))
9056 return constant_boolean_node (0, type);
9058 /* Likewise (X + c) < X becomes false. */
9059 if (code == LT_EXPR
9060 && ((code0 == PLUS_EXPR && is_positive >= 0)
9061 || (code0 == MINUS_EXPR && is_positive <= 0)))
9062 return constant_boolean_node (0, type);
9064 /* Convert (X - c) <= X to true. */
9065 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9066 && code == LE_EXPR
9067 && ((code0 == MINUS_EXPR && is_positive >= 0)
9068 || (code0 == PLUS_EXPR && is_positive <= 0)))
9069 return constant_boolean_node (1, type);
9071 /* Convert (X + c) >= X to true. */
9072 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9073 && code == GE_EXPR
9074 && ((code0 == PLUS_EXPR && is_positive >= 0)
9075 || (code0 == MINUS_EXPR && is_positive <= 0)))
9076 return constant_boolean_node (1, type);
9078 if (TREE_CODE (arg01) == INTEGER_CST)
9080 /* Convert X + c > X and X - c < X to true for integers. */
9081 if (code == GT_EXPR
9082 && ((code0 == PLUS_EXPR && is_positive > 0)
9083 || (code0 == MINUS_EXPR && is_positive < 0)))
9084 return constant_boolean_node (1, type);
9086 if (code == LT_EXPR
9087 && ((code0 == MINUS_EXPR && is_positive > 0)
9088 || (code0 == PLUS_EXPR && is_positive < 0)))
9089 return constant_boolean_node (1, type);
9091 /* Convert X + c <= X and X - c >= X to false for integers. */
9092 if (code == LE_EXPR
9093 && ((code0 == PLUS_EXPR && is_positive > 0)
9094 || (code0 == MINUS_EXPR && is_positive < 0)))
9095 return constant_boolean_node (0, type);
9097 if (code == GE_EXPR
9098 && ((code0 == MINUS_EXPR && is_positive > 0)
9099 || (code0 == PLUS_EXPR && is_positive < 0)))
9100 return constant_boolean_node (0, type);
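/* Concretely, for signed integral X (with neither -fwrapv nor
   -ftrapv), X + 1 > X and X - 1 < X fold to true, while
   X + 1 <= X and X - 1 >= X fold to false.  */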
9104 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9106 tree targ0 = strip_float_extensions (arg0);
9107 tree targ1 = strip_float_extensions (arg1);
9108 tree newtype = TREE_TYPE (targ0);
9110 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9111 newtype = TREE_TYPE (targ1);
9113 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9114 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9115 return fold_build2 (code, type, fold_convert (newtype, targ0),
9116 fold_convert (newtype, targ1));
9118 /* (-a) CMP (-b) -> b CMP a */
9119 if (TREE_CODE (arg0) == NEGATE_EXPR
9120 && TREE_CODE (arg1) == NEGATE_EXPR)
9121 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9122 TREE_OPERAND (arg0, 0));
9124 if (TREE_CODE (arg1) == REAL_CST)
9126 REAL_VALUE_TYPE cst;
9127 cst = TREE_REAL_CST (arg1);
9129 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9130 if (TREE_CODE (arg0) == NEGATE_EXPR)
9131 return
9132 fold_build2 (swap_tree_comparison (code), type,
9133 TREE_OPERAND (arg0, 0),
9134 build_real (TREE_TYPE (arg1),
9135 REAL_VALUE_NEGATE (cst)));
9137 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9138 /* a CMP (-0) -> a CMP 0 */
9139 if (REAL_VALUE_MINUS_ZERO (cst))
9140 return fold_build2 (code, type, arg0,
9141 build_real (TREE_TYPE (arg1), dconst0));
9143 /* x != NaN is always true, other ops are always false. */
9144 if (REAL_VALUE_ISNAN (cst)
9145 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9147 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9148 return omit_one_operand (type, tem, arg0);
9151 /* Fold comparisons against infinity. */
9152 if (REAL_VALUE_ISINF (cst))
9154 tem = fold_inf_compare (code, type, arg0, arg1);
9155 if (tem != NULL_TREE)
9156 return tem;
9160 /* If this is a comparison of a real constant with a PLUS_EXPR
9161 or a MINUS_EXPR of a real constant, we can convert it into a
9162 comparison with a revised real constant as long as no overflow
9163 occurs when unsafe_math_optimizations are enabled. */
9164 if (flag_unsafe_math_optimizations
9165 && TREE_CODE (arg1) == REAL_CST
9166 && (TREE_CODE (arg0) == PLUS_EXPR
9167 || TREE_CODE (arg0) == MINUS_EXPR)
9168 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9169 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9170 ? MINUS_EXPR : PLUS_EXPR,
9171 arg1, TREE_OPERAND (arg0, 1), 0))
9172 && ! TREE_CONSTANT_OVERFLOW (tem))
9173 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9175 /* Likewise, we can simplify a comparison of a real constant with
9176 a MINUS_EXPR whose first operand is also a real constant, i.e.
9177 (c1 - x) < c2 becomes x > c1-c2. */
9178 if (flag_unsafe_math_optimizations
9179 && TREE_CODE (arg1) == REAL_CST
9180 && TREE_CODE (arg0) == MINUS_EXPR
9181 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9182 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9183 arg1, 0))
9184 && ! TREE_CONSTANT_OVERFLOW (tem))
9185 return fold_build2 (swap_tree_comparison (code), type,
9186 TREE_OPERAND (arg0, 1), tem);
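/* For example, with -funsafe-math-optimizations, X + 2.0 > 3.0
   becomes X > 1.0, and (10.0 - X) < 4.0 becomes X > 6.0, provided
   the folded constant does not overflow.  */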
9188 /* Fold comparisons against built-in math functions. */
9189 if (TREE_CODE (arg1) == REAL_CST
9190 && flag_unsafe_math_optimizations
9191 && ! flag_errno_math)
9193 enum built_in_function fcode = builtin_mathfn_code (arg0);
9195 if (fcode != END_BUILTINS)
9197 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9198 if (tem != NULL_TREE)
9199 return tem;
9204 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9205 if (TREE_CONSTANT (arg1)
9206 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9207 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9208 /* This optimization is invalid for ordered comparisons
9209 if CONST+INCR overflows or if foo+incr might overflow.
9210 This optimization is invalid for floating point due to rounding.
9211 For pointer types we assume overflow doesn't happen. */
9212 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9213 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9214 && (code == EQ_EXPR || code == NE_EXPR))))
9216 tree varop, newconst;
9218 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9220 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9221 arg1, TREE_OPERAND (arg0, 1));
9222 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9223 TREE_OPERAND (arg0, 0),
9224 TREE_OPERAND (arg0, 1));
9226 else
9228 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9229 arg1, TREE_OPERAND (arg0, 1));
9230 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9231 TREE_OPERAND (arg0, 0),
9232 TREE_OPERAND (arg0, 1));
9236 /* If VAROP is a reference to a bitfield, we must mask
9237 the constant by the width of the field. */
9238 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9239 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9240 && host_integerp (DECL_SIZE (TREE_OPERAND
9241 (TREE_OPERAND (varop, 0), 1)), 1))
9243 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9244 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9245 tree folded_compare, shift;
9247 /* First check whether the comparison would always come
9248 out the same. If we didn't check, the masking below
9249 would change the meaning. */
9250 folded_compare = fold_build2 (code, type,
9251 TREE_OPERAND (varop, 0), arg1);
9252 if (integer_zerop (folded_compare)
9253 || integer_onep (folded_compare))
9254 return omit_one_operand (type, folded_compare, varop);
9256 shift = build_int_cst (NULL_TREE,
9257 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9258 shift = fold_convert (TREE_TYPE (varop), shift);
9259 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9260 newconst, shift);
9261 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9262 newconst, shift);
9265 return fold_build2 (code, type, varop, newconst);
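/* For example, for integral I the equality test I++ == 10 becomes
   ++I == 11 here (with the new constant masked first if I refers
   to a bitfield).  */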
9268 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9269 This transformation affects the cases which are handled in later
9270 optimizations involving comparisons with non-negative constants. */
9271 if (TREE_CODE (arg1) == INTEGER_CST
9272 && TREE_CODE (arg0) != INTEGER_CST
9273 && tree_int_cst_sgn (arg1) > 0)
9275 switch (code)
9277 case GE_EXPR:
9278 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9279 return fold_build2 (GT_EXPR, type, arg0, arg1);
9281 case LT_EXPR:
9282 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9283 return fold_build2 (LE_EXPR, type, arg0, arg1);
9285 default:
9286 break;
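/* For example, X >= 3 becomes X > 2 and X < 3 becomes X <= 2,
   since the constant is positive.  */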
9290 /* Comparisons with the highest or lowest possible integer of
9291 the specified size will have known values. */
9293 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9295 if (TREE_CODE (arg1) == INTEGER_CST
9296 && ! TREE_CONSTANT_OVERFLOW (arg1)
9297 && width <= 2 * HOST_BITS_PER_WIDE_INT
9298 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9299 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9301 HOST_WIDE_INT signed_max_hi;
9302 unsigned HOST_WIDE_INT signed_max_lo;
9303 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9305 if (width <= HOST_BITS_PER_WIDE_INT)
9307 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9308 - 1;
9309 signed_max_hi = 0;
9310 max_hi = 0;
9312 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9314 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9315 min_lo = 0;
9316 min_hi = 0;
9318 else
9320 max_lo = signed_max_lo;
9321 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9322 min_hi = -1;
9325 else
9327 width -= HOST_BITS_PER_WIDE_INT;
9328 signed_max_lo = -1;
9329 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9330 - 1;
9331 max_lo = -1;
9332 min_lo = 0;
9334 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9336 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9337 min_hi = 0;
9339 else
9341 max_hi = signed_max_hi;
9342 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9346 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9347 && TREE_INT_CST_LOW (arg1) == max_lo)
9348 switch (code)
9350 case GT_EXPR:
9351 return omit_one_operand (type, integer_zero_node, arg0);
9353 case GE_EXPR:
9354 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9356 case LE_EXPR:
9357 return omit_one_operand (type, integer_one_node, arg0);
9359 case LT_EXPR:
9360 return fold_build2 (NE_EXPR, type, arg0, arg1);
9362 /* The GE_EXPR and LT_EXPR cases above are not normally
9363 reached because of previous transformations. */
9365 default:
9366 break;
9368 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9369 == max_hi
9370 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9371 switch (code)
9373 case GT_EXPR:
9374 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9375 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9376 case LE_EXPR:
9377 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9378 return fold_build2 (NE_EXPR, type, arg0, arg1);
9379 default:
9380 break;
9382 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9383 == min_hi
9384 && TREE_INT_CST_LOW (arg1) == min_lo)
9385 switch (code)
9387 case LT_EXPR:
9388 return omit_one_operand (type, integer_zero_node, arg0);
9390 case LE_EXPR:
9391 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9393 case GE_EXPR:
9394 return omit_one_operand (type, integer_one_node, arg0);
9396 case GT_EXPR:
9397 return fold_build2 (NE_EXPR, type, arg0, arg1);
9399 default:
9400 break;
9402 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9403 == min_hi
9404 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9405 switch (code)
9407 case GE_EXPR:
9408 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9409 return fold_build2 (NE_EXPR, type, arg0, arg1);
9410 case LT_EXPR:
9411 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9412 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9413 default:
9414 break;
9417 else if (!in_gimple_form
9418 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9419 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9420 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9421 /* signed_type does not work on pointer types. */
9422 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9424 /* The following case also applies to X < signed_max+1
9425 and X >= signed_max+1 because of previous transformations. */
9426 if (code == LE_EXPR || code == GT_EXPR)
9428 tree st0, st1;
9429 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9430 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9431 return fold
9432 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9433 type, fold_convert (st0, arg0),
9434 fold_convert (st1, integer_zero_node)));
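/* For example, for a 32-bit unsigned X, X > 2147483647 becomes
   (int) X < 0 and X <= 2147483647 becomes (int) X >= 0.  */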
9440 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9441 a MINUS_EXPR of a constant, we can convert it into a comparison with
9442 a revised constant as long as no overflow occurs. */
9443 if ((code == EQ_EXPR || code == NE_EXPR)
9444 && TREE_CODE (arg1) == INTEGER_CST
9445 && (TREE_CODE (arg0) == PLUS_EXPR
9446 || TREE_CODE (arg0) == MINUS_EXPR)
9447 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9448 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9449 ? MINUS_EXPR : PLUS_EXPR,
9450 arg1, TREE_OPERAND (arg0, 1), 0))
9451 && ! TREE_CONSTANT_OVERFLOW (tem))
9452 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9454 /* Similarly for a NEGATE_EXPR. */
9455 else if ((code == EQ_EXPR || code == NE_EXPR)
9456 && TREE_CODE (arg0) == NEGATE_EXPR
9457 && TREE_CODE (arg1) == INTEGER_CST
9458 && 0 != (tem = negate_expr (arg1))
9459 && TREE_CODE (tem) == INTEGER_CST
9460 && ! TREE_CONSTANT_OVERFLOW (tem))
9461 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9463 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9464 for !=. Don't do this for ordered comparisons due to overflow. */
9465 else if ((code == NE_EXPR || code == EQ_EXPR)
9466 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9467 return fold_build2 (code, type,
9468 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9470 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9471 && (TREE_CODE (arg0) == NOP_EXPR
9472 || TREE_CODE (arg0) == CONVERT_EXPR))
9474 /* If we are widening one operand of an integer comparison,
9475 see if the other operand is similarly being widened. Perhaps we
9476 can do the comparison in the narrower type. */
9477 tem = fold_widened_comparison (code, type, arg0, arg1);
9478 if (tem)
9479 return tem;
9481 /* Or if we are changing signedness. */
9482 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9483 if (tem)
9484 return tem;
9487 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9488 constant, we can simplify it. */
9489 else if (TREE_CODE (arg1) == INTEGER_CST
9490 && (TREE_CODE (arg0) == MIN_EXPR
9491 || TREE_CODE (arg0) == MAX_EXPR)
9492 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9494 tem = optimize_minmax_comparison (code, type, op0, op1);
9495 if (tem)
9496 return tem;
9498 return NULL_TREE;
9501 /* If we are comparing an ABS_EXPR with a constant, we can
9502 convert all the cases into explicit comparisons, but they may
9503 well not be faster than doing the ABS and one comparison.
9504 But ABS (X) <= C is a range comparison, which becomes a subtraction
9505 and a comparison, and is probably faster. */
9506 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9507 && TREE_CODE (arg0) == ABS_EXPR
9508 && ! TREE_SIDE_EFFECTS (arg0)
9509 && (0 != (tem = negate_expr (arg1)))
9510 && TREE_CODE (tem) == INTEGER_CST
9511 && ! TREE_CONSTANT_OVERFLOW (tem))
9512 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9513 build2 (GE_EXPR, type,
9514 TREE_OPERAND (arg0, 0), tem),
9515 build2 (LE_EXPR, type,
9516 TREE_OPERAND (arg0, 0), arg1));
9518 /* Convert ABS_EXPR<x> >= 0 to true. */
9519 else if (code == GE_EXPR
9520 && tree_expr_nonnegative_p (arg0)
9521 && (integer_zerop (arg1)
9522 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9523 && real_zerop (arg1))))
9524 return omit_one_operand (type, integer_one_node, arg0);
9526 /* Convert ABS_EXPR<x> < 0 to false. */
9527 else if (code == LT_EXPR
9528 && tree_expr_nonnegative_p (arg0)
9529 && (integer_zerop (arg1) || real_zerop (arg1)))
9530 return omit_one_operand (type, integer_zero_node, arg0);
9532 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9533 else if ((code == EQ_EXPR || code == NE_EXPR)
9534 && TREE_CODE (arg0) == ABS_EXPR
9535 && (integer_zerop (arg1) || real_zerop (arg1)))
9536 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9538 /* If this is an EQ or NE comparison with zero and ARG0 is
9539 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9540 two operations, but the latter can be done in one less insn
9541 on machines that have only two-operand insns or on which a
9542 constant cannot be the first operand. */
9543 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9544 && TREE_CODE (arg0) == BIT_AND_EXPR)
9546 tree arg00 = TREE_OPERAND (arg0, 0);
9547 tree arg01 = TREE_OPERAND (arg0, 1);
9548 if (TREE_CODE (arg00) == LSHIFT_EXPR
9549 && integer_onep (TREE_OPERAND (arg00, 0)))
9550 return
9551 fold_build2 (code, type,
9552 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9553 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9554 arg01, TREE_OPERAND (arg00, 1)),
9555 fold_convert (TREE_TYPE (arg0),
9556 integer_one_node)),
9557 arg1);
9558 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9559 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9560 return
9561 fold_build2 (code, type,
9562 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9563 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9564 arg00, TREE_OPERAND (arg01, 1)),
9565 fold_convert (TREE_TYPE (arg0),
9566 integer_one_node)),
9567 arg1);
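/* For example, ((1 << N) & B) == 0 is rewritten as
   ((B >> N) & 1) == 0 by the code above.  */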
9570 /* If this is an NE or EQ comparison of zero against the result of a
9571 signed MOD operation whose second operand is a power of 2, make
9572 the MOD operation unsigned since it is simpler and equivalent. */
9573 if ((code == NE_EXPR || code == EQ_EXPR)
9574 && integer_zerop (arg1)
9575 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9576 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9577 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9578 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9579 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9580 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9582 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9583 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9584 fold_convert (newtype,
9585 TREE_OPERAND (arg0, 0)),
9586 fold_convert (newtype,
9587 TREE_OPERAND (arg0, 1)));
9589 return fold_build2 (code, type, newmod,
9590 fold_convert (newtype, arg1));
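/* For example, for signed X, X % 4 == 0 becomes
   (unsigned) X % 4 == 0.  */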
9593 /* If this is an NE comparison of zero with an AND of one, remove the
9594 comparison since the AND will give the correct value. */
9595 if (code == NE_EXPR && integer_zerop (arg1)
9596 && TREE_CODE (arg0) == BIT_AND_EXPR
9597 && integer_onep (TREE_OPERAND (arg0, 1)))
9598 return fold_convert (type, arg0);
9600 /* If we have (A & C) == C where C is a power of 2, convert this into
9601 (A & C) != 0. Similarly for NE_EXPR. */
9602 if ((code == EQ_EXPR || code == NE_EXPR)
9603 && TREE_CODE (arg0) == BIT_AND_EXPR
9604 && integer_pow2p (TREE_OPERAND (arg0, 1))
9605 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9606 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9607 arg0, fold_convert (TREE_TYPE (arg0),
9608 integer_zero_node));
9610 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9611 bit, then fold the expression into A < 0 or A >= 0. */
9612 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9613 if (tem)
9614 return tem;
9616 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9617 Similarly for NE_EXPR. */
9618 if ((code == EQ_EXPR || code == NE_EXPR)
9619 && TREE_CODE (arg0) == BIT_AND_EXPR
9620 && TREE_CODE (arg1) == INTEGER_CST
9621 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9623 tree notc = fold_build1 (BIT_NOT_EXPR,
9624 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9625 TREE_OPERAND (arg0, 1));
9626 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9627 arg1, notc);
9628 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9629 if (integer_nonzerop (dandnotc))
9630 return omit_one_operand (type, rslt, arg0);
9633 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9634 Similarly for NE_EXPR. */
9635 if ((code == EQ_EXPR || code == NE_EXPR)
9636 && TREE_CODE (arg0) == BIT_IOR_EXPR
9637 && TREE_CODE (arg1) == INTEGER_CST
9638 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9640 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9641 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9642 TREE_OPERAND (arg0, 1), notd);
9643 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9644 if (integer_nonzerop (candnotd))
9645 return omit_one_operand (type, rslt, arg0);
9648 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9649 and similarly for >= into !=. */
9650 if ((code == LT_EXPR || code == GE_EXPR)
9651 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9652 && TREE_CODE (arg1) == LSHIFT_EXPR
9653 && integer_onep (TREE_OPERAND (arg1, 0)))
9654 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9655 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9656 TREE_OPERAND (arg1, 1)),
9657 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9659 else if ((code == LT_EXPR || code == GE_EXPR)
9660 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9661 && (TREE_CODE (arg1) == NOP_EXPR
9662 || TREE_CODE (arg1) == CONVERT_EXPR)
9663 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9664 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9665 return
9666 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9667 fold_convert (TREE_TYPE (arg0),
9668 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9669 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9670 1))),
9671 fold_convert (TREE_TYPE (arg0), integer_zero_node));
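/* For example, for unsigned X, X < (1 << Y) becomes (X >> Y) == 0
   and X >= (1 << Y) becomes (X >> Y) != 0.  */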
9673 /* Simplify comparison of something with itself. (For IEEE
9674 floating-point, we can only do some of these simplifications.) */
9675 if (operand_equal_p (arg0, arg1, 0))
9677 switch (code)
9679 case EQ_EXPR:
9680 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9681 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9682 return constant_boolean_node (1, type);
9683 break;
9685 case GE_EXPR:
9686 case LE_EXPR:
9687 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9688 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9689 return constant_boolean_node (1, type);
9690 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9692 case NE_EXPR:
9693 /* For NE, we can only do this simplification if the type is
9694 not floating-point or we don't honor IEEE floating point NaNs. */
9695 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9696 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9697 break;
9698 /* ... fall through ... */
9699 case GT_EXPR:
9700 case LT_EXPR:
9701 return constant_boolean_node (0, type);
9702 default:
9703 gcc_unreachable ();
9707 /* If we are comparing an expression that just has comparisons
9708 of two integer values, arithmetic expressions of those comparisons,
9709 and constants, we can simplify it. There are only three cases
9710 to check: the two values can either be equal, the first can be
9711 greater, or the second can be greater. Fold the expression for
9712 those three values. Since each value must be 0 or 1, we have
9713 eight possibilities, each of which corresponds to the constant 0
9714 or 1 or one of the six possible comparisons.
9716 This handles common cases like (a > b) == 0 but also handles
9717 expressions like ((x > y) - (y > x)) > 0, which supposedly
9718 occur in macroized code. */
9720 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9722 tree cval1 = 0, cval2 = 0;
9723 int save_p = 0;
9725 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9726 /* Don't handle degenerate cases here; they should already
9727 have been handled anyway. */
9728 && cval1 != 0 && cval2 != 0
9729 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9730 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9731 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9732 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9733 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9734 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9735 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9737 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9738 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9740 /* We can't just pass T to eval_subst in case cval1 or cval2
9741 was the same as ARG1. */
9743 tree high_result
9744 = fold_build2 (code, type,
9745 eval_subst (arg0, cval1, maxval,
9746 cval2, minval),
9747 arg1);
9748 tree equal_result
9749 = fold_build2 (code, type,
9750 eval_subst (arg0, cval1, maxval,
9751 cval2, maxval),
9752 arg1);
9753 tree low_result
9754 = fold_build2 (code, type,
9755 eval_subst (arg0, cval1, minval,
9756 cval2, maxval),
9757 arg1);
9759 /* All three of these results should be 0 or 1. Confirm they
9760 are. Then use those values to select the proper code
9761 to use. */
9763 if ((integer_zerop (high_result)
9764 || integer_onep (high_result))
9765 && (integer_zerop (equal_result)
9766 || integer_onep (equal_result))
9767 && (integer_zerop (low_result)
9768 || integer_onep (low_result)))
9770 /* Make a 3-bit mask with the high-order bit being the
9771 value for `>', the next for `=', and the low for `<'. */
9772 switch ((integer_onep (high_result) * 4)
9773 + (integer_onep (equal_result) * 2)
9774 + integer_onep (low_result))
9776 case 0:
9777 /* Always false. */
9778 return omit_one_operand (type, integer_zero_node, arg0);
9779 case 1:
9780 code = LT_EXPR;
9781 break;
9782 case 2:
9783 code = EQ_EXPR;
9784 break;
9785 case 3:
9786 code = LE_EXPR;
9787 break;
9788 case 4:
9789 code = GT_EXPR;
9790 break;
9791 case 5:
9792 code = NE_EXPR;
9793 break;
9794 case 6:
9795 code = GE_EXPR;
9796 break;
9797 case 7:
9798 /* Always true. */
9799 return omit_one_operand (type, integer_one_node, arg0);
9802 if (save_p)
9803 return save_expr (build2 (code, type, cval1, cval2));
9804 else
9805 return fold_build2 (code, type, cval1, cval2);
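/* Worked example: for (a > b) == 0, the three evaluations give
   high_result = 0, equal_result = 1 and low_result = 1, i.e. the
   mask 3, so the whole expression folds to a <= b.  */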
9810 /* If this is a comparison of a field, we may be able to simplify it. */
9811 if (((TREE_CODE (arg0) == COMPONENT_REF
9812 && lang_hooks.can_use_bit_fields_p ())
9813 || TREE_CODE (arg0) == BIT_FIELD_REF)
9814 && (code == EQ_EXPR || code == NE_EXPR)
9815 /* Handle the constant case even without -O
9816 to make sure the warnings are given. */
9817 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9819 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9820 if (t1)
9821 return t1;
9824 /* Fold a comparison of the address of COMPONENT_REFs with the same
9825 type and component to a comparison of the address of the base
9826 object. In short, &x->a OP &y->a to x OP y and
9827 &x->a OP &y.a to x OP &y */
9828 if (TREE_CODE (arg0) == ADDR_EXPR
9829 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9830 && TREE_CODE (arg1) == ADDR_EXPR
9831 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9833 tree cref0 = TREE_OPERAND (arg0, 0);
9834 tree cref1 = TREE_OPERAND (arg1, 0);
9835 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9837 tree op0 = TREE_OPERAND (cref0, 0);
9838 tree op1 = TREE_OPERAND (cref1, 0);
9839 return fold_build2 (code, type,
9840 build_fold_addr_expr (op0),
9841 build_fold_addr_expr (op1));
9845 /* If this is a comparison of complex values and either or both sides
9846 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
9847 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
9848 This may prevent needless evaluations. */
9849 if ((code == EQ_EXPR || code == NE_EXPR)
9850 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
9851 && (TREE_CODE (arg0) == COMPLEX_EXPR
9852 || TREE_CODE (arg1) == COMPLEX_EXPR
9853 || TREE_CODE (arg0) == COMPLEX_CST
9854 || TREE_CODE (arg1) == COMPLEX_CST))
9856 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
9857 tree real0, imag0, real1, imag1;
9859 arg0 = save_expr (arg0);
9860 arg1 = save_expr (arg1);
9861 real0 = fold_build1 (REALPART_EXPR, subtype, arg0);
9862 imag0 = fold_build1 (IMAGPART_EXPR, subtype, arg0);
9863 real1 = fold_build1 (REALPART_EXPR, subtype, arg1);
9864 imag1 = fold_build1 (IMAGPART_EXPR, subtype, arg1);
9866 return fold_build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
9867 : TRUTH_ORIF_EXPR),
9868 type,
9869 fold_build2 (code, type, real0, real1),
9870 fold_build2 (code, type, imag0, imag1));
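/* For example, an EQ_EXPR on complex values becomes
   REALPART (X) == REALPART (Y) && IMAGPART (X) == IMAGPART (Y);
   an NE_EXPR joins the two comparisons with || instead.  */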
9873 /* Optimize comparisons of strlen vs zero to a compare of the
9874 first character of the string vs zero. To wit,
9875 strlen(ptr) == 0 => *ptr == 0
9876 strlen(ptr) != 0 => *ptr != 0
9877 Other cases should reduce to one of these two (or a constant)
9878 due to the return value of strlen being unsigned. */
9879 if ((code == EQ_EXPR || code == NE_EXPR)
9880 && integer_zerop (arg1)
9881 && TREE_CODE (arg0) == CALL_EXPR)
9883 tree fndecl = get_callee_fndecl (arg0);
9884 tree arglist;
9886 if (fndecl
9887 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9888 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9889 && (arglist = TREE_OPERAND (arg0, 1))
9890 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9891 && ! TREE_CHAIN (arglist))
9892 return fold_build2 (code, type,
9893 build1 (INDIRECT_REF, char_type_node,
9894 TREE_VALUE (arglist)),
9895 fold_convert (char_type_node,
9896 integer_zero_node));
9899 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9900 into a single range test. */
9901 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9902 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9903 && TREE_CODE (arg1) == INTEGER_CST
9904 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9905 && !integer_zerop (TREE_OPERAND (arg0, 1))
9906 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9907 && !TREE_OVERFLOW (arg1))
9909 t1 = fold_div_compare (code, type, arg0, arg1);
9910 if (t1 != NULL_TREE)
9911 return t1;
9914 if ((code == EQ_EXPR || code == NE_EXPR)
9915 && !TREE_SIDE_EFFECTS (arg0)
9916 && integer_zerop (arg1)
9917 && tree_expr_nonzero_p (arg0))
9918 return constant_boolean_node (code==NE_EXPR, type);
9920 t1 = fold_relational_const (code, type, arg0, arg1);
9921 return t1 == NULL_TREE ? NULL_TREE : t1;
9923 case UNORDERED_EXPR:
9924 case ORDERED_EXPR:
9925 case UNLT_EXPR:
9926 case UNLE_EXPR:
9927 case UNGT_EXPR:
9928 case UNGE_EXPR:
9929 case UNEQ_EXPR:
9930 case LTGT_EXPR:
9931 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9933 t1 = fold_relational_const (code, type, arg0, arg1);
9934 if (t1 != NULL_TREE)
9935 return t1;
9938 /* If the first operand is NaN, the result is constant. */
9939 if (TREE_CODE (arg0) == REAL_CST
9940 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9941 && (code != LTGT_EXPR || ! flag_trapping_math))
9943 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9944 ? integer_zero_node
9945 : integer_one_node;
9946 return omit_one_operand (type, t1, arg1);
9949 /* If the second operand is NaN, the result is constant. */
9950 if (TREE_CODE (arg1) == REAL_CST
9951 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9952 && (code != LTGT_EXPR || ! flag_trapping_math))
9954 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9955 ? integer_zero_node
9956 : integer_one_node;
9957 return omit_one_operand (type, t1, arg0);
9960 /* Simplify unordered comparison of something with itself. */
9961 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9962 && operand_equal_p (arg0, arg1, 0))
9963 return constant_boolean_node (1, type);
9965 if (code == LTGT_EXPR
9966 && !flag_trapping_math
9967 && operand_equal_p (arg0, arg1, 0))
9968 return constant_boolean_node (0, type);
9970 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9972 tree targ0 = strip_float_extensions (arg0);
9973 tree targ1 = strip_float_extensions (arg1);
9974 tree newtype = TREE_TYPE (targ0);
9976 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9977 newtype = TREE_TYPE (targ1);
9979 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9980 return fold_build2 (code, type, fold_convert (newtype, targ0),
9981 fold_convert (newtype, targ1));
9984 return NULL_TREE;
9986 case COMPOUND_EXPR:
9987 /* When pedantic, a compound expression can be neither an lvalue
9988 nor an integer constant expression. */
9989 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9990 return NULL_TREE;
9992 /* Don't let (0, 0) be a null pointer constant. */
9992 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9993 : fold_convert (type, arg1);
9994 return pedantic_non_lvalue (tem);
9996 case COMPLEX_EXPR:
9997 if (wins)
9998 return build_complex (type, arg0, arg1);
9999 return NULL_TREE;
10001 case ASSERT_EXPR:
10002 /* An ASSERT_EXPR should never be passed to fold_binary. */
10003 gcc_unreachable ();
10005 default:
10006 return NULL_TREE;
10007 } /* switch (code) */
10010 /* Callback for walk_tree, looking for LABEL_EXPR.
10011 Returns *TP if it is a LABEL_EXPR, and NULL_TREE otherwise.
10012 Do not check the sub-tree of GOTO_EXPR. */
10014 static tree
10015 contains_label_1 (tree *tp,
10016 int *walk_subtrees,
10017 void *data ATTRIBUTE_UNUSED)
10019 switch (TREE_CODE (*tp))
10021 case LABEL_EXPR:
10022 return *tp;
10023 case GOTO_EXPR:
10024 *walk_subtrees = 0;
10025 /* no break */
10026 default:
10027 return NULL_TREE;
10031 /* Checks whether the sub-tree ST contains a label (LABEL_EXPR) which is
10032 accessible from outside the sub-tree. Returns false if no such
10033 addressable label is found. */
10035 static bool
10036 contains_label_p (tree st)
10038 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
10041 /* Fold a ternary expression of code CODE and type TYPE with operands
10042 OP0, OP1, and OP2. Return the folded expression if folding is
10043 successful. Otherwise, return NULL_TREE. */
10045 tree
10046 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10048 tree tem;
10049 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
10050 enum tree_code_class kind = TREE_CODE_CLASS (code);
10052 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10053 && TREE_CODE_LENGTH (code) == 3);
10055 /* Strip any conversions that don't change the mode. This is safe
10056 for every expression, except for a comparison expression because
10057 its signedness is derived from its operands. So, in the latter
10058 case, only strip conversions that don't change the signedness.
10060 Note that this is done as an internal manipulation within the
10061 constant folder, in order to find the simplest representation of
10062 the arguments so that their form can be studied. In any case,
10063 the appropriate type conversions should be put back in the tree
10064 that will get out of the constant folder. */
10065 if (op0)
10067 arg0 = op0;
10068 STRIP_NOPS (arg0);
10071 if (op1)
10073 arg1 = op1;
10074 STRIP_NOPS (arg1);
10077 switch (code)
10079 case COMPONENT_REF:
10080 if (TREE_CODE (arg0) == CONSTRUCTOR
10081 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
10083 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
10084 if (m)
10085 return TREE_VALUE (m);
10087 return NULL_TREE;
10089 case COND_EXPR:
10090 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
10091 so all simple results must be passed through pedantic_non_lvalue. */
10092 if (TREE_CODE (arg0) == INTEGER_CST)
10094 tree unused_op = integer_zerop (arg0) ? op1 : op2;
10095 tem = integer_zerop (arg0) ? op2 : op1;
10096 /* Only optimize constant conditions when the selected branch
10097 has the same type as the COND_EXPR. This avoids optimizing
10098 away "c ? x : throw", where the throw has a void type.
10099 Avoid throwing away an operand that contains a label. */
10100 if ((!TREE_SIDE_EFFECTS (unused_op)
10101 || !contains_label_p (unused_op))
10102 && (! VOID_TYPE_P (TREE_TYPE (tem))
10103 || VOID_TYPE_P (type)))
10104 return pedantic_non_lvalue (tem);
10105 return NULL_TREE;
10107 if (operand_equal_p (arg1, op2, 0))
10108 return pedantic_omit_one_operand (type, arg1, arg0);
10110 /* If we have A op B ? A : C, we may be able to convert this to a
10111 simpler expression, depending on the operation and the values
10112 of B and C. Signed zeros prevent all of these transformations,
10113 for reasons given above each one.
10115 Also try swapping the arguments and inverting the conditional. */
10116 if (COMPARISON_CLASS_P (arg0)
10117 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10118 arg1, TREE_OPERAND (arg0, 1))
10119 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
10121 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
10122 if (tem)
10123 return tem;
10126 if (COMPARISON_CLASS_P (arg0)
10127 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10128 op2,
10129 TREE_OPERAND (arg0, 1))
10130 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10132 tem = invert_truthvalue (arg0);
10133 if (COMPARISON_CLASS_P (tem))
10135 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10136 if (tem)
10137 return tem;
10141 /* If the second operand is simpler than the third, swap them
10142 since that produces better jump optimization results. */
10143 if (tree_swap_operands_p (op1, op2, false))
10145 /* See if this can be inverted. If it can't, possibly because
10146 it was a floating-point inequality comparison, don't do
10147 anything. */
10148 tem = invert_truthvalue (arg0);
10150 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10151 return fold_build3 (code, type, tem, op2, op1);
10154 /* Convert A ? 1 : 0 to simply A. */
10155 if (integer_onep (op1)
10156 && integer_zerop (op2)
10157 /* If we try to convert OP0 to our type, the
10158 call to fold will try to move the conversion inside
10159 a COND, which will recurse. In that case, the COND_EXPR
10160 is probably the best choice, so leave it alone. */
10161 && type == TREE_TYPE (arg0))
10162 return pedantic_non_lvalue (arg0);
10164 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10165 over COND_EXPR in cases such as floating point comparisons. */
10166 if (integer_zerop (op1)
10167 && integer_onep (op2)
10168 && truth_value_p (TREE_CODE (arg0)))
10169 return pedantic_non_lvalue (fold_convert (type,
10170 invert_truthvalue (arg0)));
10172 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10173 if (TREE_CODE (arg0) == LT_EXPR
10174 && integer_zerop (TREE_OPERAND (arg0, 1))
10175 && integer_zerop (op2)
10176 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10177 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10178 TREE_TYPE (tem), tem, arg1));
10180 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10181 already handled above. */
10182 if (TREE_CODE (arg0) == BIT_AND_EXPR
10183 && integer_onep (TREE_OPERAND (arg0, 1))
10184 && integer_zerop (op2)
10185 && integer_pow2p (arg1))
10187 tree tem = TREE_OPERAND (arg0, 0);
10188 STRIP_NOPS (tem);
10189 if (TREE_CODE (tem) == RSHIFT_EXPR
10190 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10191 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10192 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10193 return fold_build2 (BIT_AND_EXPR, type,
10194 TREE_OPERAND (tem, 0), arg1);
10197 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10198 is probably obsolete because the first operand should be a
10199 truth value (that's why we have the two cases above), but let's
10200 leave it in until we can confirm this for all front-ends. */
10201 if (integer_zerop (op2)
10202 && TREE_CODE (arg0) == NE_EXPR
10203 && integer_zerop (TREE_OPERAND (arg0, 1))
10204 && integer_pow2p (arg1)
10205 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10206 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10207 arg1, OEP_ONLY_CONST))
10208 return pedantic_non_lvalue (fold_convert (type,
10209 TREE_OPERAND (arg0, 0)));
10211 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10212 if (integer_zerop (op2)
10213 && truth_value_p (TREE_CODE (arg0))
10214 && truth_value_p (TREE_CODE (arg1)))
10215 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10217 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10218 if (integer_onep (op2)
10219 && truth_value_p (TREE_CODE (arg0))
10220 && truth_value_p (TREE_CODE (arg1)))
10222 /* Only perform transformation if ARG0 is easily inverted. */
10223 tem = invert_truthvalue (arg0);
10224 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10225 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10228 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10229 if (integer_zerop (arg1)
10230 && truth_value_p (TREE_CODE (arg0))
10231 && truth_value_p (TREE_CODE (op2)))
10233 /* Only perform transformation if ARG0 is easily inverted. */
10234 tem = invert_truthvalue (arg0);
10235 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10236 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10239 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10240 if (integer_onep (arg1)
10241 && truth_value_p (TREE_CODE (arg0))
10242 && truth_value_p (TREE_CODE (op2)))
10243 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
10245 return NULL_TREE;
10247 case CALL_EXPR:
10248 /* Check for a built-in function. */
10249 if (TREE_CODE (op0) == ADDR_EXPR
10250 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10251 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10253 tree fndecl = TREE_OPERAND (op0, 0);
10254 tree arglist = op1;
10255 tree tmp = fold_builtin (fndecl, arglist, false);
10256 if (tmp)
10257 return tmp;
10259 return NULL_TREE;
10261 default:
10262 return NULL_TREE;
10263 } /* switch (code) */
10266 /* Perform constant folding and related simplification of EXPR.
10267 The related simplifications include x*1 => x, x*0 => 0, etc.,
10268 and application of the associative law.
10269 NOP_EXPR conversions may be removed freely (as long as we
10270 are careful not to change the type of the overall expression).
10271 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10272 but we can constant-fold them if they have constant operands. */
10274 #ifdef ENABLE_FOLD_CHECKING
10275 # define fold(x) fold_1 (x)
10276 static tree fold_1 (tree);
10277 static
10278 #endif
10279 tree
10280 fold (tree expr)
10282 const tree t = expr;
10283 enum tree_code code = TREE_CODE (t);
10284 enum tree_code_class kind = TREE_CODE_CLASS (code);
10285 tree tem;
10287 /* Return right away if a constant. */
10288 if (kind == tcc_constant)
10289 return t;
10291 if (IS_EXPR_CODE_CLASS (kind))
10293 tree type = TREE_TYPE (t);
10294 tree op0, op1, op2;
10296 switch (TREE_CODE_LENGTH (code))
10298 case 1:
10299 op0 = TREE_OPERAND (t, 0);
10300 tem = fold_unary (code, type, op0);
10301 return tem ? tem : expr;
10302 case 2:
10303 op0 = TREE_OPERAND (t, 0);
10304 op1 = TREE_OPERAND (t, 1);
10305 tem = fold_binary (code, type, op0, op1);
10306 return tem ? tem : expr;
10307 case 3:
10308 op0 = TREE_OPERAND (t, 0);
10309 op1 = TREE_OPERAND (t, 1);
10310 op2 = TREE_OPERAND (t, 2);
10311 tem = fold_ternary (code, type, op0, op1, op2);
10312 return tem ? tem : expr;
10313 default:
10314 break;
10318 switch (code)
10320 case CONST_DECL:
10321 return fold (DECL_INITIAL (t));
10323 default:
10324 return t;
10325 } /* switch (code) */
10328 #ifdef ENABLE_FOLD_CHECKING
10329 #undef fold
10331 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10332 static void fold_check_failed (tree, tree);
10333 void print_fold_checksum (tree);
10335 /* When --enable-checking=fold, compute a digest of EXPR before
10336 and after the actual fold call, to verify that fold did not
10337 accidentally change the original EXPR. */
10339 tree
10340 fold (tree expr)
10342 tree ret;
10343 struct md5_ctx ctx;
10344 unsigned char checksum_before[16], checksum_after[16];
10345 htab_t ht;
10347 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10348 md5_init_ctx (&ctx);
10349 fold_checksum_tree (expr, &ctx, ht);
10350 md5_finish_ctx (&ctx, checksum_before);
10351 htab_empty (ht);
10353 ret = fold_1 (expr);
10355 md5_init_ctx (&ctx);
10356 fold_checksum_tree (expr, &ctx, ht);
10357 md5_finish_ctx (&ctx, checksum_after);
10358 htab_delete (ht);
10360 if (memcmp (checksum_before, checksum_after, 16))
10361 fold_check_failed (expr, ret);
10363 return ret;
10366 void
10367 print_fold_checksum (tree expr)
10369 struct md5_ctx ctx;
10370 unsigned char checksum[16], cnt;
10371 htab_t ht;
10373 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10374 md5_init_ctx (&ctx);
10375 fold_checksum_tree (expr, &ctx, ht);
10376 md5_finish_ctx (&ctx, checksum);
10377 htab_delete (ht);
10378 for (cnt = 0; cnt < 16; ++cnt)
10379 fprintf (stderr, "%02x", checksum[cnt]);
10380 putc ('\n', stderr);
10383 static void
10384 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10386 internal_error ("fold check: original tree changed by fold");
10389 static void
10390 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10392 void **slot;
10393 enum tree_code code;
10394 char buf[sizeof (struct tree_decl)];
10395 int i, len;
10397 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10398 <= sizeof (struct tree_decl))
10399 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
10400 if (expr == NULL)
10401 return;
10402 slot = htab_find_slot (ht, expr, INSERT);
10403 if (*slot != NULL)
10404 return;
10405 *slot = expr;
10406 code = TREE_CODE (expr);
10407 if (TREE_CODE_CLASS (code) == tcc_declaration
10408 && DECL_ASSEMBLER_NAME_SET_P (expr))
10410 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10411 memcpy (buf, expr, tree_size (expr));
10412 expr = (tree) buf;
10413 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10415 else if (TREE_CODE_CLASS (code) == tcc_type
10416 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10417 || TYPE_CACHED_VALUES_P (expr)))
10419 /* Allow these fields to be modified. */
10420 memcpy (buf, expr, tree_size (expr));
10421 expr = (tree) buf;
10422 TYPE_POINTER_TO (expr) = NULL;
10423 TYPE_REFERENCE_TO (expr) = NULL;
10424 if (TYPE_CACHED_VALUES_P (expr))
10426 TYPE_CACHED_VALUES_P (expr) = 0;
10427 TYPE_CACHED_VALUES (expr) = NULL;
10430 md5_process_bytes (expr, tree_size (expr), ctx);
10431 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10432 if (TREE_CODE_CLASS (code) != tcc_type
10433 && TREE_CODE_CLASS (code) != tcc_declaration)
10434 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10435 switch (TREE_CODE_CLASS (code))
10437 case tcc_constant:
10438 switch (code)
10440 case STRING_CST:
10441 md5_process_bytes (TREE_STRING_POINTER (expr),
10442 TREE_STRING_LENGTH (expr), ctx);
10443 break;
10444 case COMPLEX_CST:
10445 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10446 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10447 break;
10448 case VECTOR_CST:
10449 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10450 break;
10451 default:
10452 break;
10454 break;
10455 case tcc_exceptional:
10456 switch (code)
10458 case TREE_LIST:
10459 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10460 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10461 break;
10462 case TREE_VEC:
10463 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10464 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10465 break;
10466 default:
10467 break;
10469 break;
10470 case tcc_expression:
10471 case tcc_reference:
10472 case tcc_comparison:
10473 case tcc_unary:
10474 case tcc_binary:
10475 case tcc_statement:
10476 len = TREE_CODE_LENGTH (code);
10477 for (i = 0; i < len; ++i)
10478 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10479 break;
10480 case tcc_declaration:
10481 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10482 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10483 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10484 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10485 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
10486 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10487 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10488 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10489 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10490 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10491 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10492 break;
10493 case tcc_type:
10494 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10495 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10496 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10497 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10498 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10499 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10500 if (INTEGRAL_TYPE_P (expr)
10501 || SCALAR_FLOAT_TYPE_P (expr))
10503 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10504 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10506 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10507 if (TREE_CODE (expr) == RECORD_TYPE
10508 || TREE_CODE (expr) == UNION_TYPE
10509 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10510 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10511 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10512 break;
10513 default:
10514 break;
10518 #endif
10520 /* Fold a unary tree expression with code CODE of type TYPE with an
10521 operand OP0. Return a folded expression if successful. Otherwise,
10522 return a tree expression with code CODE of type TYPE with an
10523 operand OP0. */
10525 tree
10526 fold_build1 (enum tree_code code, tree type, tree op0)
10528 tree tem = fold_unary (code, type, op0);
10529 if (tem)
10530 return tem;
10532 return build1 (code, type, op0);
10535 /* Fold a binary tree expression with code CODE of type TYPE with
10536 operands OP0 and OP1. Return a folded expression if successful.
10537 Otherwise, return a tree expression with code CODE of type TYPE
10538 with operands OP0 and OP1. */
10540 tree
10541 fold_build2 (enum tree_code code, tree type, tree op0, tree op1)
10543 tree tem = fold_binary (code, type, op0, op1);
10544 if (tem)
10545 return tem;
10547 return build2 (code, type, op0, op1);
10550 /* Fold a ternary tree expression with code CODE of type TYPE with
10551 operands OP0, OP1, and OP2. Return a folded expression if
10552 successful. Otherwise, return a tree expression with code CODE of
10553 type TYPE with operands OP0, OP1, and OP2. */
10555 tree
10556 fold_build3 (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10558 tree tem = fold_ternary (code, type, op0, op1, op2);
10559 if (tem)
10560 return tem;
10562 return build3 (code, type, op0, op1, op2);
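/* A minimal usage sketch (hypothetical caller, not part of this file):

     tree sum = fold_build2 (PLUS_EXPR, integer_type_node, a, b);

   If fold_binary finds a simplification (say B is integer_zero_node,
   so the result is just A), that tree is returned directly; otherwise
   an ordinary PLUS_EXPR node is built.  */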
10565 /* Perform constant folding and related simplification of initializer
10566 expression EXPR. This behaves identically to "fold" but ignores
10567 potential run-time traps and exceptions that fold must preserve. */
10569 tree
10570 fold_initializer (tree expr)
10572 int saved_signaling_nans = flag_signaling_nans;
10573 int saved_trapping_math = flag_trapping_math;
10574 int saved_rounding_math = flag_rounding_math;
10575 int saved_trapv = flag_trapv;
10576 tree result;
10578 flag_signaling_nans = 0;
10579 flag_trapping_math = 0;
10580 flag_rounding_math = 0;
10581 flag_trapv = 0;
10583 result = fold (expr);
10585 flag_signaling_nans = saved_signaling_nans;
10586 flag_trapping_math = saved_trapping_math;
10587 flag_rounding_math = saved_rounding_math;
10588 flag_trapv = saved_trapv;
10590 return result;
10593 /* Determine if the first argument is a multiple of the second argument.
10594 Return 0 if it is not, or if we cannot easily determine that it is.
10596 An example of the sort of thing we care about (at this point; this routine
10597 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10598 fold cases do now) is discovering that
10600 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10602 is a multiple of
10604 SAVE_EXPR (J * 8)
10606 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10608 This code also handles discovering that
10610 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10612 is a multiple of 8 so we don't have to worry about dealing with a
10613 possible remainder.
10615 Note that we *look* inside a SAVE_EXPR only to determine how it was
10616 calculated; it is not safe for fold to do much of anything else with the
10617 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10618 at run time. For example, the latter example above *cannot* be implemented
10619 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10620 evaluation time of the original SAVE_EXPR is not necessarily the same at
10621 the time the new expression is evaluated. The only optimization of this
10622 sort that would be valid is changing
10624 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10626 divided by 8 to
10628 SAVE_EXPR (I) * SAVE_EXPR (J)
10630 (where the same SAVE_EXPR (J) is used in the original and the
10631 transformed version). */
10633 static int
10634 multiple_of_p (tree type, tree top, tree bottom)
10636 if (operand_equal_p (top, bottom, 0))
10637 return 1;
10639 if (TREE_CODE (type) != INTEGER_TYPE)
10640 return 0;
10642 switch (TREE_CODE (top))
10644 case BIT_AND_EXPR:
10645 /* Bitwise and provides a power of two multiple. If the mask is
10646 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10647 if (!integer_pow2p (bottom))
10648 return 0;
10649 /* FALLTHRU */
10651 case MULT_EXPR:
10652 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10653 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10655 case PLUS_EXPR:
10656 case MINUS_EXPR:
10657 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10658 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10660 case LSHIFT_EXPR:
10661 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10663 tree op1, t1;
10665 op1 = TREE_OPERAND (top, 1);
10666 /* const_binop may not detect overflow correctly,
10667 so check for it explicitly here. */
10668 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10669 > TREE_INT_CST_LOW (op1)
10670 && TREE_INT_CST_HIGH (op1) == 0
10671 && 0 != (t1 = fold_convert (type,
10672 const_binop (LSHIFT_EXPR,
10673 size_one_node,
10674 op1, 0)))
10675 && ! TREE_OVERFLOW (t1))
10676 return multiple_of_p (type, t1, bottom);
10678 return 0;
10680 case NOP_EXPR:
10681 /* Can't handle conversions from non-integral or wider integral type. */
10682 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10683 || (TYPE_PRECISION (type)
10684 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10685 return 0;
10687 /* ... fall through ... */
10689 case SAVE_EXPR:
10690 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10692 case INTEGER_CST:
10693 if (TREE_CODE (bottom) != INTEGER_CST
10694 || (TYPE_UNSIGNED (type)
10695 && (tree_int_cst_sgn (top) < 0
10696 || tree_int_cst_sgn (bottom) < 0)))
10697 return 0;
10698 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10699 top, bottom, 0));
10701 default:
10702 return 0;
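/* A worked example of the recursion above (I and J are hypothetical
   variables):

     TOP    = SAVE_EXPR (I) * SAVE_EXPR (J * 8)
     BOTTOM = 8

   The MULT_EXPR case asks whether either factor is a multiple of 8.
   The SAVE_EXPR case looks through to J * 8, whose own MULT_EXPR case
   finds the INTEGER_CST 8, and 8 % 8 == 0, so multiple_of_p returns 1
   for the whole product without ever needing the value of I or J.  */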
10706 /* Return true if `t' is known to be non-negative. */
10708 int
10709 tree_expr_nonnegative_p (tree t)
10711 switch (TREE_CODE (t))
10713 case ABS_EXPR:
10714 return 1;
10716 case INTEGER_CST:
10717 return tree_int_cst_sgn (t) >= 0;
10719 case REAL_CST:
10720 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10722 case PLUS_EXPR:
10723 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10724 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10725 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10727 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10728 both unsigned and at least 2 bits shorter than the result. */
10729 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10730 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10731 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10733 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10734 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10735 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10736 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10738 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10739 TYPE_PRECISION (inner2)) + 1;
10740 return prec < TYPE_PRECISION (TREE_TYPE (t));
10743 break;
10745 case MULT_EXPR:
10746 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10748 /* x * x for floating point x is always non-negative. */
10749 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10750 return 1;
10751 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10752 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10755 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are both
10756 unsigned and the sum of their precisions is less than the result's. */
10757 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10758 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10759 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10761 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10762 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10763 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10764 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10765 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10766 < TYPE_PRECISION (TREE_TYPE (t));
10768 return 0;
10770 case TRUNC_DIV_EXPR:
10771 case CEIL_DIV_EXPR:
10772 case FLOOR_DIV_EXPR:
10773 case ROUND_DIV_EXPR:
10774 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10775 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10777 case TRUNC_MOD_EXPR:
10778 case CEIL_MOD_EXPR:
10779 case FLOOR_MOD_EXPR:
10780 case ROUND_MOD_EXPR:
10781 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10783 case RDIV_EXPR:
10784 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10785 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10787 case BIT_AND_EXPR:
10788 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10789 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10790 case BIT_IOR_EXPR:
10791 case BIT_XOR_EXPR:
10792 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10793 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10795 case NOP_EXPR:
10797 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10798 tree outer_type = TREE_TYPE (t);
10800 if (TREE_CODE (outer_type) == REAL_TYPE)
10802 if (TREE_CODE (inner_type) == REAL_TYPE)
10803 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10804 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10806 if (TYPE_UNSIGNED (inner_type))
10807 return 1;
10808 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10811 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10813 if (TREE_CODE (inner_type) == REAL_TYPE)
10814 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
10815 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10816 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10817 && TYPE_UNSIGNED (inner_type);
10820 break;
10822 case COND_EXPR:
10823 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10824 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10825 case COMPOUND_EXPR:
10826 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10827 case MIN_EXPR:
10828 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10829 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10830 case MAX_EXPR:
10831 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10832 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10833 case MODIFY_EXPR:
10834 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10835 case BIND_EXPR:
10836 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10837 case SAVE_EXPR:
10838 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10839 case NON_LVALUE_EXPR:
10840 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10841 case FLOAT_EXPR:
10842 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10844 case TARGET_EXPR:
10846 tree temp = TARGET_EXPR_SLOT (t);
10847 t = TARGET_EXPR_INITIAL (t);
10849 /* If the initializer is non-void, then it's a normal expression
10850 that will be assigned to the slot. */
10851 if (!VOID_TYPE_P (t))
10852 return tree_expr_nonnegative_p (t);
10854 /* Otherwise, the initializer sets the slot in some way. One common
10855 way is an assignment statement at the end of the initializer. */
10856 while (1)
10858 if (TREE_CODE (t) == BIND_EXPR)
10859 t = expr_last (BIND_EXPR_BODY (t));
10860 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10861 || TREE_CODE (t) == TRY_CATCH_EXPR)
10862 t = expr_last (TREE_OPERAND (t, 0));
10863 else if (TREE_CODE (t) == STATEMENT_LIST)
10864 t = expr_last (t);
10865 else
10866 break;
10868 if (TREE_CODE (t) == MODIFY_EXPR
10869 && TREE_OPERAND (t, 0) == temp)
10870 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10872 return 0;
10875 case CALL_EXPR:
10877 tree fndecl = get_callee_fndecl (t);
10878 tree arglist = TREE_OPERAND (t, 1);
10879 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10880 switch (DECL_FUNCTION_CODE (fndecl))
10882 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10883 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10884 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10885 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10887 CASE_BUILTIN_F (BUILT_IN_ACOS)
10888 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10889 CASE_BUILTIN_F (BUILT_IN_CABS)
10890 CASE_BUILTIN_F (BUILT_IN_COSH)
10891 CASE_BUILTIN_F (BUILT_IN_ERFC)
10892 CASE_BUILTIN_F (BUILT_IN_EXP)
10893 CASE_BUILTIN_F (BUILT_IN_EXP10)
10894 CASE_BUILTIN_F (BUILT_IN_EXP2)
10895 CASE_BUILTIN_F (BUILT_IN_FABS)
10896 CASE_BUILTIN_F (BUILT_IN_FDIM)
10897 CASE_BUILTIN_F (BUILT_IN_FREXP)
10898 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10899 CASE_BUILTIN_F (BUILT_IN_POW10)
10900 CASE_BUILTIN_I (BUILT_IN_FFS)
10901 CASE_BUILTIN_I (BUILT_IN_PARITY)
10902 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10903 /* Always true. */
10904 return 1;
10906 CASE_BUILTIN_F (BUILT_IN_SQRT)
10907 /* sqrt(-0.0) is -0.0. */
10908 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10909 return 1;
10910 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10912 CASE_BUILTIN_F (BUILT_IN_ASINH)
10913 CASE_BUILTIN_F (BUILT_IN_ATAN)
10914 CASE_BUILTIN_F (BUILT_IN_ATANH)
10915 CASE_BUILTIN_F (BUILT_IN_CBRT)
10916 CASE_BUILTIN_F (BUILT_IN_CEIL)
10917 CASE_BUILTIN_F (BUILT_IN_ERF)
10918 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10919 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10920 CASE_BUILTIN_F (BUILT_IN_FMOD)
10921 CASE_BUILTIN_F (BUILT_IN_LCEIL)
10922 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10923 CASE_BUILTIN_F (BUILT_IN_LFLOOR)
10924 CASE_BUILTIN_F (BUILT_IN_LLCEIL)
10925 CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
10926 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10927 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10928 CASE_BUILTIN_F (BUILT_IN_LRINT)
10929 CASE_BUILTIN_F (BUILT_IN_LROUND)
10930 CASE_BUILTIN_F (BUILT_IN_MODF)
10931 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
10932 CASE_BUILTIN_F (BUILT_IN_POW)
10933 CASE_BUILTIN_F (BUILT_IN_RINT)
10934 CASE_BUILTIN_F (BUILT_IN_ROUND)
10935 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
10936 CASE_BUILTIN_F (BUILT_IN_SINH)
10937 CASE_BUILTIN_F (BUILT_IN_TANH)
10938 CASE_BUILTIN_F (BUILT_IN_TRUNC)
10939 /* True if the 1st argument is nonnegative. */
10940 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10942 CASE_BUILTIN_F (BUILT_IN_FMAX)
10943 /* True if the 1st OR the 2nd argument is nonnegative. */
10944 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10945 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10947 CASE_BUILTIN_F (BUILT_IN_FMIN)
10948 /* True if the 1st AND 2nd arguments are nonnegative. */
10949 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10950 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10952 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
10953 /* True if the 2nd argument is nonnegative. */
10954 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10956 default:
10957 break;
10958 #undef CASE_BUILTIN_F
10959 #undef CASE_BUILTIN_I
10963 /* ... fall through ... */
10965 default:
10966 if (truth_value_p (TREE_CODE (t)))
10967 /* Truth values evaluate to 0 or 1, which is nonnegative. */
10968 return 1;
10971 /* We don't know the sign of `t', so be conservative and return false. */
10972 return 0;
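/* A sketch of how the cases above combine (uchar_var is a hypothetical
   VAR_DECL of type unsigned char):

     tree x = build1 (NOP_EXPR, integer_type_node, uchar_var);

   tree_expr_nonnegative_p (x) returns 1 via the NOP_EXPR case: the
   inner type is a narrower unsigned INTEGER_TYPE. By contrast, for a
   plain signed VAR_DECL the default case applies and the result is 0,
   even though the variable might happen to be non-negative at run time
   -- the predicate is deliberately conservative.  */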
10975 /* Return true when T is an address and is known to be nonzero.
10976 For floating point we further ensure that T is not denormal.
10977 Similar logic is present in nonzero_address in rtlanal.c. */
10979 static bool
10980 tree_expr_nonzero_p (tree t)
10982 tree type = TREE_TYPE (t);
10984 /* Doing something useful for floating point would need more work. */
10985 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10986 return false;
10988 switch (TREE_CODE (t))
10990 case ABS_EXPR:
10991 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10992 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10994 case INTEGER_CST:
10995 /* We used to test for !integer_zerop here. This does not work correctly
10996 if TREE_CONSTANT_OVERFLOW (t). */
10997 return (TREE_INT_CST_LOW (t) != 0
10998 || TREE_INT_CST_HIGH (t) != 0);
11000 case PLUS_EXPR:
11001 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11003 /* In the presence of negative values it is hard
11004 to say anything definite. */
11005 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11006 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11007 return false;
11008 /* One of the operands must be positive and the other non-negative. */
11009 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11010 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11012 break;
11014 case MULT_EXPR:
11015 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11017 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11018 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11020 break;
11022 case NOP_EXPR:
11024 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11025 tree outer_type = TREE_TYPE (t);
11027 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11028 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11030 break;
11032 case ADDR_EXPR:
11034 tree base = get_base_address (TREE_OPERAND (t, 0));
11036 if (!base)
11037 return false;
11039 /* Weak declarations may link to NULL. */
11040 if (DECL_P (base))
11041 return !DECL_WEAK (base);
11043 /* Constants are never weak. */
11044 if (CONSTANT_CLASS_P (base))
11045 return true;
11047 return false;
11050 case COND_EXPR:
11051 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11052 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11054 case MIN_EXPR:
11055 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11056 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11058 case MAX_EXPR:
11059 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11061 /* When both operands are nonzero, then MAX must be too. */
11062 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11063 return true;
11065 /* MAX where operand 0 is positive is positive. */
11066 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11068 /* MAX where operand 1 is positive is positive. */
11069 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11070 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11071 return true;
11072 break;
11074 case COMPOUND_EXPR:
11075 case MODIFY_EXPR:
11076 case BIND_EXPR:
11077 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11079 case SAVE_EXPR:
11080 case NON_LVALUE_EXPR:
11081 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11083 case BIT_IOR_EXPR:
11084 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11085 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11087 default:
11088 break;
11090 return false;
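/* A sketch of the most common client, the ADDR_EXPR case (var_decl is a
   hypothetical non-weak VAR_DECL):

     tree addr = build_fold_addr_expr (var_decl);

   tree_expr_nonzero_p (addr) is true because get_base_address finds the
   decl and non-weak declarations cannot resolve to address zero; had
   the variable been declared __attribute__((weak)), the answer would be
   false, since a weak symbol may remain undefined at link time.  */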
11093 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11094 attempt to fold the expression to a constant without modifying TYPE,
11095 OP0 or OP1.
11097 If the expression could be simplified to a constant, then return
11098 the constant. If the expression cannot be simplified to a
11099 constant, then return NULL_TREE. */
11101 tree
11102 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11104 tree tem = fold_binary (code, type, op0, op1);
11105 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11108 /* Given the components of a unary expression CODE, TYPE and OP0,
11109 attempt to fold the expression to a constant without modifying
11110 TYPE or OP0.
11112 If the expression could be simplified to a constant, then return
11113 the constant. If the expression cannot be simplified to a
11114 constant, then return NULL_TREE. */
11116 tree
11117 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11119 tree tem = fold_unary (code, type, op0);
11120 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
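/* A sketch of the intended use (hypothetical constants): callers that
   must not create new non-constant trees can ask for a folded result
   and fall back cleanly on NULL_TREE:

     tree a = build_int_cst (integer_type_node, 6);
     tree b = build_int_cst (integer_type_node, 7);
     tree t = fold_binary_to_constant (MULT_EXPR, integer_type_node,
                                       a, b);
     tree u = fold_unary_to_constant (NEGATE_EXPR, integer_type_node, t);

   Here t should be the INTEGER_CST 42 and u the INTEGER_CST -42; had
   either operand been, say, a VAR_DECL, the calls would return
   NULL_TREE rather than a rearranged expression.  */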
11123 /* If EXP represents referencing an element in a constant string
11124 (either via pointer arithmetic or array indexing), return the
11125 tree representing the value accessed, otherwise return NULL. */
11127 tree
11128 fold_read_from_constant_string (tree exp)
11130 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11132 tree exp1 = TREE_OPERAND (exp, 0);
11133 tree index;
11134 tree string;
11136 if (TREE_CODE (exp) == INDIRECT_REF)
11137 string = string_constant (exp1, &index);
11138 else
11140 tree low_bound = array_ref_low_bound (exp);
11141 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11143 /* Optimize the special case of a zero lower bound.
11145 We convert the low_bound to sizetype to avoid some problems
11146 with constant folding. (E.g. suppose the lower bound is 1,
11147 and its mode is QI. Without the conversion, (ARRAY
11148 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11149 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11150 if (! integer_zerop (low_bound))
11151 index = size_diffop (index, fold_convert (sizetype, low_bound));
11153 string = exp1;
11156 if (string
11157 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11158 && TREE_CODE (string) == STRING_CST
11159 && TREE_CODE (index) == INTEGER_CST
11160 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11161 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11162 == MODE_INT)
11163 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11164 return fold_convert (TREE_TYPE (exp),
11165 build_int_cst (NULL_TREE,
11166 (TREE_STRING_POINTER (string)
11167 [TREE_INT_CST_LOW (index)])));
11169 return NULL;
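/* A worked example (hypothetical trees): for the C expression "abc"[1],
   EXP is an ARRAY_REF of a STRING_CST with index 1. The checks above
   confirm that the index is an in-range INTEGER_CST and that the
   element type is a one-byte integer mode, so the function returns the
   character constant 'b' converted to TREE_TYPE (exp). An out-of-range
   access such as "abc"[7] fails the compare_tree_int test and yields
   NULL.  */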
11172 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11173 an integer constant or real constant.
11175 TYPE is the type of the result. */
11177 static tree
11178 fold_negate_const (tree arg0, tree type)
11180 tree t = NULL_TREE;
11182 switch (TREE_CODE (arg0))
11184 case INTEGER_CST:
11186 unsigned HOST_WIDE_INT low;
11187 HOST_WIDE_INT high;
11188 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11189 TREE_INT_CST_HIGH (arg0),
11190 &low, &high);
11191 t = build_int_cst_wide (type, low, high);
11192 t = force_fit_type (t, 1,
11193 (overflow | TREE_OVERFLOW (arg0))
11194 && !TYPE_UNSIGNED (type),
11195 TREE_CONSTANT_OVERFLOW (arg0));
11196 break;
11199 case REAL_CST:
11200 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11201 break;
11203 default:
11204 gcc_unreachable ();
11207 return t;
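/* A sketch of the overflow handling (assuming a 32-bit int type):
   negating the most negative value is not representable, so neg_double
   reports overflow and force_fit_type records it on the result:

     tree min = TYPE_MIN_VALUE (integer_type_node);
     tree neg = fold_negate_const (min, integer_type_node);

   Here neg wraps back to the same value but with TREE_OVERFLOW set for
   the signed type; for unsigned types the wraparound is well defined
   and no overflow is recorded.  */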
11210 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11211 an integer constant or real constant.
11213 TYPE is the type of the result. */
11215 tree
11216 fold_abs_const (tree arg0, tree type)
11218 tree t = NULL_TREE;
11220 switch (TREE_CODE (arg0))
11222 case INTEGER_CST:
11223 /* If the value is unsigned, then the absolute value is
11224 the same as the ordinary value. */
11225 if (TYPE_UNSIGNED (type))
11226 t = arg0;
11227 /* Similarly, if the value is non-negative. */
11228 else if (INT_CST_LT (integer_minus_one_node, arg0))
11229 t = arg0;
11230 /* If the value is negative, then the absolute value is
11231 its negation. */
11232 else
11234 unsigned HOST_WIDE_INT low;
11235 HOST_WIDE_INT high;
11236 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11237 TREE_INT_CST_HIGH (arg0),
11238 &low, &high);
11239 t = build_int_cst_wide (type, low, high);
11240 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11241 TREE_CONSTANT_OVERFLOW (arg0));
11243 break;
11245 case REAL_CST:
11246 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11247 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11248 else
11249 t = arg0;
11250 break;
11252 default:
11253 gcc_unreachable ();
11256 return t;
11259 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11260 constant. TYPE is the type of the result. */
11262 static tree
11263 fold_not_const (tree arg0, tree type)
11265 tree t = NULL_TREE;
11267 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11269 t = build_int_cst_wide (type,
11270 ~ TREE_INT_CST_LOW (arg0),
11271 ~ TREE_INT_CST_HIGH (arg0));
11272 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11273 TREE_CONSTANT_OVERFLOW (arg0));
11275 return t;
11278 /* Given CODE, a relational operator, the target type, TYPE and two
11279 constant operands OP0 and OP1, return the result of the
11280 relational operation. If the result is not a compile time
11281 constant, then return NULL_TREE. */
11283 static tree
11284 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11286 int result, invert;
11288 /* From here on, the only cases we handle are when the result is
11289 known to be a constant. */
11291 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11293 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11294 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11296 /* Handle the cases where either operand is a NaN. */
11297 if (real_isnan (c0) || real_isnan (c1))
11299 switch (code)
11301 case EQ_EXPR:
11302 case ORDERED_EXPR:
11303 result = 0;
11304 break;
11306 case NE_EXPR:
11307 case UNORDERED_EXPR:
11308 case UNLT_EXPR:
11309 case UNLE_EXPR:
11310 case UNGT_EXPR:
11311 case UNGE_EXPR:
11312 case UNEQ_EXPR:
11313 result = 1;
11314 break;
11316 case LT_EXPR:
11317 case LE_EXPR:
11318 case GT_EXPR:
11319 case GE_EXPR:
11320 case LTGT_EXPR:
11321 if (flag_trapping_math)
11322 return NULL_TREE;
11323 result = 0;
11324 break;
11326 default:
11327 gcc_unreachable ();
11330 return constant_boolean_node (result, type);
11333 return constant_boolean_node (real_compare (code, c0, c1), type);
11336 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11338 To compute GT, swap the arguments and do LT.
11339 To compute GE, do LT and invert the result.
11340 To compute LE, swap the arguments, do LT and invert the result.
11341 To compute NE, do EQ and invert the result.
11343 Therefore, the code below must handle only EQ and LT. */
11345 if (code == LE_EXPR || code == GT_EXPR)
11347 tree tem = op0;
11348 op0 = op1;
11349 op1 = tem;
11350 code = swap_tree_comparison (code);
11353 /* Note that it is safe to invert for real values here because we
11354 have already handled the one case where it matters. */
11356 invert = 0;
11357 if (code == NE_EXPR || code == GE_EXPR)
11359 invert = 1;
11360 code = invert_tree_comparison (code, false);
11363 /* Compute a result for LT or EQ if the arguments permit;
11364 otherwise return NULL_TREE. */
11365 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11367 if (code == EQ_EXPR)
11368 result = tree_int_cst_equal (op0, op1);
11369 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11370 result = INT_CST_LT_UNSIGNED (op0, op1);
11371 else
11372 result = INT_CST_LT (op0, op1);
11374 else
11375 return NULL_TREE;
11377 if (invert)
11378 result ^= 1;
11379 return constant_boolean_node (result, type);
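/* A worked example of the reductions above (hypothetical constants):
   to fold 5 >= 3, GE is inverted to LT (setting invert = 1),
   INT_CST_LT (5, 3) yields 0, and the final XOR gives 1. A GT would
   instead first swap the operands, so only the LT and EQ cases are
   ever computed directly.  */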
11382 /* Build an expression for a cleanup point containing EXPR with type TYPE.
11383 Don't build a cleanup point expression for an EXPR which doesn't have
11384 side effects. */
11386 tree
11387 fold_build_cleanup_point_expr (tree type, tree expr)
11389 /* If the expression does not have side effects then we don't have to wrap
11390 it with a cleanup point expression. */
11391 if (!TREE_SIDE_EFFECTS (expr))
11392 return expr;
11394 /* If the expression is a return, check whether the expression inside
11395 the return, or the right-hand side of the modify expression inside
11396 the return, has side effects; if either does not, we need not wrap
11397 the expression in a cleanup point expression. We don't check the
11398 left-hand side of the modify because it should always be the return decl. */
11399 if (TREE_CODE (expr) == RETURN_EXPR)
11401 tree op = TREE_OPERAND (expr, 0);
11402 if (!op || !TREE_SIDE_EFFECTS (op))
11403 return expr;
11404 op = TREE_OPERAND (op, 1);
11405 if (!TREE_SIDE_EFFECTS (op))
11406 return expr;
11409 return build1 (CLEANUP_POINT_EXPR, type, expr);
11412 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11413 avoid confusing the gimplify process. */
11415 tree
11416 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11418 /* The size of the object is not relevant when talking about its address. */
11419 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11420 t = TREE_OPERAND (t, 0);
11422 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11423 if (TREE_CODE (t) == INDIRECT_REF
11424 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11426 t = TREE_OPERAND (t, 0);
11427 if (TREE_TYPE (t) != ptrtype)
11428 t = build1 (NOP_EXPR, ptrtype, t);
11430 else
11432 tree base = t;
11434 while (handled_component_p (base))
11435 base = TREE_OPERAND (base, 0);
11436 if (DECL_P (base))
11437 TREE_ADDRESSABLE (base) = 1;
11439 t = build1 (ADDR_EXPR, ptrtype, t);
11442 return t;
11445 tree
11446 build_fold_addr_expr (tree t)
11448 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11451 /* Given a pointer value T, return a simplified version of an indirection
11452 through T, or NULL_TREE if no simplification is possible. */
11454 static tree
11455 fold_indirect_ref_1 (tree t)
11457 tree type = TREE_TYPE (TREE_TYPE (t));
11458 tree sub = t;
11459 tree subtype;
11461 STRIP_NOPS (sub);
11462 subtype = TREE_TYPE (sub);
11463 if (!POINTER_TYPE_P (subtype))
11464 return NULL_TREE;
11466 if (TREE_CODE (sub) == ADDR_EXPR)
11468 tree op = TREE_OPERAND (sub, 0);
11469 tree optype = TREE_TYPE (op);
11470 /* *&p => p */
11471 if (lang_hooks.types_compatible_p (type, optype))
11472 return op;
11473 /* *(foo *)&fooarray => fooarray[0] */
11474 else if (TREE_CODE (optype) == ARRAY_TYPE
11475 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
11477 tree type_domain = TYPE_DOMAIN (optype);
11478 tree min_val = size_zero_node;
11479 if (type_domain && TYPE_MIN_VALUE (type_domain))
11480 min_val = TYPE_MIN_VALUE (type_domain);
11481 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11485 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11486 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11487 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
11489 tree type_domain;
11490 tree min_val = size_zero_node;
11491 sub = build_fold_indirect_ref (sub);
11492 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11493 if (type_domain && TYPE_MIN_VALUE (type_domain))
11494 min_val = TYPE_MIN_VALUE (type_domain);
11495 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11498 return NULL_TREE;
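/* A sketch of the array case (buf_decl is a hypothetical VAR_DECL for
   "char buf[16]"):

     tree p = fold_convert (build_pointer_type (char_type_node),
                            build_fold_addr_expr (buf_decl));
     tree r = build_fold_indirect_ref (p);

   STRIP_NOPS exposes the ADDR_EXPR, the "*(foo *)&fooarray =>
   fooarray[0]" rule fires, and r becomes the ARRAY_REF buf[0] rather
   than a pointless INDIRECT_REF.  */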
11501 /* Builds an expression for an indirection through T, simplifying some
11502 cases. */
11504 tree
11505 build_fold_indirect_ref (tree t)
11507 tree sub = fold_indirect_ref_1 (t);
11509 if (sub)
11510 return sub;
11511 else
11512 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (t)), t);
11515 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11517 tree
11518 fold_indirect_ref (tree t)
11520 tree sub = fold_indirect_ref_1 (TREE_OPERAND (t, 0));
11522 if (sub)
11523 return sub;
11524 else
11525 return t;
11528 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11529 whose result is ignored. The type of the returned tree need not be
11530 the same as the original expression. */
11532 tree
11533 fold_ignored_result (tree t)
11535 if (!TREE_SIDE_EFFECTS (t))
11536 return integer_zero_node;
11538 for (;;)
11539 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11541 case tcc_unary:
11542 t = TREE_OPERAND (t, 0);
11543 break;
11545 case tcc_binary:
11546 case tcc_comparison:
11547 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11548 t = TREE_OPERAND (t, 0);
11549 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11550 t = TREE_OPERAND (t, 1);
11551 else
11552 return t;
11553 break;
11555 case tcc_expression:
11556 switch (TREE_CODE (t))
11558 case COMPOUND_EXPR:
11559 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11560 return t;
11561 t = TREE_OPERAND (t, 0);
11562 break;
11564 case COND_EXPR:
11565 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11566 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11567 return t;
11568 t = TREE_OPERAND (t, 0);
11569 break;
11571 default:
11572 return t;
11574 break;
11576 default:
11577 return t;
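/* A sketch (x and y are hypothetical VAR_DECLs): for the statement
   "(void) (x + (y = 3));" the PLUS_EXPR itself has no effect, so the
   tcc_binary case discards the side-effect-free operand x and the loop
   returns just the MODIFY_EXPR "y = 3", which is all that needs to
   survive.  */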
11581 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11582 This can only be applied to objects of a sizetype. */
11584 tree
11585 round_up (tree value, int divisor)
11587 tree div = NULL_TREE;
11589 gcc_assert (divisor > 0);
11590 if (divisor == 1)
11591 return value;
11593 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11594 have to do anything. Only do this when we are not given a const,
11595 because in that case, this check is more expensive than just
11596 doing it. */
11597 if (TREE_CODE (value) != INTEGER_CST)
11599 div = build_int_cst (TREE_TYPE (value), divisor);
11601 if (multiple_of_p (TREE_TYPE (value), value, div))
11602 return value;
11605 /* If divisor is a power of two, simplify this to bit manipulation. */
11606 if (divisor == (divisor & -divisor))
11608 tree t;
11610 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11611 value = size_binop (PLUS_EXPR, value, t);
11612 t = build_int_cst (TREE_TYPE (value), -divisor);
11613 value = size_binop (BIT_AND_EXPR, value, t);
11615 else
11617 if (!div)
11618 div = build_int_cst (TREE_TYPE (value), divisor);
11619 value = size_binop (CEIL_DIV_EXPR, value, div);
11620 value = size_binop (MULT_EXPR, value, div);
11623 return value;
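/* The power-of-two path is the classic alignment idiom
   (VALUE + DIVISOR - 1) & -DIVISOR. Worked case: rounding 13 up to a
   multiple of 8 computes (13 + 7) & -8, i.e. 20 & ~7 = 16. A sketch of
   a call, using the usual sizetype helper:

     tree sz = round_up (size_int (13), 8);

   Here sz should be the INTEGER_CST 16.  */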
11626 /* Likewise, but round down. */
11628 tree
11629 round_down (tree value, int divisor)
11631 tree div = NULL_TREE;
11633 gcc_assert (divisor > 0);
11634 if (divisor == 1)
11635 return value;
11637 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11638 have to do anything. Only do this when we are not given a const,
11639 because in that case, this check is more expensive than just
11640 doing it. */
11641 if (TREE_CODE (value) != INTEGER_CST)
11643 div = build_int_cst (TREE_TYPE (value), divisor);
11645 if (multiple_of_p (TREE_TYPE (value), value, div))
11646 return value;
11649 /* If divisor is a power of two, simplify this to bit manipulation. */
11650 if (divisor == (divisor & -divisor))
11652 tree t;
11654 t = build_int_cst (TREE_TYPE (value), -divisor);
11655 value = size_binop (BIT_AND_EXPR, value, t);
11657 else
11659 if (!div)
11660 div = build_int_cst (TREE_TYPE (value), divisor);
11661 value = size_binop (FLOOR_DIV_EXPR, value, div);
11662 value = size_binop (MULT_EXPR, value, div);
11665 return value;
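/* Likewise, the round_down bit trick is simply VALUE & -DIVISOR:
   rounding 13 down to a multiple of 8 computes 13 & ~7 = 8, so
   round_down (size_int (13), 8) should yield the INTEGER_CST 8.  */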
11668 /* Returns a pointer to the base of the object addressed by EXP and
11669 extracts information about the offset of the access, storing it
11670 in *PBITPOS and *POFFSET. */
11672 static tree
11673 split_address_to_core_and_offset (tree exp,
11674 HOST_WIDE_INT *pbitpos, tree *poffset)
11676 tree core;
11677 enum machine_mode mode;
11678 int unsignedp, volatilep;
11679 HOST_WIDE_INT bitsize;
11681 if (TREE_CODE (exp) == ADDR_EXPR)
11683 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11684 poffset, &mode, &unsignedp, &volatilep,
11685 false);
11687 if (TREE_CODE (core) == INDIRECT_REF)
11688 core = TREE_OPERAND (core, 0);
11690 else
11692 core = exp;
11693 *pbitpos = 0;
11694 *poffset = NULL_TREE;
11697 return core;
11700 /* Returns true if addresses of E1 and E2 differ by a constant, false
11701 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11703 bool
11704 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11706 tree core1, core2;
11707 HOST_WIDE_INT bitpos1, bitpos2;
11708 tree toffset1, toffset2, tdiff, type;
11710 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11711 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11713 if (bitpos1 % BITS_PER_UNIT != 0
11714 || bitpos2 % BITS_PER_UNIT != 0
11715 || !operand_equal_p (core1, core2, 0))
11716 return false;
11718 if (toffset1 && toffset2)
11720 type = TREE_TYPE (toffset1);
11721 if (type != TREE_TYPE (toffset2))
11722 toffset2 = fold_convert (type, toffset2);
11724 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11725 if (!host_integerp (tdiff, 0))
11726 return false;
11728 *diff = tree_low_cst (tdiff, 0);
11730 else if (toffset1 || toffset2)
11732 /* If only one of the offsets is non-constant, the difference cannot
11733 be a constant. */
11734 return false;
11736 else
11737 *diff = 0;
11739 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11740 return true;
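/* A worked example (hypothetical decls): for "struct S { int a; int b; } s;"
   with E1 = &s.b and E2 = &s.a, both addresses split to the same core s
   with constant bit positions, so *DIFF receives the byte offset of b
   minus that of a -- typically 4 -- and the function returns true. If
   either address involved a variable array index, the toffset tree
   would not be a host integer and the result would be false.  */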
11743 /* Simplify the floating point expression EXP when the sign of the
11744 result is not significant. Return NULL_TREE if no simplification
11745 is possible. */
11747 tree
11748 fold_strip_sign_ops (tree exp)
11750 tree arg0, arg1;
11752 switch (TREE_CODE (exp))
11754 case ABS_EXPR:
11755 case NEGATE_EXPR:
11756 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11757 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11759 case MULT_EXPR:
11760 case RDIV_EXPR:
11761 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11762 return NULL_TREE;
11763 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11764 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11765 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11766 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11767 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11768 arg1 ? arg1 : TREE_OPERAND (exp, 1));
11769 break;
11771 default:
11772 break;
11774 return NULL_TREE;
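/* A sketch (x and y are hypothetical double-typed trees): when only the
   magnitude of a result matters, e.g. inside fabs (-x * y), the
   negation can be discarded:

     tree prod = build2 (MULT_EXPR, double_type_node,
                         build1 (NEGATE_EXPR, double_type_node, x), y);
     tree stripped = fold_strip_sign_ops (prod);

   Here stripped should be x * y; if the mode honors sign-dependent
   rounding, the function instead returns NULL_TREE to say that no such
   simplification is safe.  */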