gcc/fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

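/* The encoding packs the primitive predicates LT, EQ, GT and UNORD
   into the low four bits, so every composite code is the bitwise OR
   of the primitives it contains.  For example,
   COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) == 3 and
   COMPCODE_UNLE == (COMPCODE_UNORD | COMPCODE_LE) == 11, which is why
   ANDing or ORing two codes combines the corresponding comparisons.  */
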
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

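/* Worked example, using 8-bit values for illustration: 0x70 + 0x70
   wraps to 0xE0, so A and B are positive while SUM is negative.
   ~(A ^ B) == 0xFF has the sign bit set (operands had equal signs) and
   A ^ SUM == 0x90 has it set as well (operand and sum signs differ),
   so the macro yields nonzero.  For operands of opposite signs the
   ~((a) ^ (b)) factor has a clear sign bit, and the macro correctly
   yields zero: such an addition can never overflow.  */
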
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

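/* For instance, assuming a 64-bit HOST_WIDE_INT, BASE is 1 << 32,
   LOWPART (0x123456789abcdef0) == 0x9abcdef0 and
   HIGHPART (0x123456789abcdef0) == 0x12345678; the original value is
   recovered as LOWPART + HIGHPART * BASE.  */
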
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}

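/* A minimal usage sketch, illustrative only and again assuming a
   64-bit HOST_WIDE_INT so that BASE == (unsigned HOST_WIDE_INT) 1 << 32:

     HOST_WIDE_INT w[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;

     encode (w, 0x123456789abcdef0ULL, 17);
     // w[0] == 0x9abcdef0, w[1] == 0x12345678, w[2] == 17, w[3] == 0,
     // all stored as nonnegative halfword digits.
     decode (w, &lo, &hi);
     // lo == 0x123456789abcdef0 and hi == 17: decode inverts encode.  */
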
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
        CONST_OVERFLOWED is nonzero,
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half?  */
      if (high & ((unsigned HOST_WIDE_INT) 1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half?  */
      if (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}

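/* Example of the clamping behavior (illustrative): for a signed 8-bit
   type, a constant whose low word holds 0x1ff first has the bits
   beyond the precision cleared, leaving 0xff, and is then sign
   extended to -1 because bit 7 is set.  Since the value changed, a
   fresh node is returned, and with OVERFLOWABLE > 0 on this signed
   type the TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW flags are set on
   the copy.  */
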
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}

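/* The carry out of the low word is detected by the unsigned
   comparison L < L1: the sum of two unsigned words wrapped around if
   and only if the result is smaller than either operand.  E.g. with
   4-bit words, 0xC + 0x6 == 0x2 and 0x2 < 0xC, so 1 is carried into
   the high word.  */
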
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}

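/* The loop above forms the full unsigned product in PROD.  For signed
   inputs, a negative H1 means the operand was treated as its value
   plus 2**(2*N), where N is HOST_BITS_PER_WIDE_INT; that contributes
   an extra copy of the other operand to the top half of the product,
   which the conditional neg_double/add_double pair subtracts back out
   before the overflow test.  The final test then reports overflow
   unless the corrected top half is exactly the sign extension of the
   low half: all ones when *HV is negative, all zeros otherwise.  */
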
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      /* The two-step right shift below avoids shifting by the full
         word size, which would be undefined, when COUNT is zero.  */
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      /* As in lshift_double, the two-step left shift stays defined
         when COUNT is zero.  */
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

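/* Both rotates rely on the identity
     rotl (x, count) == (x << count) | (x >> (prec - count))
   taken modulo PREC bits; e.g. rotating the 8-bit value 0x81 left by
   one gives 0x03.  The helper shifts are called with ARITH == 0, so
   no sign bits are smeared into the vacated positions.  */
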
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}

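/* Worked example of the rounding adjustments, dividing -7 by 2
   (illustrative): the trial quotient and remainder are -3 and -1.
   TRUNC_DIV_EXPR keeps -3; FLOOR_DIV_EXPR rounds toward negative
   infinity and adjusts to -4; CEIL_DIV_EXPR keeps -3; ROUND_DIV_EXPR
   sees 2 * |rem| == |den| and rounds the halfway case away from zero
   to -4.  The true remainder is then recomputed against the adjusted
   quotient: -1 for TRUNC and CEIL, 1 for FLOOR and ROUND.  */
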
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  return false;
}

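/* E.g. sin qualifies because sin (-x) == -sin (x), so -sin (x) can be
   folded to sin (-x), saving the explicit negation; cos would not
   qualify because it is an even function.  */
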
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

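/* In other words, the only signed value whose negation overflows is
   the most negative one, whose bit pattern within the precision is
   exactly 1 << (prec - 1); e.g. for 32-bit int that is
   INT_MIN == -2147483648, since +2147483648 is not representable.  */
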
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type)
          || (flag_wrapv && ! flag_trapv))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type)
             && (TYPE_UNSIGNED (type)
                 || (flag_wrapv && !flag_trapv));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

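/* For instance, with CODE == PLUS_EXPR, splitting X + 4 stores 4 in
   *LITP and returns X; splitting X - 4 stores 4 in *MINUS_LITP
   instead; and splitting X + C, where C is TREE_CONSTANT but not a
   literal constant, stores C in *CONP and returns X.  */
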
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
      /* Fall through: a right shift is a left shift by a negative count.  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* Fall through: a right rotate is a left rotate by a negative count.  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}

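/* A small usage sketch (illustrative only), folding 7 + 9 in type int:

     tree a = build_int_cst (integer_type_node, 7);
     tree b = build_int_cst (integer_type_node, 9);
     tree sum = int_const_binop (PLUS_EXPR, a, b, 0);
     // sum is an INTEGER_CST holding 16.  With NOTRUNC == 0 the result
     // passed through force_fit_type, so a wrapped signed addition
     // would have returned a node with TREE_OVERFLOW set.  */
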
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may be dependent upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}

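/* The complex cases implement the usual textbook identities:
     (a + bi) * (c + di) == (ac - bd) + (ad + bc)i
     (a + bi) / (c + di) == ((ac + bd) + (bc - ad)i) / (c*c + d*d)
   with MAGSQUARED holding c*c + d*d, T1 the new real numerator and T2
   the new imaginary numerator; for integral complex types the final
   division is performed with TRUNC_DIV_EXPR.  */
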
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}

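/* For example (illustrative): converting the REAL_CST 1.0e30 to
   32-bit int saturates to INT_MAX (2147483647) and marks the result
   with TREE_OVERFLOW, while a NaN converts to zero, again with the
   overflow flag set, matching the Java saturation semantics described
   above.  */
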
/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }

  return NULL_TREE;
}

1959 /* Construct a vector of zero elements of vector type TYPE. */
1961 static tree
1962 build_zero_vector (tree type)
1964 tree elem, list;
1965 int i, units;
1967 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1968 units = TYPE_VECTOR_SUBPARTS (type);
1970 list = NULL_TREE;
1971 for (i = 0; i < units; i++)
1972 list = tree_cons (NULL_TREE, elem, list);
1973 return build_vector (type, list);
1976 /* Convert expression ARG to type TYPE. Used by the middle-end for
1977 simple conversions in preference to calling the front-end's convert. */
1979 tree
1980 fold_convert (tree type, tree arg)
1982 tree orig = TREE_TYPE (arg);
1983 tree tem;
1985 if (type == orig)
1986 return arg;
1988 if (TREE_CODE (arg) == ERROR_MARK
1989 || TREE_CODE (type) == ERROR_MARK
1990 || TREE_CODE (orig) == ERROR_MARK)
1991 return error_mark_node;
1993 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1994 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1995 TYPE_MAIN_VARIANT (orig)))
1996 return fold_build1 (NOP_EXPR, type, arg);
1998 switch (TREE_CODE (type))
2000 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2001 case POINTER_TYPE: case REFERENCE_TYPE:
2002 case OFFSET_TYPE:
2003 if (TREE_CODE (arg) == INTEGER_CST)
2005 tem = fold_convert_const (NOP_EXPR, type, arg);
2006 if (tem != NULL_TREE)
2007 return tem;
2009 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2010 || TREE_CODE (orig) == OFFSET_TYPE)
2011 return fold_build1 (NOP_EXPR, type, arg);
2012 if (TREE_CODE (orig) == COMPLEX_TYPE)
2014 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2015 return fold_convert (type, tem);
2017 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2018 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2019 return fold_build1 (NOP_EXPR, type, arg);
2021 case REAL_TYPE:
2022 if (TREE_CODE (arg) == INTEGER_CST)
2024 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2025 if (tem != NULL_TREE)
2026 return tem;
2028 else if (TREE_CODE (arg) == REAL_CST)
2030 tem = fold_convert_const (NOP_EXPR, type, arg);
2031 if (tem != NULL_TREE)
2032 return tem;
2035 switch (TREE_CODE (orig))
2037 case INTEGER_TYPE:
2038 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2039 case POINTER_TYPE: case REFERENCE_TYPE:
2040 return fold_build1 (FLOAT_EXPR, type, arg);
2042 case REAL_TYPE:
2043 return fold_build1 (NOP_EXPR, type, arg);
2045 case COMPLEX_TYPE:
2046 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2047 return fold_convert (type, tem);
2049 default:
2050 gcc_unreachable ();
2053 case COMPLEX_TYPE:
2054 switch (TREE_CODE (orig))
2056 case INTEGER_TYPE:
2057 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2058 case POINTER_TYPE: case REFERENCE_TYPE:
2059 case REAL_TYPE:
2060 return build2 (COMPLEX_EXPR, type,
2061 fold_convert (TREE_TYPE (type), arg),
2062 fold_convert (TREE_TYPE (type), integer_zero_node));
2063 case COMPLEX_TYPE:
2065 tree rpart, ipart;
2067 if (TREE_CODE (arg) == COMPLEX_EXPR)
2069 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2070 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2071 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2074 arg = save_expr (arg);
2075 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2076 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2077 rpart = fold_convert (TREE_TYPE (type), rpart);
2078 ipart = fold_convert (TREE_TYPE (type), ipart);
2079 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2082 default:
2083 gcc_unreachable ();
2086 case VECTOR_TYPE:
2087 if (integer_zerop (arg))
2088 return build_zero_vector (type);
2089 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2090 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2091 || TREE_CODE (orig) == VECTOR_TYPE);
2092 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2094 case VOID_TYPE:
2095 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2097 default:
2098 gcc_unreachable ();
2102 /* Return false if expr can be assumed not to be an lvalue, true
2103 otherwise. */
2105 static bool
2106 maybe_lvalue_p (tree x)
2108 /* We only need to wrap lvalue tree codes. */
2109 switch (TREE_CODE (x))
2111 case VAR_DECL:
2112 case PARM_DECL:
2113 case RESULT_DECL:
2114 case LABEL_DECL:
2115 case FUNCTION_DECL:
2116 case SSA_NAME:
2118 case COMPONENT_REF:
2119 case INDIRECT_REF:
2120 case ALIGN_INDIRECT_REF:
2121 case MISALIGNED_INDIRECT_REF:
2122 case ARRAY_REF:
2123 case ARRAY_RANGE_REF:
2124 case BIT_FIELD_REF:
2125 case OBJ_TYPE_REF:
2127 case REALPART_EXPR:
2128 case IMAGPART_EXPR:
2129 case PREINCREMENT_EXPR:
2130 case PREDECREMENT_EXPR:
2131 case SAVE_EXPR:
2132 case TRY_CATCH_EXPR:
2133 case WITH_CLEANUP_EXPR:
2134 case COMPOUND_EXPR:
2135 case MODIFY_EXPR:
2136 case TARGET_EXPR:
2137 case COND_EXPR:
2138 case BIND_EXPR:
2139 case MIN_EXPR:
2140 case MAX_EXPR:
2141 break;
2143 default:
2144 /* Assume the worst for front-end tree codes. */
2145 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2146 break;
2147 return false;
2150 return true;
2153 /* Return an expr equal to X but certainly not valid as an lvalue. */
2155 tree
2156 non_lvalue (tree x)
2158 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2159 us. */
2160 if (in_gimple_form)
2161 return x;
2163 if (! maybe_lvalue_p (x))
2164 return x;
2165 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2168 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2169 Zero means allow extended lvalues. */
2171 int pedantic_lvalues;
2173 /* When pedantic, return an expr equal to X but certainly not valid as a
2174 pedantic lvalue. Otherwise, return X. */
2176 static tree
2177 pedantic_non_lvalue (tree x)
2179 if (pedantic_lvalues)
2180 return non_lvalue (x);
2181 else
2182 return x;
2185 /* Given a tree comparison code, return the code that is the logical inverse
2186 of the given code. It is not safe to do this for floating-point
2187 comparisons, except for NE_EXPR and EQ_EXPR, so we also receive a flag
2188 saying whether NaNs are honored: if reversing is unsafe, return ERROR_MARK. */
2190 enum tree_code
2191 invert_tree_comparison (enum tree_code code, bool honor_nans)
2193 if (honor_nans && flag_trapping_math)
2194 return ERROR_MARK;
2196 switch (code)
2198 case EQ_EXPR:
2199 return NE_EXPR;
2200 case NE_EXPR:
2201 return EQ_EXPR;
2202 case GT_EXPR:
2203 return honor_nans ? UNLE_EXPR : LE_EXPR;
2204 case GE_EXPR:
2205 return honor_nans ? UNLT_EXPR : LT_EXPR;
2206 case LT_EXPR:
2207 return honor_nans ? UNGE_EXPR : GE_EXPR;
2208 case LE_EXPR:
2209 return honor_nans ? UNGT_EXPR : GT_EXPR;
2210 case LTGT_EXPR:
2211 return UNEQ_EXPR;
2212 case UNEQ_EXPR:
2213 return LTGT_EXPR;
2214 case UNGT_EXPR:
2215 return LE_EXPR;
2216 case UNGE_EXPR:
2217 return LT_EXPR;
2218 case UNLT_EXPR:
2219 return GE_EXPR;
2220 case UNLE_EXPR:
2221 return GT_EXPR;
2222 case ORDERED_EXPR:
2223 return UNORDERED_EXPR;
2224 case UNORDERED_EXPR:
2225 return ORDERED_EXPR;
2226 default:
2227 gcc_unreachable ();
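/* Illustrative sketch, not part of fold-const.c: why the inverse of a
   floating-point comparison must be an unordered variant when NaNs are
   honored.  With x = NaN, "x < y" is false, so its logical inverse is
   true, yet "x >= y" is also false -- only UNGE_EXPR ("unordered or
   greater or equal") is the correct inverse.  Guarded out of the build.  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double x = NAN, y = 1.0;
  printf ("!(x < y) = %d\n", !(x < y));	/* Prints 1.  */
  printf ("x >= y   = %d\n", x >= y);	/* Prints 0: GE_EXPR is not the inverse.  */
  return 0;
}
#endif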
2231 /* Similar, but return the comparison that results if the operands are
2232 swapped. This is safe for floating-point. */
2234 enum tree_code
2235 swap_tree_comparison (enum tree_code code)
2237 switch (code)
2239 case EQ_EXPR:
2240 case NE_EXPR:
2241 case ORDERED_EXPR:
2242 case UNORDERED_EXPR:
2243 case LTGT_EXPR:
2244 case UNEQ_EXPR:
2245 return code;
2246 case GT_EXPR:
2247 return LT_EXPR;
2248 case GE_EXPR:
2249 return LE_EXPR;
2250 case LT_EXPR:
2251 return GT_EXPR;
2252 case LE_EXPR:
2253 return GE_EXPR;
2254 case UNGT_EXPR:
2255 return UNLT_EXPR;
2256 case UNGE_EXPR:
2257 return UNLE_EXPR;
2258 case UNLT_EXPR:
2259 return UNGT_EXPR;
2260 case UNLE_EXPR:
2261 return UNGE_EXPR;
2262 default:
2263 gcc_unreachable ();
2268 /* Convert a comparison tree code from an enum tree_code representation
2269 into a compcode bit-based encoding. This function is the inverse of
2270 compcode_to_comparison. */
2272 static enum comparison_code
2273 comparison_to_compcode (enum tree_code code)
2275 switch (code)
2277 case LT_EXPR:
2278 return COMPCODE_LT;
2279 case EQ_EXPR:
2280 return COMPCODE_EQ;
2281 case LE_EXPR:
2282 return COMPCODE_LE;
2283 case GT_EXPR:
2284 return COMPCODE_GT;
2285 case NE_EXPR:
2286 return COMPCODE_NE;
2287 case GE_EXPR:
2288 return COMPCODE_GE;
2289 case ORDERED_EXPR:
2290 return COMPCODE_ORD;
2291 case UNORDERED_EXPR:
2292 return COMPCODE_UNORD;
2293 case UNLT_EXPR:
2294 return COMPCODE_UNLT;
2295 case UNEQ_EXPR:
2296 return COMPCODE_UNEQ;
2297 case UNLE_EXPR:
2298 return COMPCODE_UNLE;
2299 case UNGT_EXPR:
2300 return COMPCODE_UNGT;
2301 case LTGT_EXPR:
2302 return COMPCODE_LTGT;
2303 case UNGE_EXPR:
2304 return COMPCODE_UNGE;
2305 default:
2306 gcc_unreachable ();
2310 /* Convert a compcode bit-based encoding of a comparison operator back
2311 to GCC's enum tree_code representation. This function is the
2312 inverse of comparison_to_compcode. */
2314 static enum tree_code
2315 compcode_to_comparison (enum comparison_code code)
2317 switch (code)
2319 case COMPCODE_LT:
2320 return LT_EXPR;
2321 case COMPCODE_EQ:
2322 return EQ_EXPR;
2323 case COMPCODE_LE:
2324 return LE_EXPR;
2325 case COMPCODE_GT:
2326 return GT_EXPR;
2327 case COMPCODE_NE:
2328 return NE_EXPR;
2329 case COMPCODE_GE:
2330 return GE_EXPR;
2331 case COMPCODE_ORD:
2332 return ORDERED_EXPR;
2333 case COMPCODE_UNORD:
2334 return UNORDERED_EXPR;
2335 case COMPCODE_UNLT:
2336 return UNLT_EXPR;
2337 case COMPCODE_UNEQ:
2338 return UNEQ_EXPR;
2339 case COMPCODE_UNLE:
2340 return UNLE_EXPR;
2341 case COMPCODE_UNGT:
2342 return UNGT_EXPR;
2343 case COMPCODE_LTGT:
2344 return LTGT_EXPR;
2345 case COMPCODE_UNGE:
2346 return UNGE_EXPR;
2347 default:
2348 gcc_unreachable ();
2352 /* Return a tree for the comparison which is the combination of
2353 doing the AND or OR (depending on CODE) of the two operations LCODE
2354 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2355 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2356 if this makes the transformation invalid. */
2358 tree
2359 combine_comparisons (enum tree_code code, enum tree_code lcode,
2360 enum tree_code rcode, tree truth_type,
2361 tree ll_arg, tree lr_arg)
2363 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2364 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2365 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2366 enum comparison_code compcode;
2368 switch (code)
2370 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2371 compcode = lcompcode & rcompcode;
2372 break;
2374 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2375 compcode = lcompcode | rcompcode;
2376 break;
2378 default:
2379 return NULL_TREE;
2382 if (!honor_nans)
2384 /* Eliminate unordered comparisons, as well as LTGT and ORD
2385 which are not used unless the mode has NaNs. */
2386 compcode &= ~COMPCODE_UNORD;
2387 if (compcode == COMPCODE_LTGT)
2388 compcode = COMPCODE_NE;
2389 else if (compcode == COMPCODE_ORD)
2390 compcode = COMPCODE_TRUE;
2392 else if (flag_trapping_math)
2394 /* Check that the original operation and the optimized ones will trap
2395 under the same condition. */
2396 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2397 && (lcompcode != COMPCODE_EQ)
2398 && (lcompcode != COMPCODE_ORD);
2399 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2400 && (rcompcode != COMPCODE_EQ)
2401 && (rcompcode != COMPCODE_ORD);
2402 bool trap = (compcode & COMPCODE_UNORD) == 0
2403 && (compcode != COMPCODE_EQ)
2404 && (compcode != COMPCODE_ORD);
2406 /* In a short-circuited boolean expression the LHS might be
2407 such that the RHS, if evaluated, will never trap. For
2408 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2409 if neither x nor y is NaN. (This is a mixed blessing: for
2410 example, the expression above will never trap, hence
2411 optimizing it to x < y would be invalid). */
2412 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2413 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2414 rtrap = false;
2416 /* If the comparison was short-circuited, and only the RHS
2417 trapped, we may now generate a spurious trap. */
2418 if (rtrap && !ltrap
2419 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2420 return NULL_TREE;
2422 /* If we changed the conditions that cause a trap, we lose. */
2423 if ((ltrap || rtrap) != trap)
2424 return NULL_TREE;
2427 if (compcode == COMPCODE_TRUE)
2428 return constant_boolean_node (true, truth_type);
2429 else if (compcode == COMPCODE_FALSE)
2430 return constant_boolean_node (false, truth_type);
2431 else
2432 return fold_build2 (compcode_to_comparison (compcode),
2433 truth_type, ll_arg, lr_arg);
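/* Illustrative sketch, not part of fold-const.c: with LT, EQ, GT and
   UNORD as independent bits, ANDing or ORing two comparisons of the
   same operands is just bitwise AND/OR of their codes.  The constants
   below are assumed to mirror the comparison_code encoding this file
   uses; guarded out of the build.  */
#if 0
enum { CC_LT = 1, CC_EQ = 2, CC_GT = 4 };
enum { CC_LE = CC_LT | CC_EQ, CC_GE = CC_GT | CC_EQ };

int
main (void)
{
  int and_code = CC_LE & CC_GE;	/* (x <= y) && (x >= y)  =>  x == y.  */
  int or_code = CC_LT | CC_EQ;	/* (x < y) || (x == y)   =>  x <= y.  */
  return !(and_code == CC_EQ && or_code == CC_LE);
}
#endif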
2436 /* Return nonzero if CODE is a tree code that represents a truth value. */
2438 static int
2439 truth_value_p (enum tree_code code)
2441 return (TREE_CODE_CLASS (code) == tcc_comparison
2442 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2443 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2444 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2447 /* Return nonzero if two operands (typically of the same tree node)
2448 are necessarily equal. If either argument has side-effects this
2449 function returns zero. FLAGS modifies behavior as follows:
2451 If OEP_ONLY_CONST is set, only return nonzero for constants.
2452 This function tests whether the operands are indistinguishable;
2453 it does not test whether they are equal using C's == operation.
2454 The distinction is important for IEEE floating point, because
2455 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2456 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2458 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2459 even though it may hold multiple values during a function.
2460 This is because a GCC tree node guarantees that nothing else is
2461 executed between the evaluation of its "operands" (which may often
2462 be evaluated in arbitrary order). Hence if the operands themselves
2463 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2464 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2465 unset means assuming isochronic (or instantaneous) tree equivalence.
2466 Unless comparing arbitrary expression trees, such as from different
2467 statements, this flag can usually be left unset.
2469 If OEP_PURE_SAME is set, then pure functions with identical arguments
2470 are considered the same. It is used when the caller has other ways
2471 to ensure that global memory is unchanged in between. */
2473 int
2474 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2476 /* If either is ERROR_MARK, they aren't equal. */
2477 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2478 return 0;
2480 /* If both types don't have the same signedness, then we can't consider
2481 them equal. We must check this before the STRIP_NOPS calls
2482 because they may change the signedness of the arguments. */
2483 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2484 return 0;
2486 /* If both types don't have the same precision, then it is not safe
2487 to strip NOPs. */
2488 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2489 return 0;
2491 STRIP_NOPS (arg0);
2492 STRIP_NOPS (arg1);
2494 /* In case both args are comparisons but with different comparison
2495 code, try to swap the comparison operands of one arg to produce
2496 a match and compare that variant. */
2497 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2498 && COMPARISON_CLASS_P (arg0)
2499 && COMPARISON_CLASS_P (arg1))
2501 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2503 if (TREE_CODE (arg0) == swap_code)
2504 return operand_equal_p (TREE_OPERAND (arg0, 0),
2505 TREE_OPERAND (arg1, 1), flags)
2506 && operand_equal_p (TREE_OPERAND (arg0, 1),
2507 TREE_OPERAND (arg1, 0), flags);
2510 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2511 /* This is needed for conversions and for COMPONENT_REF.
2512 Might as well play it safe and always test this. */
2513 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2514 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2515 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2516 return 0;
2518 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2519 We don't care about side effects in that case because the SAVE_EXPR
2520 takes care of that for us. In all other cases, two expressions are
2521 equal if they have no side effects. If we have two identical
2522 expressions with side effects that should be treated the same due
2523 to the only side effects being identical SAVE_EXPR's, that will
2524 be detected in the recursive calls below. */
2525 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2526 && (TREE_CODE (arg0) == SAVE_EXPR
2527 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2528 return 1;
2530 /* Next handle constant cases, those for which we can return 1 even
2531 if ONLY_CONST is set. */
2532 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2533 switch (TREE_CODE (arg0))
2535 case INTEGER_CST:
2536 return (! TREE_CONSTANT_OVERFLOW (arg0)
2537 && ! TREE_CONSTANT_OVERFLOW (arg1)
2538 && tree_int_cst_equal (arg0, arg1));
2540 case REAL_CST:
2541 return (! TREE_CONSTANT_OVERFLOW (arg0)
2542 && ! TREE_CONSTANT_OVERFLOW (arg1)
2543 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2544 TREE_REAL_CST (arg1)));
2546 case VECTOR_CST:
2548 tree v1, v2;
2550 if (TREE_CONSTANT_OVERFLOW (arg0)
2551 || TREE_CONSTANT_OVERFLOW (arg1))
2552 return 0;
2554 v1 = TREE_VECTOR_CST_ELTS (arg0);
2555 v2 = TREE_VECTOR_CST_ELTS (arg1);
2556 while (v1 && v2)
2558 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2559 flags))
2560 return 0;
2561 v1 = TREE_CHAIN (v1);
2562 v2 = TREE_CHAIN (v2);
2565 return v1 == v2;
2568 case COMPLEX_CST:
2569 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2570 flags)
2571 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2572 flags));
2574 case STRING_CST:
2575 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2576 && ! memcmp (TREE_STRING_POINTER (arg0),
2577 TREE_STRING_POINTER (arg1),
2578 TREE_STRING_LENGTH (arg0)));
2580 case ADDR_EXPR:
2581 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2583 default:
2584 break;
2587 if (flags & OEP_ONLY_CONST)
2588 return 0;
2590 /* Define macros to test an operand from arg0 and arg1 for equality and a
2591 variant that allows null and views null as being different from any
2592 non-null value. In the latter case, if either is null, then both
2593 must be; otherwise, do the normal comparison. */
2594 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2595 TREE_OPERAND (arg1, N), flags)
2597 #define OP_SAME_WITH_NULL(N) \
2598 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2599 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2601 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2603 case tcc_unary:
2604 /* Two conversions are equal only if signedness and modes match. */
2605 switch (TREE_CODE (arg0))
2607 case NOP_EXPR:
2608 case CONVERT_EXPR:
2609 case FIX_CEIL_EXPR:
2610 case FIX_TRUNC_EXPR:
2611 case FIX_FLOOR_EXPR:
2612 case FIX_ROUND_EXPR:
2613 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2614 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2615 return 0;
2616 break;
2617 default:
2618 break;
2621 return OP_SAME (0);
2624 case tcc_comparison:
2625 case tcc_binary:
2626 if (OP_SAME (0) && OP_SAME (1))
2627 return 1;
2629 /* For commutative ops, allow the other order. */
2630 return (commutative_tree_code (TREE_CODE (arg0))
2631 && operand_equal_p (TREE_OPERAND (arg0, 0),
2632 TREE_OPERAND (arg1, 1), flags)
2633 && operand_equal_p (TREE_OPERAND (arg0, 1),
2634 TREE_OPERAND (arg1, 0), flags));
2636 case tcc_reference:
2637 /* If either of the pointer (or reference) expressions we are
2638 dereferencing contain a side effect, these cannot be equal. */
2639 if (TREE_SIDE_EFFECTS (arg0)
2640 || TREE_SIDE_EFFECTS (arg1))
2641 return 0;
2643 switch (TREE_CODE (arg0))
2645 case INDIRECT_REF:
2646 case ALIGN_INDIRECT_REF:
2647 case MISALIGNED_INDIRECT_REF:
2648 case REALPART_EXPR:
2649 case IMAGPART_EXPR:
2650 return OP_SAME (0);
2652 case ARRAY_REF:
2653 case ARRAY_RANGE_REF:
2654 /* Operands 2 and 3 may be null. */
2655 return (OP_SAME (0)
2656 && OP_SAME (1)
2657 && OP_SAME_WITH_NULL (2)
2658 && OP_SAME_WITH_NULL (3));
2660 case COMPONENT_REF:
2661 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2662 may be NULL when we're called to compare MEM_EXPRs. */
2663 return OP_SAME_WITH_NULL (0)
2664 && OP_SAME (1)
2665 && OP_SAME_WITH_NULL (2);
2667 case BIT_FIELD_REF:
2668 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2670 default:
2671 return 0;
2674 case tcc_expression:
2675 switch (TREE_CODE (arg0))
2677 case ADDR_EXPR:
2678 case TRUTH_NOT_EXPR:
2679 return OP_SAME (0);
2681 case TRUTH_ANDIF_EXPR:
2682 case TRUTH_ORIF_EXPR:
2683 return OP_SAME (0) && OP_SAME (1);
2685 case TRUTH_AND_EXPR:
2686 case TRUTH_OR_EXPR:
2687 case TRUTH_XOR_EXPR:
2688 if (OP_SAME (0) && OP_SAME (1))
2689 return 1;
2691 /* Otherwise take into account this is a commutative operation. */
2692 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2693 TREE_OPERAND (arg1, 1), flags)
2694 && operand_equal_p (TREE_OPERAND (arg0, 1),
2695 TREE_OPERAND (arg1, 0), flags));
2697 case CALL_EXPR:
2698 /* If the CALL_EXPRs call different functions, then they
2699 clearly can not be equal. */
2700 if (!OP_SAME (0))
2701 return 0;
2704 unsigned int cef = call_expr_flags (arg0);
2705 if (flags & OEP_PURE_SAME)
2706 cef &= ECF_CONST | ECF_PURE;
2707 else
2708 cef &= ECF_CONST;
2709 if (!cef)
2710 return 0;
2713 /* Now see if all the arguments are the same. operand_equal_p
2714 does not handle TREE_LIST, so we walk the operands here
2715 feeding them to operand_equal_p. */
2716 arg0 = TREE_OPERAND (arg0, 1);
2717 arg1 = TREE_OPERAND (arg1, 1);
2718 while (arg0 && arg1)
2720 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2721 flags))
2722 return 0;
2724 arg0 = TREE_CHAIN (arg0);
2725 arg1 = TREE_CHAIN (arg1);
2728 /* If we get here and both argument lists are exhausted
2729 then the CALL_EXPRs are equal. */
2730 return ! (arg0 || arg1);
2732 default:
2733 return 0;
2736 case tcc_declaration:
2737 /* Consider __builtin_sqrt equal to sqrt. */
2738 return (TREE_CODE (arg0) == FUNCTION_DECL
2739 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2740 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2741 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2743 default:
2744 return 0;
2747 #undef OP_SAME
2748 #undef OP_SAME_WITH_NULL
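/* Illustrative sketch, not part of fold-const.c: why "indistinguishable"
   differs from C's == for IEEE values, as the comment above
   operand_equal_p explains.  Guarded out of the build.  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0, n = NAN;
  printf ("-0.0 == 0.0      : %d\n", nz == pz);	/* 1, yet they differ...  */
  printf ("sign bits differ : %d\n", signbit (nz) != 0 && !signbit (pz)); /* 1.  */
  printf ("NaN == NaN       : %d\n", n == n);	/* 0, even for identical NaNs.  */
  return 0;
}
#endif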
2751 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2752 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2754 When in doubt, return 0. */
2756 static int
2757 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2759 int unsignedp1, unsignedpo;
2760 tree primarg0, primarg1, primother;
2761 unsigned int correct_width;
2763 if (operand_equal_p (arg0, arg1, 0))
2764 return 1;
2766 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2767 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2768 return 0;
2770 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2771 and see if the inner values are the same. This removes any
2772 signedness comparison, which doesn't matter here. */
2773 primarg0 = arg0, primarg1 = arg1;
2774 STRIP_NOPS (primarg0);
2775 STRIP_NOPS (primarg1);
2776 if (operand_equal_p (primarg0, primarg1, 0))
2777 return 1;
2779 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2780 actual comparison operand, ARG0.
2782 First throw away any conversions to wider types
2783 already present in the operands. */
2785 primarg1 = get_narrower (arg1, &unsignedp1);
2786 primother = get_narrower (other, &unsignedpo);
2788 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2789 if (unsignedp1 == unsignedpo
2790 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2791 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2793 tree type = TREE_TYPE (arg0);
2795 /* Make sure shorter operand is extended the right way
2796 to match the longer operand. */
2797 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2798 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2800 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2801 return 1;
2804 return 0;
2807 /* See if ARG is an expression that is either a comparison or is performing
2808 arithmetic on comparisons. The comparisons must only be comparing
2809 two different values, which will be stored in *CVAL1 and *CVAL2; if
2810 they are nonzero it means that some operands have already been found.
2811 No variables may be used anywhere else in the expression except in the
2812 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2813 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2815 If this is true, return 1. Otherwise, return zero. */
2817 static int
2818 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2820 enum tree_code code = TREE_CODE (arg);
2821 enum tree_code_class class = TREE_CODE_CLASS (code);
2823 /* We can handle some of the tcc_expression cases here. */
2824 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2825 class = tcc_unary;
2826 else if (class == tcc_expression
2827 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2828 || code == COMPOUND_EXPR))
2829 class = tcc_binary;
2831 else if (class == tcc_expression && code == SAVE_EXPR
2832 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2834 /* If we've already found a CVAL1 or CVAL2, this expression is
2835 too complex to handle. */
2836 if (*cval1 || *cval2)
2837 return 0;
2839 class = tcc_unary;
2840 *save_p = 1;
2843 switch (class)
2845 case tcc_unary:
2846 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2848 case tcc_binary:
2849 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2850 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2851 cval1, cval2, save_p));
2853 case tcc_constant:
2854 return 1;
2856 case tcc_expression:
2857 if (code == COND_EXPR)
2858 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2859 cval1, cval2, save_p)
2860 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2861 cval1, cval2, save_p)
2862 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2863 cval1, cval2, save_p));
2864 return 0;
2866 case tcc_comparison:
2867 /* First see if we can handle the first operand, then the second. For
2868 the second operand, we know *CVAL1 can't be zero. It must be that
2869 one side of the comparison is each of the values; test for the
2870 case where this isn't true by failing if the two operands
2871 are the same. */
2873 if (operand_equal_p (TREE_OPERAND (arg, 0),
2874 TREE_OPERAND (arg, 1), 0))
2875 return 0;
2877 if (*cval1 == 0)
2878 *cval1 = TREE_OPERAND (arg, 0);
2879 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2881 else if (*cval2 == 0)
2882 *cval2 = TREE_OPERAND (arg, 0);
2883 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2885 else
2886 return 0;
2888 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2890 else if (*cval2 == 0)
2891 *cval2 = TREE_OPERAND (arg, 1);
2892 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2894 else
2895 return 0;
2897 return 1;
2899 default:
2900 return 0;
2904 /* ARG is a tree that is known to contain just arithmetic operations and
2905 comparisons. Evaluate the operations in the tree substituting NEW0 for
2906 any occurrence of OLD0 as an operand of a comparison and likewise for
2907 NEW1 and OLD1. */
2909 static tree
2910 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2912 tree type = TREE_TYPE (arg);
2913 enum tree_code code = TREE_CODE (arg);
2914 enum tree_code_class class = TREE_CODE_CLASS (code);
2916 /* We can handle some of the tcc_expression cases here. */
2917 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2918 class = tcc_unary;
2919 else if (class == tcc_expression
2920 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2921 class = tcc_binary;
2923 switch (class)
2925 case tcc_unary:
2926 return fold_build1 (code, type,
2927 eval_subst (TREE_OPERAND (arg, 0),
2928 old0, new0, old1, new1));
2930 case tcc_binary:
2931 return fold_build2 (code, type,
2932 eval_subst (TREE_OPERAND (arg, 0),
2933 old0, new0, old1, new1),
2934 eval_subst (TREE_OPERAND (arg, 1),
2935 old0, new0, old1, new1));
2937 case tcc_expression:
2938 switch (code)
2940 case SAVE_EXPR:
2941 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2943 case COMPOUND_EXPR:
2944 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2946 case COND_EXPR:
2947 return fold_build3 (code, type,
2948 eval_subst (TREE_OPERAND (arg, 0),
2949 old0, new0, old1, new1),
2950 eval_subst (TREE_OPERAND (arg, 1),
2951 old0, new0, old1, new1),
2952 eval_subst (TREE_OPERAND (arg, 2),
2953 old0, new0, old1, new1));
2954 default:
2955 break;
2957 /* Fall through - ??? */
2959 case tcc_comparison:
2961 tree arg0 = TREE_OPERAND (arg, 0);
2962 tree arg1 = TREE_OPERAND (arg, 1);
2964 /* We need to check both for exact equality and tree equality. The
2965 former will be true if the operand has a side-effect. In that
2966 case, we know the operand occurred exactly once. */
2968 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2969 arg0 = new0;
2970 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2971 arg0 = new1;
2973 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2974 arg1 = new0;
2975 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2976 arg1 = new1;
2978 return fold_build2 (code, type, arg0, arg1);
2981 default:
2982 return arg;
2986 /* Return a tree for the case when the result of an expression is RESULT
2987 converted to TYPE and OMITTED was previously an operand of the expression
2988 but is now not needed (e.g., we folded OMITTED * 0).
2990 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2991 the conversion of RESULT to TYPE. */
2993 tree
2994 omit_one_operand (tree type, tree result, tree omitted)
2996 tree t = fold_convert (type, result);
2998 if (TREE_SIDE_EFFECTS (omitted))
2999 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3001 return non_lvalue (t);
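/* Illustrative sketch, not part of fold-const.c: the source-level effect
   of omit_one_operand.  Folding "f () * 0" down to plain 0 would drop
   f ()'s side effects, so the result is the C equivalent of a
   COMPOUND_EXPR, "(f (), 0)".  Names are made up; guarded out of the
   build.  */
#if 0
static int calls;
static int f (void) { return ++calls; }

int
main (void)
{
  int r = (f (), 0);	/* What "f () * 0" folds to: f still runs.  */
  return !(r == 0 && calls == 1);
}
#endif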
3004 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3006 static tree
3007 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3009 tree t = fold_convert (type, result);
3011 if (TREE_SIDE_EFFECTS (omitted))
3012 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3014 return pedantic_non_lvalue (t);
3017 /* Return a tree for the case when the result of an expression is RESULT
3018 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3019 of the expression but are now not needed.
3021 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3022 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3023 evaluated before OMITTED2. Otherwise, if neither has side effects,
3024 just do the conversion of RESULT to TYPE. */
3026 tree
3027 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3029 tree t = fold_convert (type, result);
3031 if (TREE_SIDE_EFFECTS (omitted2))
3032 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3033 if (TREE_SIDE_EFFECTS (omitted1))
3034 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3036 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3040 /* Return a simplified tree node for the truth-negation of ARG. This
3041 never alters ARG itself. We assume that ARG is an operation that
3042 returns a truth value (0 or 1).
3044 FIXME: one would think we would fold the result, but it causes
3045 problems with the dominator optimizer. */
3047 tree
3048 fold_truth_not_expr (tree arg)
3050 tree type = TREE_TYPE (arg);
3051 enum tree_code code = TREE_CODE (arg);
3053 /* If this is a comparison, we can simply invert it, except for
3054 floating-point non-equality comparisons, in which case we just
3055 enclose a TRUTH_NOT_EXPR around what we have. */
3057 if (TREE_CODE_CLASS (code) == tcc_comparison)
3059 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3060 if (FLOAT_TYPE_P (op_type)
3061 && flag_trapping_math
3062 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3063 && code != NE_EXPR && code != EQ_EXPR)
3064 return NULL_TREE;
3065 else
3067 code = invert_tree_comparison (code,
3068 HONOR_NANS (TYPE_MODE (op_type)));
3069 if (code == ERROR_MARK)
3070 return NULL_TREE;
3071 else
3072 return build2 (code, type,
3073 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3077 switch (code)
3079 case INTEGER_CST:
3080 return constant_boolean_node (integer_zerop (arg), type);
3082 case TRUTH_AND_EXPR:
3083 return build2 (TRUTH_OR_EXPR, type,
3084 invert_truthvalue (TREE_OPERAND (arg, 0)),
3085 invert_truthvalue (TREE_OPERAND (arg, 1)));
3087 case TRUTH_OR_EXPR:
3088 return build2 (TRUTH_AND_EXPR, type,
3089 invert_truthvalue (TREE_OPERAND (arg, 0)),
3090 invert_truthvalue (TREE_OPERAND (arg, 1)));
3092 case TRUTH_XOR_EXPR:
3093 /* Here we can invert either operand. We invert the first operand
3094 unless the second operand is a TRUTH_NOT_EXPR in which case our
3095 result is the XOR of the first operand with the inside of the
3096 negation of the second operand. */
3098 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3099 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3100 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3101 else
3102 return build2 (TRUTH_XOR_EXPR, type,
3103 invert_truthvalue (TREE_OPERAND (arg, 0)),
3104 TREE_OPERAND (arg, 1));
3106 case TRUTH_ANDIF_EXPR:
3107 return build2 (TRUTH_ORIF_EXPR, type,
3108 invert_truthvalue (TREE_OPERAND (arg, 0)),
3109 invert_truthvalue (TREE_OPERAND (arg, 1)));
3111 case TRUTH_ORIF_EXPR:
3112 return build2 (TRUTH_ANDIF_EXPR, type,
3113 invert_truthvalue (TREE_OPERAND (arg, 0)),
3114 invert_truthvalue (TREE_OPERAND (arg, 1)));
3116 case TRUTH_NOT_EXPR:
3117 return TREE_OPERAND (arg, 0);
3119 case COND_EXPR:
3121 tree arg1 = TREE_OPERAND (arg, 1);
3122 tree arg2 = TREE_OPERAND (arg, 2);
3123 /* A COND_EXPR may have a throw as one operand, which
3124 then has void type. Just leave void operands
3125 as they are. */
3126 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3127 VOID_TYPE_P (TREE_TYPE (arg1))
3128 ? arg1 : invert_truthvalue (arg1),
3129 VOID_TYPE_P (TREE_TYPE (arg2))
3130 ? arg2 : invert_truthvalue (arg2));
3133 case COMPOUND_EXPR:
3134 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3135 invert_truthvalue (TREE_OPERAND (arg, 1)));
3137 case NON_LVALUE_EXPR:
3138 return invert_truthvalue (TREE_OPERAND (arg, 0));
3140 case NOP_EXPR:
3141 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3142 return build1 (TRUTH_NOT_EXPR, type, arg);
3144 case CONVERT_EXPR:
3145 case FLOAT_EXPR:
3146 return build1 (TREE_CODE (arg), type,
3147 invert_truthvalue (TREE_OPERAND (arg, 0)));
3149 case BIT_AND_EXPR:
3150 if (!integer_onep (TREE_OPERAND (arg, 1)))
3151 break;
3152 return build2 (EQ_EXPR, type, arg,
3153 build_int_cst (type, 0));
3155 case SAVE_EXPR:
3156 return build1 (TRUTH_NOT_EXPR, type, arg);
3158 case CLEANUP_POINT_EXPR:
3159 return build1 (CLEANUP_POINT_EXPR, type,
3160 invert_truthvalue (TREE_OPERAND (arg, 0)));
3162 default:
3163 break;
3166 return NULL_TREE;
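/* Illustrative sketch, not part of fold-const.c: the De Morgan and XOR
   rewrites used above, checked exhaustively on 0/1 truth values.
   Guarded out of the build.  */
#if 0
int
main (void)
{
  int a, b;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      {
	if (!(a && b) != (!a || !b))	/* TRUTH_ANDIF_EXPR -> TRUTH_ORIF_EXPR.  */
	  return 1;
	if (!(a || b) != (!a && !b))	/* TRUTH_ORIF_EXPR -> TRUTH_ANDIF_EXPR.  */
	  return 1;
	if (!(a ^ b) != ((!a) ^ b))	/* TRUTH_XOR_EXPR: invert one operand.  */
	  return 1;
      }
  return 0;
}
#endif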
3169 /* Return a simplified tree node for the truth-negation of ARG. This
3170 never alters ARG itself. We assume that ARG is an operation that
3171 returns a truth value (0 or 1).
3173 FIXME: one would think we would fold the result, but it causes
3174 problems with the dominator optimizer. */
3176 tree
3177 invert_truthvalue (tree arg)
3179 tree tem;
3181 if (TREE_CODE (arg) == ERROR_MARK)
3182 return arg;
3184 tem = fold_truth_not_expr (arg);
3185 if (!tem)
3186 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3188 return tem;
3191 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3192 operands are another bit-wise operation with a common input. If so,
3193 distribute the bit operations to save an operation and possibly two if
3194 constants are involved. For example, convert
3195 (A | B) & (A | C) into A | (B & C)
3196 Further simplification will occur if B and C are constants.
3198 If this optimization cannot be done, 0 will be returned. */
3200 static tree
3201 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3203 tree common;
3204 tree left, right;
3206 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3207 || TREE_CODE (arg0) == code
3208 || (TREE_CODE (arg0) != BIT_AND_EXPR
3209 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3210 return 0;
3212 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3214 common = TREE_OPERAND (arg0, 0);
3215 left = TREE_OPERAND (arg0, 1);
3216 right = TREE_OPERAND (arg1, 1);
3218 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3220 common = TREE_OPERAND (arg0, 0);
3221 left = TREE_OPERAND (arg0, 1);
3222 right = TREE_OPERAND (arg1, 0);
3224 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3226 common = TREE_OPERAND (arg0, 1);
3227 left = TREE_OPERAND (arg0, 0);
3228 right = TREE_OPERAND (arg1, 1);
3230 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3232 common = TREE_OPERAND (arg0, 1);
3233 left = TREE_OPERAND (arg0, 0);
3234 right = TREE_OPERAND (arg1, 0);
3236 else
3237 return 0;
3239 return fold_build2 (TREE_CODE (arg0), type, common,
3240 fold_build2 (code, type, left, right));
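/* Illustrative sketch, not part of fold-const.c: the distribution law
   applied above, verified bitwise over small operands.  Guarded out of
   the build.  */
#if 0
int
main (void)
{
  unsigned a, b, c;
  for (a = 0; a < 8; a++)
    for (b = 0; b < 8; b++)
      for (c = 0; c < 8; c++)
	{
	  if (((a | b) & (a | c)) != (a | (b & c)))	/* AND of IORs.  */
	    return 1;
	  if (((a & b) | (a & c)) != (a & (b | c)))	/* IOR of ANDs.  */
	    return 1;
	}
  return 0;
}
#endif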
3243 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3244 with code CODE. This optimization is unsafe. */
3245 static tree
3246 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3248 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3249 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3251 /* (A / C) +- (B / C) -> (A +- B) / C. */
3252 if (mul0 == mul1
3253 && operand_equal_p (TREE_OPERAND (arg0, 1),
3254 TREE_OPERAND (arg1, 1), 0))
3255 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3256 fold_build2 (code, type,
3257 TREE_OPERAND (arg0, 0),
3258 TREE_OPERAND (arg1, 0)),
3259 TREE_OPERAND (arg0, 1));
3261 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3262 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3263 TREE_OPERAND (arg1, 0), 0)
3264 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3265 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3267 REAL_VALUE_TYPE r0, r1;
3268 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3269 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3270 if (!mul0)
3271 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3272 if (!mul1)
3273 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3274 real_arithmetic (&r0, code, &r0, &r1);
3275 return fold_build2 (MULT_EXPR, type,
3276 TREE_OPERAND (arg0, 0),
3277 build_real (type, r0));
3280 return NULL_TREE;
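/* Illustrative sketch, not part of fold-const.c: why the second rewrite
   above is unsafe.  x/3 + x/6 becomes x * (1/3 + 1/6), but 1/3 and 1/6
   are rounded before they are added, so the folded form can differ from
   the original in the low bits; the values below are arbitrary.
   Guarded out of the build.  */
#if 0
#include <stdio.h>

int
main (void)
{
  double x = 1e17;
  double direct = x / 3.0 + x / 6.0;
  double folded = x * (1.0 / 3.0 + 1.0 / 6.0);
  printf ("%.17g vs %.17g\n", direct, folded);	/* May not be identical.  */
  return 0;
}
#endif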
3283 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3284 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3286 static tree
3287 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3288 int unsignedp)
3290 tree result;
3292 if (bitpos == 0)
3294 tree size = TYPE_SIZE (TREE_TYPE (inner));
3295 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3296 || POINTER_TYPE_P (TREE_TYPE (inner)))
3297 && host_integerp (size, 0)
3298 && tree_low_cst (size, 0) == bitsize)
3299 return fold_convert (type, inner);
3302 result = build3 (BIT_FIELD_REF, type, inner,
3303 size_int (bitsize), bitsize_int (bitpos));
3305 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3307 return result;
3310 /* Optimize a bit-field compare.
3312 There are two cases: First is a compare against a constant and the
3313 second is a comparison of two items where the fields are at the same
3314 bit position relative to the start of a chunk (byte, halfword, word)
3315 large enough to contain it. In these cases we can avoid the shift
3316 implicit in bitfield extractions.
3318 For constants, we emit a compare of the shifted constant with the
3319 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3320 compared. For two fields at the same position, we do the ANDs with the
3321 similar mask and compare the result of the ANDs.
3323 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3324 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3325 are the left and right operands of the comparison, respectively.
3327 If the optimization described above can be done, we return the resulting
3328 tree. Otherwise we return zero. */
3330 static tree
3331 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3332 tree lhs, tree rhs)
3334 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3335 tree type = TREE_TYPE (lhs);
3336 tree signed_type, unsigned_type;
3337 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3338 enum machine_mode lmode, rmode, nmode;
3339 int lunsignedp, runsignedp;
3340 int lvolatilep = 0, rvolatilep = 0;
3341 tree linner, rinner = NULL_TREE;
3342 tree mask;
3343 tree offset;
3345 /* Get all the information about the extractions being done. If the bit size
3346 is the same as the size of the underlying object, we aren't doing an
3347 extraction at all and so can do nothing. We also don't want to
3348 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3349 then will no longer be able to replace it. */
3350 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3351 &lunsignedp, &lvolatilep, false);
3352 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3353 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3354 return 0;
3356 if (!const_p)
3358 /* If this is not a constant, we can only do something if bit positions,
3359 sizes, and signedness are the same. */
3360 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3361 &runsignedp, &rvolatilep, false);
3363 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3364 || lunsignedp != runsignedp || offset != 0
3365 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3366 return 0;
3369 /* See if we can find a mode to refer to this field. We should be able to,
3370 but fail if we can't. */
3371 nmode = get_best_mode (lbitsize, lbitpos,
3372 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3373 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3374 TYPE_ALIGN (TREE_TYPE (rinner))),
3375 word_mode, lvolatilep || rvolatilep);
3376 if (nmode == VOIDmode)
3377 return 0;
3379 /* Set signed and unsigned types of the precision of this mode for the
3380 shifts below. */
3381 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3382 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3384 /* Compute the bit position and size for the new reference and our offset
3385 within it. If the new reference is the same size as the original, we
3386 won't optimize anything, so return zero. */
3387 nbitsize = GET_MODE_BITSIZE (nmode);
3388 nbitpos = lbitpos & ~ (nbitsize - 1);
3389 lbitpos -= nbitpos;
3390 if (nbitsize == lbitsize)
3391 return 0;
3393 if (BYTES_BIG_ENDIAN)
3394 lbitpos = nbitsize - lbitsize - lbitpos;
3396 /* Make the mask to be used against the extracted field. */
3397 mask = build_int_cst (unsigned_type, -1);
3398 mask = force_fit_type (mask, 0, false, false);
3399 mask = fold_convert (unsigned_type, mask);
3400 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3401 mask = const_binop (RSHIFT_EXPR, mask,
3402 size_int (nbitsize - lbitsize - lbitpos), 0);
3404 if (! const_p)
3405 /* If not comparing with constant, just rework the comparison
3406 and return. */
3407 return build2 (code, compare_type,
3408 build2 (BIT_AND_EXPR, unsigned_type,
3409 make_bit_field_ref (linner, unsigned_type,
3410 nbitsize, nbitpos, 1),
3411 mask),
3412 build2 (BIT_AND_EXPR, unsigned_type,
3413 make_bit_field_ref (rinner, unsigned_type,
3414 nbitsize, nbitpos, 1),
3415 mask));
3417 /* Otherwise, we are handling the constant case. See if the constant is too
3418 big for the field. Warn and return a tree for 0 (false) if so. We do
3419 this not only for its own sake, but to avoid having to test for this
3420 error case below. If we didn't, we might generate wrong code.
3422 For unsigned fields, the constant shifted right by the field length should
3423 be all zero. For signed fields, the high-order bits should agree with
3424 the sign bit. */
3426 if (lunsignedp)
3428 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3429 fold_convert (unsigned_type, rhs),
3430 size_int (lbitsize), 0)))
3432 warning (0, "comparison is always %d due to width of bit-field",
3433 code == NE_EXPR);
3434 return constant_boolean_node (code == NE_EXPR, compare_type);
3437 else
3439 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3440 size_int (lbitsize - 1), 0);
3441 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3443 warning (0, "comparison is always %d due to width of bit-field",
3444 code == NE_EXPR);
3445 return constant_boolean_node (code == NE_EXPR, compare_type);
3449 /* Single-bit compares should always be against zero. */
3450 if (lbitsize == 1 && ! integer_zerop (rhs))
3452 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3453 rhs = build_int_cst (type, 0);
3456 /* Make a new bitfield reference, shift the constant over the
3457 appropriate number of bits and mask it with the computed mask
3458 (in case this was a signed field). If we changed it, make a new one. */
3459 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3460 if (lvolatilep)
3462 TREE_SIDE_EFFECTS (lhs) = 1;
3463 TREE_THIS_VOLATILE (lhs) = 1;
3466 rhs = const_binop (BIT_AND_EXPR,
3467 const_binop (LSHIFT_EXPR,
3468 fold_convert (unsigned_type, rhs),
3469 size_int (lbitpos), 0),
3470 mask, 0);
3472 return build2 (code, compare_type,
3473 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3474 rhs);
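/* Illustrative sketch, not part of fold-const.c: the shape of the
   rewrite in plain C.  To compare a 3-bit field at bit 4 against the
   constant 5, shift the constant into place once and mask the
   containing word, instead of extracting (shifting) the field at run
   time.  All numbers are made up for illustration; guarded out of the
   build.  */
#if 0
int
main (void)
{
  unsigned word = 0x53;			/* Bits 4..6 hold the value 5.  */
  unsigned mask = 7u << 4;		/* Field-sized mask, pre-shifted.  */
  unsigned shifted_rhs = 5u << 4;	/* The constant, pre-shifted.  */
  return !((word & mask) == shifted_rhs);
}
#endif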
3477 /* Subroutine for fold_truthop: decode a field reference.
3479 If EXP is a comparison reference, we return the innermost reference.
3481 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3482 set to the starting bit number.
3484 If the innermost field can be completely contained in a mode-sized
3485 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3487 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3488 otherwise it is not changed.
3490 *PUNSIGNEDP is set to the signedness of the field.
3492 *PMASK is set to the mask used. This is either contained in a
3493 BIT_AND_EXPR or derived from the width of the field.
3495 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3497 Return 0 if this is not a component reference or is one that we can't
3498 do anything with. */
3500 static tree
3501 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3502 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3503 int *punsignedp, int *pvolatilep,
3504 tree *pmask, tree *pand_mask)
3506 tree outer_type = 0;
3507 tree and_mask = 0;
3508 tree mask, inner, offset;
3509 tree unsigned_type;
3510 unsigned int precision;
3512 /* All the optimizations using this function assume integer fields.
3513 There are problems with FP fields since the type_for_size call
3514 below can fail for, e.g., XFmode. */
3515 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3516 return 0;
3518 /* We are interested in the bare arrangement of bits, so strip everything
3519 that doesn't affect the machine mode. However, record the type of the
3520 outermost expression if it may matter below. */
3521 if (TREE_CODE (exp) == NOP_EXPR
3522 || TREE_CODE (exp) == CONVERT_EXPR
3523 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3524 outer_type = TREE_TYPE (exp);
3525 STRIP_NOPS (exp);
3527 if (TREE_CODE (exp) == BIT_AND_EXPR)
3529 and_mask = TREE_OPERAND (exp, 1);
3530 exp = TREE_OPERAND (exp, 0);
3531 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3532 if (TREE_CODE (and_mask) != INTEGER_CST)
3533 return 0;
3536 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3537 punsignedp, pvolatilep, false);
3538 if ((inner == exp && and_mask == 0)
3539 || *pbitsize < 0 || offset != 0
3540 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3541 return 0;
3543 /* If the number of bits in the reference is the same as the bitsize of
3544 the outer type, then the outer type gives the signedness. Otherwise
3545 (in case of a small bitfield) the signedness is unchanged. */
3546 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3547 *punsignedp = TYPE_UNSIGNED (outer_type);
3549 /* Compute the mask to access the bitfield. */
3550 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3551 precision = TYPE_PRECISION (unsigned_type);
3553 mask = build_int_cst (unsigned_type, -1);
3554 mask = force_fit_type (mask, 0, false, false);
3556 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3557 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3559 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3560 if (and_mask != 0)
3561 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3562 fold_convert (unsigned_type, and_mask), mask);
3564 *pmask = mask;
3565 *pand_mask = and_mask;
3566 return inner;
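/* Illustrative sketch, not part of fold-const.c: the left-then-right
   shift trick used above to build the field mask, on a plain unsigned
   int.  Starting from all ones, shifting left and then right by
   (precision - size) leaves exactly SIZE low-order one bits without
   ever shifting by the full width, which C leaves undefined.  Guarded
   out of the build.  */
#if 0
int
main (void)
{
  unsigned precision = 32, size = 5;
  unsigned mask = ~0u;
  mask <<= precision - size;	/* 11111000...0  */
  mask >>= precision - size;	/* 0...00011111  */
  return !(mask == 0x1f);
}
#endif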
3569 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3570 bit positions. */
3572 static int
3573 all_ones_mask_p (tree mask, int size)
3575 tree type = TREE_TYPE (mask);
3576 unsigned int precision = TYPE_PRECISION (type);
3577 tree tmask;
3579 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3580 tmask = force_fit_type (tmask, 0, false, false);
3582 return
3583 tree_int_cst_equal (mask,
3584 const_binop (RSHIFT_EXPR,
3585 const_binop (LSHIFT_EXPR, tmask,
3586 size_int (precision - size),
3588 size_int (precision - size), 0));
3591 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3592 represents the sign bit of EXP's type. If EXP represents a sign
3593 or zero extension, also test VAL against the unextended type.
3594 The return value is the (sub)expression whose sign bit is VAL,
3595 or NULL_TREE otherwise. */
3597 static tree
3598 sign_bit_p (tree exp, tree val)
3600 unsigned HOST_WIDE_INT mask_lo, lo;
3601 HOST_WIDE_INT mask_hi, hi;
3602 int width;
3603 tree t;
3605 /* Tree EXP must have an integral type. */
3606 t = TREE_TYPE (exp);
3607 if (! INTEGRAL_TYPE_P (t))
3608 return NULL_TREE;
3610 /* Tree VAL must be an integer constant. */
3611 if (TREE_CODE (val) != INTEGER_CST
3612 || TREE_CONSTANT_OVERFLOW (val))
3613 return NULL_TREE;
3615 width = TYPE_PRECISION (t);
3616 if (width > HOST_BITS_PER_WIDE_INT)
3618 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3619 lo = 0;
3621 mask_hi = ((unsigned HOST_WIDE_INT) -1
3622 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3623 mask_lo = -1;
3625 else
3627 hi = 0;
3628 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3630 mask_hi = 0;
3631 mask_lo = ((unsigned HOST_WIDE_INT) -1
3632 >> (HOST_BITS_PER_WIDE_INT - width));
3635 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3636 treat VAL as if it were unsigned. */
3637 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3638 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3639 return exp;
3641 /* Handle extension from a narrower type. */
3642 if (TREE_CODE (exp) == NOP_EXPR
3643 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3644 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3646 return NULL_TREE;
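/* Illustrative sketch, not part of fold-const.c: the constant that
   sign_bit_p recognizes, for a plain 32-bit type.  Testing that bit is
   equivalent to testing "x < 0" on the signed view of the value.
   Guarded out of the build.  */
#if 0
#include <stdint.h>

int
main (void)
{
  uint32_t sign = (uint32_t) 1 << 31;	/* lo = 1 << (width - 1) above.  */
  int32_t x = -5;
  return !((((uint32_t) x & sign) != 0) == (x < 0));
}
#endif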
3649 /* Subroutine for fold_truthop: determine if an operand is simple enough
3650 to be evaluated unconditionally. */
3652 static int
3653 simple_operand_p (tree exp)
3655 /* Strip any conversions that don't change the machine mode. */
3656 STRIP_NOPS (exp);
3658 return (CONSTANT_CLASS_P (exp)
3659 || TREE_CODE (exp) == SSA_NAME
3660 || (DECL_P (exp)
3661 && ! TREE_ADDRESSABLE (exp)
3662 && ! TREE_THIS_VOLATILE (exp)
3663 && ! DECL_NONLOCAL (exp)
3664 /* Don't regard global variables as simple. They may be
3665 allocated in ways unknown to the compiler (shared memory,
3666 #pragma weak, etc). */
3667 && ! TREE_PUBLIC (exp)
3668 && ! DECL_EXTERNAL (exp)
3669 /* Loading a static variable is unduly expensive, but global
3670 registers aren't expensive. */
3671 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3674 /* The following functions are subroutines to fold_range_test and allow it to
3675 try to change a logical combination of comparisons into a range test.
3677 For example, both
3678 X == 2 || X == 3 || X == 4 || X == 5
3680 X >= 2 && X <= 5
3681 are converted to
3682 (unsigned) (X - 2) <= 3
3684 We describe each set of comparisons as being either inside or outside
3685 a range, using a variable named like IN_P, and then describe the
3686 range with a lower and upper bound. If one of the bounds is omitted,
3687 it represents either the highest or lowest value of the type.
3689 In the comments below, we represent a range by two numbers in brackets
3690 preceded by a "+" to designate being inside that range, or a "-" to
3691 designate being outside that range, so the condition can be inverted by
3692 flipping the prefix. An omitted bound is represented by a "-". For
3693 example, "- [-, 10]" means being outside the range starting at the lowest
3694 possible value and ending at 10, in other words, being greater than 10.
3695 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3696 always false.
3698 We set up things so that the missing bounds are handled in a consistent
3699 manner so neither a missing bound nor "true" and "false" need to be
3700 handled using a special case. */
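/* Illustrative sketch, not part of fold-const.c: the rewrite quoted
   above, checked over a span of inputs.  Subtracting the low bound and
   comparing unsigned folds both bounds into one test, because values
   below the low bound wrap around to very large unsigned numbers.
   Guarded out of the build.  */
#if 0
int
main (void)
{
  int x;
  for (x = -10; x <= 10; x++)
    {
      int chain = (x == 2 || x == 3 || x == 4 || x == 5);
      int range = ((unsigned) (x - 2) <= 3);
      if (chain != range)
	return 1;
    }
  return 0;
}
#endif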
3702 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3703 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3704 and UPPER1_P are nonzero if the respective argument is an upper bound
3705 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3706 must be specified for a comparison. ARG1 will be converted to ARG0's
3707 type if both are specified. */
3709 static tree
3710 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3711 tree arg1, int upper1_p)
3713 tree tem;
3714 int result;
3715 int sgn0, sgn1;
3717 /* If neither arg represents infinity, do the normal operation.
3718 Else, if not a comparison, return infinity. Else handle the special
3719 comparison rules. Note that most of the cases below won't occur, but
3720 are handled for consistency. */
3722 if (arg0 != 0 && arg1 != 0)
3724 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3725 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3726 STRIP_NOPS (tem);
3727 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3730 if (TREE_CODE_CLASS (code) != tcc_comparison)
3731 return 0;
3733 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3734 for neither. In real maths, we cannot assume open ended ranges are
3735 the same. But, this is computer arithmetic, where numbers are finite.
3736 We can therefore make the transformation of any unbounded range with
3737 the value Z, Z being greater than any representable number. This permits
3738 us to treat unbounded ranges as equal. */
3739 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3740 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3741 switch (code)
3743 case EQ_EXPR:
3744 result = sgn0 == sgn1;
3745 break;
3746 case NE_EXPR:
3747 result = sgn0 != sgn1;
3748 break;
3749 case LT_EXPR:
3750 result = sgn0 < sgn1;
3751 break;
3752 case LE_EXPR:
3753 result = sgn0 <= sgn1;
3754 break;
3755 case GT_EXPR:
3756 result = sgn0 > sgn1;
3757 break;
3758 case GE_EXPR:
3759 result = sgn0 >= sgn1;
3760 break;
3761 default:
3762 gcc_unreachable ();
3765 return constant_boolean_node (result, type);
3768 /* Given EXP, a logical expression, set the range it is testing into
3769 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3770 actually being tested. *PLOW and *PHIGH will be made of the same type
3771 as the returned expression. If EXP is not a comparison, we will most
3772 likely not be returning a useful value and range. */
3774 static tree
3775 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3777 enum tree_code code;
3778 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3779 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3780 int in_p, n_in_p;
3781 tree low, high, n_low, n_high;
3783 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3784 and see if we can refine the range. Some of the cases below may not
3785 happen, but it doesn't seem worth worrying about this. We "continue"
3786 the outer loop when we've changed something; otherwise we "break"
3787 the switch, which will "break" the while. */
3789 in_p = 0;
3790 low = high = build_int_cst (TREE_TYPE (exp), 0);
3792 while (1)
3794 code = TREE_CODE (exp);
3795 exp_type = TREE_TYPE (exp);
3797 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3799 if (TREE_CODE_LENGTH (code) > 0)
3800 arg0 = TREE_OPERAND (exp, 0);
3801 if (TREE_CODE_CLASS (code) == tcc_comparison
3802 || TREE_CODE_CLASS (code) == tcc_unary
3803 || TREE_CODE_CLASS (code) == tcc_binary)
3804 arg0_type = TREE_TYPE (arg0);
3805 if (TREE_CODE_CLASS (code) == tcc_binary
3806 || TREE_CODE_CLASS (code) == tcc_comparison
3807 || (TREE_CODE_CLASS (code) == tcc_expression
3808 && TREE_CODE_LENGTH (code) > 1))
3809 arg1 = TREE_OPERAND (exp, 1);
3812 switch (code)
3814 case TRUTH_NOT_EXPR:
3815 in_p = ! in_p, exp = arg0;
3816 continue;
3818 case EQ_EXPR: case NE_EXPR:
3819 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3820 /* We can only do something if the range is testing for zero
3821 and if the second operand is an integer constant. Note that
3822 saying something is "in" the range we make is done by
3823 complementing IN_P, since IN_P is set in the initial case of
3824 being not equal to zero; "out" is leaving it alone. */
3825 if (low == 0 || high == 0
3826 || ! integer_zerop (low) || ! integer_zerop (high)
3827 || TREE_CODE (arg1) != INTEGER_CST)
3828 break;
3830 switch (code)
3832 case NE_EXPR: /* - [c, c] */
3833 low = high = arg1;
3834 break;
3835 case EQ_EXPR: /* + [c, c] */
3836 in_p = ! in_p, low = high = arg1;
3837 break;
3838 case GT_EXPR: /* - [-, c] */
3839 low = 0, high = arg1;
3840 break;
3841 case GE_EXPR: /* + [c, -] */
3842 in_p = ! in_p, low = arg1, high = 0;
3843 break;
3844 case LT_EXPR: /* - [c, -] */
3845 low = arg1, high = 0;
3846 break;
3847 case LE_EXPR: /* + [-, c] */
3848 in_p = ! in_p, low = 0, high = arg1;
3849 break;
3850 default:
3851 gcc_unreachable ();
3854 /* If this is an unsigned comparison, we also know that EXP is
3855 greater than or equal to zero. We base the range tests we make
3856 on that fact, so we record it here so we can parse existing
3857 range tests. We test arg0_type since often the return type
3858 of, e.g. EQ_EXPR, is boolean. */
3859 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3861 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3862 in_p, low, high, 1,
3863 build_int_cst (arg0_type, 0),
3864 NULL_TREE))
3865 break;
3867 in_p = n_in_p, low = n_low, high = n_high;
3869 /* If the high bound is missing, but we have a nonzero low
3870 bound, reverse the range so it goes from zero to the low bound
3871 minus 1. */
3872 if (high == 0 && low && ! integer_zerop (low))
3874 in_p = ! in_p;
3875 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3876 integer_one_node, 0);
3877 low = build_int_cst (arg0_type, 0);
3881 exp = arg0;
3882 continue;
3884 case NEGATE_EXPR:
3885 /* (-x) IN [a,b] -> x in [-b, -a] */
3886 n_low = range_binop (MINUS_EXPR, exp_type,
3887 build_int_cst (exp_type, 0),
3888 0, high, 1);
3889 n_high = range_binop (MINUS_EXPR, exp_type,
3890 build_int_cst (exp_type, 0),
3891 0, low, 0);
3892 low = n_low, high = n_high;
3893 exp = arg0;
3894 continue;
3896 case BIT_NOT_EXPR:
3897 /* ~ X -> -X - 1 */
3898 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3899 build_int_cst (exp_type, 1));
3900 continue;
3902 case PLUS_EXPR: case MINUS_EXPR:
3903 if (TREE_CODE (arg1) != INTEGER_CST)
3904 break;
3906 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3907 move a constant to the other side. */
3908 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3909 break;
3911 /* If EXP is signed, any overflow in the computation is undefined,
3912 so we don't worry about it so long as our computations on
3913 the bounds don't overflow. For unsigned, overflow is defined
3914 and this is exactly the right thing. */
3915 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3916 arg0_type, low, 0, arg1, 0);
3917 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3918 arg0_type, high, 1, arg1, 0);
3919 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3920 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3921 break;
3923 /* Check for an unsigned range which has wrapped around the maximum
3924 value thus making n_high < n_low, and normalize it. */
3925 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3927 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3928 integer_one_node, 0);
3929 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3930 integer_one_node, 0);
3932 /* If the range is of the form +/- [ x+1, x ], we won't
3933 be able to normalize it. But then, it represents the
3934 whole range or the empty set, so make it
3935 +/- [ -, - ]. */
3936 if (tree_int_cst_equal (n_low, low)
3937 && tree_int_cst_equal (n_high, high))
3938 low = high = 0;
3939 else
3940 in_p = ! in_p;
3942 else
3943 low = n_low, high = n_high;
3945 exp = arg0;
3946 continue;
3948 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3949 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3950 break;
3952 if (! INTEGRAL_TYPE_P (arg0_type)
3953 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3954 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3955 break;
3957 n_low = low, n_high = high;
3959 if (n_low != 0)
3960 n_low = fold_convert (arg0_type, n_low);
3962 if (n_high != 0)
3963 n_high = fold_convert (arg0_type, n_high);
3966 /* If we're converting arg0 from an unsigned type to exp's
3967 signed type, we will be doing the comparison as unsigned.
3968 The tests above have already verified that LOW and HIGH
3969 are both positive.
3971 So we have to ensure that we will handle large unsigned
3972 values the same way that the current signed bounds treat
3973 negative values. */
3975 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3977 tree high_positive;
3978 tree equiv_type = lang_hooks.types.type_for_mode
3979 (TYPE_MODE (arg0_type), 1);
3981 /* A range without an upper bound is, naturally, unbounded.
3982 Since convert would have cropped a very large value, use
3983 the max value for the destination type. */
3984 high_positive
3985 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3986 : TYPE_MAX_VALUE (arg0_type);
3988 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3989 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3990 fold_convert (arg0_type,
3991 high_positive),
3992 fold_convert (arg0_type,
3993 integer_one_node));
3995 /* If the low bound is specified, "and" the range with the
3996 range for which the original unsigned value will be
3997 positive. */
3998 if (low != 0)
4000 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4001 1, n_low, n_high, 1,
4002 fold_convert (arg0_type,
4003 integer_zero_node),
4004 high_positive))
4005 break;
4007 in_p = (n_in_p == in_p);
4009 else
4011 /* Otherwise, "or" the range with the range of the input
4012 that will be interpreted as negative. */
4013 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4014 0, n_low, n_high, 1,
4015 fold_convert (arg0_type,
4016 integer_zero_node),
4017 high_positive))
4018 break;
4020 in_p = (in_p != n_in_p);
4024 exp = arg0;
4025 low = n_low, high = n_high;
4026 continue;
4028 default:
4029 break;
4032 break;
4035 /* If EXP is a constant, we can evaluate whether this is true or false. */
4036 if (TREE_CODE (exp) == INTEGER_CST)
4038 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4039 exp, 0, low, 0))
4040 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4041 exp, 1, high, 1)));
4042 low = high = 0;
4043 exp = 0;
4046 *pin_p = in_p, *plow = low, *phigh = high;
4047 return exp;
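/* Worked example (illustrative only): for EXP = "(x + 5) <= 14" on a
   signed int x, the LE_EXPR case first produces "+ [-, 14]" on x + 5,
   and the PLUS_EXPR case then moves the constant across the bounds,
   so the final answer is exp = x, *PIN_P = 1, *PLOW = NULL (minus
   infinity), *PHIGH = 9, i.e. the range "+ [-, 9]".  */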
4050 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4051 type, TYPE, return an expression to test if EXP is in (or out of, depending
4052 on IN_P) the range. Return 0 if the test couldn't be created. */
4054 static tree
4055 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4057 tree etype = TREE_TYPE (exp);
4058 tree value;
4060 #ifdef HAVE_canonicalize_funcptr_for_compare
4061 /* Disable this optimization for function pointer expressions
4062 on targets that require function pointer canonicalization. */
4063 if (HAVE_canonicalize_funcptr_for_compare
4064 && TREE_CODE (etype) == POINTER_TYPE
4065 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4066 return NULL_TREE;
4067 #endif
4069 if (! in_p)
4071 value = build_range_check (type, exp, 1, low, high);
4072 if (value != 0)
4073 return invert_truthvalue (value);
4075 return 0;
4078 if (low == 0 && high == 0)
4079 return build_int_cst (type, 1);
4081 if (low == 0)
4082 return fold_build2 (LE_EXPR, type, exp,
4083 fold_convert (etype, high));
4085 if (high == 0)
4086 return fold_build2 (GE_EXPR, type, exp,
4087 fold_convert (etype, low));
4089 if (operand_equal_p (low, high, 0))
4090 return fold_build2 (EQ_EXPR, type, exp,
4091 fold_convert (etype, low));
4093 if (integer_zerop (low))
4095 if (! TYPE_UNSIGNED (etype))
4097 etype = lang_hooks.types.unsigned_type (etype);
4098 high = fold_convert (etype, high);
4099 exp = fold_convert (etype, exp);
4101 return build_range_check (type, exp, 1, 0, high);
4104 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4105 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4107 unsigned HOST_WIDE_INT lo;
4108 HOST_WIDE_INT hi;
4109 int prec;
4111 prec = TYPE_PRECISION (etype);
4112 if (prec <= HOST_BITS_PER_WIDE_INT)
4114 hi = 0;
4115 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4117 else
4119 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4120 lo = (unsigned HOST_WIDE_INT) -1;
4123 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4125 if (TYPE_UNSIGNED (etype))
4127 etype = lang_hooks.types.signed_type (etype);
4128 exp = fold_convert (etype, exp);
4130 return fold_build2 (GT_EXPR, type, exp,
4131 build_int_cst (etype, 0));
4135 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4136 This requires wrap-around arithmetic for the type of the expression. */
4137 switch (TREE_CODE (etype))
4139 case INTEGER_TYPE:
4140 /* There is no requirement that LOW be within the range of ETYPE
4141 if the latter is a subtype. It must, however, be within the base
4142 type of ETYPE. So be sure we do the subtraction in that type. */
4143 if (TREE_TYPE (etype))
4144 etype = TREE_TYPE (etype);
4145 break;
4147 case ENUMERAL_TYPE:
4148 case BOOLEAN_TYPE:
4149 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4150 TYPE_UNSIGNED (etype));
4151 break;
4153 default:
4154 break;
4157 /* If we don't have wrap-around arithmetic up front, try to force it. */
4158 if (TREE_CODE (etype) == INTEGER_TYPE
4159 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4161 tree utype, minv, maxv;
4163 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4164 for the type in question, as we rely on this here. */
4165 utype = lang_hooks.types.unsigned_type (etype);
4166 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4167 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4168 integer_one_node, 1);
4169 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4171 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4172 minv, 1, maxv, 1)))
4173 etype = utype;
4174 else
4175 return 0;
4178 high = fold_convert (etype, high);
4179 low = fold_convert (etype, low);
4180 exp = fold_convert (etype, exp);
4182 value = const_binop (MINUS_EXPR, high, low, 0);
4184 if (value != 0 && !TREE_OVERFLOW (value))
4185 return build_range_check (type,
4186 fold_build2 (MINUS_EXPR, etype, exp, low),
4187 1, build_int_cst (etype, 0), value);
4189 return 0;
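/* Worked example of the wrap-around trick above (values invented for
   the illustration): on a 32-bit int x, the range check "+ [16, 31]"
   built by this function amounts to

     (unsigned int) (x - 16) <= 15U

   Any x below 16 wraps around to a huge unsigned value and fails the
   test, so the two original comparisons x >= 16 && x <= 31 collapse
   into a single one.  */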
4192 /* Return the predecessor of VAL in its type, handling the infinite case. */
4194 static tree
4195 range_predecessor (tree val)
4197 tree type = TREE_TYPE (val);
4199 if (INTEGRAL_TYPE_P (type)
4200 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4201 return 0;
4202 else
4203 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4206 /* Return the successor of VAL in its type, handling the infinite case. */
4208 static tree
4209 range_successor (tree val)
4211 tree type = TREE_TYPE (val);
4213 if (INTEGRAL_TYPE_P (type)
4214 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4215 return 0;
4216 else
4217 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
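/* Example (illustrative): range_successor on the TYPE_MAX_VALUE of an
   integral type returns 0, signalling "plus infinity", while on 9 it
   simply returns 10; range_predecessor is symmetric at
   TYPE_MIN_VALUE.  */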
4220 /* Given two ranges, see if we can merge them into one. Return 1 if we
4221 can, 0 if we can't. Set the output range into the specified parameters. */
4223 static int
4224 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4225 tree high0, int in1_p, tree low1, tree high1)
4227 int no_overlap;
4228 int subset;
4229 int temp;
4230 tree tem;
4231 int in_p;
4232 tree low, high;
4233 int lowequal = ((low0 == 0 && low1 == 0)
4234 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4235 low0, 0, low1, 0)));
4236 int highequal = ((high0 == 0 && high1 == 0)
4237 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4238 high0, 1, high1, 1)));
4240 /* Make range 0 be the range that starts first, or ends last if they
4241 start at the same value. Swap them if that is not already the case. */
4242 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4243 low0, 0, low1, 0))
4244 || (lowequal
4245 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4246 high1, 1, high0, 1))))
4248 temp = in0_p, in0_p = in1_p, in1_p = temp;
4249 tem = low0, low0 = low1, low1 = tem;
4250 tem = high0, high0 = high1, high1 = tem;
4253 /* Now flag two cases, whether the ranges are disjoint or whether the
4254 second range is totally subsumed in the first. Note that the tests
4255 below are simplified by the ones above. */
4256 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4257 high0, 1, low1, 0));
4258 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4259 high1, 1, high0, 1));
4261 /* We now have four cases, depending on whether we are including or
4262 excluding the two ranges. */
4263 if (in0_p && in1_p)
4265 /* If they don't overlap, the result is false. If the second range
4266 is a subset it is the result. Otherwise, the range is from the start
4267 of the second to the end of the first. */
4268 if (no_overlap)
4269 in_p = 0, low = high = 0;
4270 else if (subset)
4271 in_p = 1, low = low1, high = high1;
4272 else
4273 in_p = 1, low = low1, high = high0;
4276 else if (in0_p && ! in1_p)
4278 /* If they don't overlap, the result is the first range. If they are
4279 equal, the result is false. If the second range is a subset of the
4280 first, and the ranges begin at the same place, we go from just after
4281 the end of the second range to the end of the first. If the second
4282 range is not a subset of the first, or if it is a subset and both
4283 ranges end at the same place, the range starts at the start of the
4284 first range and ends just before the second range.
4285 Otherwise, we can't describe this as a single range. */
4286 if (no_overlap)
4287 in_p = 1, low = low0, high = high0;
4288 else if (lowequal && highequal)
4289 in_p = 0, low = high = 0;
4290 else if (subset && lowequal)
4292 low = range_successor (high1);
4293 high = high0;
4294 in_p = (low != 0);
4296 else if (! subset || highequal)
4298 low = low0;
4299 high = range_predecessor (low1);
4300 in_p = (high != 0);
4302 else
4303 return 0;
4306 else if (! in0_p && in1_p)
4308 /* If they don't overlap, the result is the second range. If the second
4309 is a subset of the first, the result is false. Otherwise,
4310 the range starts just after the first range and ends at the
4311 end of the second. */
4312 if (no_overlap)
4313 in_p = 1, low = low1, high = high1;
4314 else if (subset || highequal)
4315 in_p = 0, low = high = 0;
4316 else
4318 low = range_successor (high0);
4319 high = high1;
4320 in_p = (low != 0);
4324 else
4326 /* The case where we are excluding both ranges. Here the complex case
4327 is if they don't overlap. In that case, the only time we have a
4328 range is if they are adjacent. If the second is a subset of the
4329 first, the result is the first. Otherwise, the range to exclude
4330 starts at the beginning of the first range and ends at the end of the
4331 second. */
4332 if (no_overlap)
4334 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4335 range_successor (high0),
4336 1, low1, 0)))
4337 in_p = 0, low = low0, high = high1;
4338 else
4340 /* Canonicalize - [min, x] into - [-, x]. */
4341 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4342 switch (TREE_CODE (TREE_TYPE (low0)))
4344 case ENUMERAL_TYPE:
4345 if (TYPE_PRECISION (TREE_TYPE (low0))
4346 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4347 break;
4348 /* FALLTHROUGH */
4349 case INTEGER_TYPE:
4350 if (tree_int_cst_equal (low0,
4351 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4352 low0 = 0;
4353 break;
4354 case POINTER_TYPE:
4355 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4356 && integer_zerop (low0))
4357 low0 = 0;
4358 break;
4359 default:
4360 break;
4363 /* Canonicalize - [x, max] into - [x, -]. */
4364 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4365 switch (TREE_CODE (TREE_TYPE (high1)))
4367 case ENUMERAL_TYPE:
4368 if (TYPE_PRECISION (TREE_TYPE (high1))
4369 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4370 break;
4371 /* FALLTHROUGH */
4372 case INTEGER_TYPE:
4373 if (tree_int_cst_equal (high1,
4374 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4375 high1 = 0;
4376 break;
4377 case POINTER_TYPE:
4378 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4379 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4380 high1, 1,
4381 integer_one_node, 1)))
4382 high1 = 0;
4383 break;
4384 default:
4385 break;
4388 /* The ranges might also be adjacent between the maximum and
4389 minimum values of the given type. For
4390 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4391 return + [x + 1, y - 1]. */
4392 if (low0 == 0 && high1 == 0)
4394 low = range_successor (high0);
4395 high = range_predecessor (low1);
4396 if (low == 0 || high == 0)
4397 return 0;
4399 in_p = 1;
4401 else
4402 return 0;
4405 else if (subset)
4406 in_p = 0, low = low0, high = high0;
4407 else
4408 in_p = 0, low = low0, high = high1;
4411 *pin_p = in_p, *plow = low, *phigh = high;
4412 return 1;
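/* A worked example (not from the original sources): ANDing the two
   inclusive ranges "+ [0, 9]" and "+ [2, 4]" takes the in0_p && in1_p
   branch; [2, 4] is a subset of [0, 9], so the merged result is
   "+ [2, 4]". ANDing the disjoint "+ [0, 3]" and "+ [5, 9]" instead
   sets no_overlap, and the result is the always-false "- [-, -]".  */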
4416 /* Subroutine of fold, looking inside expressions of the form
4417 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4418 of the COND_EXPR. This function is being used also to optimize
4419 A op B ? C : A, by reversing the comparison first.
4421 Return a folded expression whose code is not a COND_EXPR
4422 anymore, or NULL_TREE if no folding opportunity is found. */
4424 static tree
4425 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4427 enum tree_code comp_code = TREE_CODE (arg0);
4428 tree arg00 = TREE_OPERAND (arg0, 0);
4429 tree arg01 = TREE_OPERAND (arg0, 1);
4430 tree arg1_type = TREE_TYPE (arg1);
4431 tree tem;
4433 STRIP_NOPS (arg1);
4434 STRIP_NOPS (arg2);
4436 /* If we have A op 0 ? A : -A, consider applying the following
4437 transformations:
4439 A == 0? A : -A same as -A
4440 A != 0? A : -A same as A
4441 A >= 0? A : -A same as abs (A)
4442 A > 0? A : -A same as abs (A)
4443 A <= 0? A : -A same as -abs (A)
4444 A < 0? A : -A same as -abs (A)
4446 None of these transformations work for modes with signed
4447 zeros. If A is +/-0, the first two transformations will
4448 change the sign of the result (from +0 to -0, or vice
4449 versa). The last four will fix the sign of the result,
4450 even though the original expressions could be positive or
4451 negative, depending on the sign of A.
4453 Note that all these transformations are correct if A is
4454 NaN, since the two alternatives (A and -A) are also NaNs. */
4455 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4456 ? real_zerop (arg01)
4457 : integer_zerop (arg01))
4458 && ((TREE_CODE (arg2) == NEGATE_EXPR
4459 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4460 /* In the case that A is of the form X-Y, '-A' (arg2) may
4461 have already been folded to Y-X, check for that. */
4462 || (TREE_CODE (arg1) == MINUS_EXPR
4463 && TREE_CODE (arg2) == MINUS_EXPR
4464 && operand_equal_p (TREE_OPERAND (arg1, 0),
4465 TREE_OPERAND (arg2, 1), 0)
4466 && operand_equal_p (TREE_OPERAND (arg1, 1),
4467 TREE_OPERAND (arg2, 0), 0))))
4468 switch (comp_code)
4470 case EQ_EXPR:
4471 case UNEQ_EXPR:
4472 tem = fold_convert (arg1_type, arg1);
4473 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4474 case NE_EXPR:
4475 case LTGT_EXPR:
4476 return pedantic_non_lvalue (fold_convert (type, arg1));
4477 case UNGE_EXPR:
4478 case UNGT_EXPR:
4479 if (flag_trapping_math)
4480 break;
4481 /* Fall through. */
4482 case GE_EXPR:
4483 case GT_EXPR:
4484 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4485 arg1 = fold_convert (lang_hooks.types.signed_type
4486 (TREE_TYPE (arg1)), arg1);
4487 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4488 return pedantic_non_lvalue (fold_convert (type, tem));
4489 case UNLE_EXPR:
4490 case UNLT_EXPR:
4491 if (flag_trapping_math)
4492 break;
4493 case LE_EXPR:
4494 case LT_EXPR:
4495 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4496 arg1 = fold_convert (lang_hooks.types.signed_type
4497 (TREE_TYPE (arg1)), arg1);
4498 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4499 return negate_expr (fold_convert (type, tem));
4500 default:
4501 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4502 break;
4505 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4506 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4507 both transformations are correct when A is NaN: A != 0
4508 is then true, and A == 0 is false. */
4510 if (integer_zerop (arg01) && integer_zerop (arg2))
4512 if (comp_code == NE_EXPR)
4513 return pedantic_non_lvalue (fold_convert (type, arg1));
4514 else if (comp_code == EQ_EXPR)
4515 return build_int_cst (type, 0);
4518 /* Try some transformations of A op B ? A : B.
4520 A == B? A : B same as B
4521 A != B? A : B same as A
4522 A >= B? A : B same as max (A, B)
4523 A > B? A : B same as max (B, A)
4524 A <= B? A : B same as min (A, B)
4525 A < B? A : B same as min (B, A)
4527 As above, these transformations don't work in the presence
4528 of signed zeros. For example, if A and B are zeros of
4529 opposite sign, the first two transformations will change
4530 the sign of the result. In the last four, the original
4531 expressions give different results for (A=+0, B=-0) and
4532 (A=-0, B=+0), but the transformed expressions do not.
4534 The first two transformations are correct if either A or B
4535 is a NaN. In the first transformation, the condition will
4536 be false, and B will indeed be chosen. In the case of the
4537 second transformation, the condition A != B will be true,
4538 and A will be chosen.
4540 The conversions to max() and min() are not correct if B is
4541 a number and A is not. The conditions in the original
4542 expressions will be false, so all four give B. The min()
4543 and max() versions would give a NaN instead. */
4544 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4545 /* Avoid these transformations if the COND_EXPR may be used
4546 as an lvalue in the C++ front-end. PR c++/19199. */
4547 && (in_gimple_form
4548 || (strcmp (lang_hooks.name, "GNU C++") != 0
4549 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4550 || ! maybe_lvalue_p (arg1)
4551 || ! maybe_lvalue_p (arg2)))
4553 tree comp_op0 = arg00;
4554 tree comp_op1 = arg01;
4555 tree comp_type = TREE_TYPE (comp_op0);
4557 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4558 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4560 comp_type = type;
4561 comp_op0 = arg1;
4562 comp_op1 = arg2;
4565 switch (comp_code)
4567 case EQ_EXPR:
4568 return pedantic_non_lvalue (fold_convert (type, arg2));
4569 case NE_EXPR:
4570 return pedantic_non_lvalue (fold_convert (type, arg1));
4571 case LE_EXPR:
4572 case LT_EXPR:
4573 case UNLE_EXPR:
4574 case UNLT_EXPR:
4575 /* In C++ a ?: expression can be an lvalue, so put the
4576 operand which will be used if they are equal first
4577 so that we can convert this back to the
4578 corresponding COND_EXPR. */
4579 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4581 comp_op0 = fold_convert (comp_type, comp_op0);
4582 comp_op1 = fold_convert (comp_type, comp_op1);
4583 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4584 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4585 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4586 return pedantic_non_lvalue (fold_convert (type, tem));
4588 break;
4589 case GE_EXPR:
4590 case GT_EXPR:
4591 case UNGE_EXPR:
4592 case UNGT_EXPR:
4593 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4595 comp_op0 = fold_convert (comp_type, comp_op0);
4596 comp_op1 = fold_convert (comp_type, comp_op1);
4597 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4598 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4599 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4600 return pedantic_non_lvalue (fold_convert (type, tem));
4602 break;
4603 case UNEQ_EXPR:
4604 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4605 return pedantic_non_lvalue (fold_convert (type, arg2));
4606 break;
4607 case LTGT_EXPR:
4608 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4609 return pedantic_non_lvalue (fold_convert (type, arg1));
4610 break;
4611 default:
4612 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4613 break;
4617 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4618 we might still be able to simplify this. For example,
4619 if C1 is one less or one more than C2, this might have started
4620 out as a MIN or MAX and been transformed by this function.
4621 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4623 if (INTEGRAL_TYPE_P (type)
4624 && TREE_CODE (arg01) == INTEGER_CST
4625 && TREE_CODE (arg2) == INTEGER_CST)
4626 switch (comp_code)
4628 case EQ_EXPR:
4629 /* We can replace A with C1 in this case. */
4630 arg1 = fold_convert (type, arg01);
4631 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4633 case LT_EXPR:
4634 /* If C1 is C2 + 1, this is min(A, C2). */
4635 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4636 OEP_ONLY_CONST)
4637 && operand_equal_p (arg01,
4638 const_binop (PLUS_EXPR, arg2,
4639 integer_one_node, 0),
4640 OEP_ONLY_CONST))
4641 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4642 type, arg1, arg2));
4643 break;
4645 case LE_EXPR:
4646 /* If C1 is C2 - 1, this is min(A, C2). */
4647 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4648 OEP_ONLY_CONST)
4649 && operand_equal_p (arg01,
4650 const_binop (MINUS_EXPR, arg2,
4651 integer_one_node, 0),
4652 OEP_ONLY_CONST))
4653 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4654 type, arg1, arg2));
4655 break;
4657 case GT_EXPR:
4658 /* If C1 is C2 - 1, this is max(A, C2). */
4659 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4660 OEP_ONLY_CONST)
4661 && operand_equal_p (arg01,
4662 const_binop (MINUS_EXPR, arg2,
4663 integer_one_node, 0),
4664 OEP_ONLY_CONST))
4665 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4666 type, arg1, arg2));
4667 break;
4669 case GE_EXPR:
4670 /* If C1 is C2 + 1, this is max(A, C2). */
4671 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4672 OEP_ONLY_CONST)
4673 && operand_equal_p (arg01,
4674 const_binop (PLUS_EXPR, arg2,
4675 integer_one_node, 0),
4676 OEP_ONLY_CONST))
4677 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4678 type, arg1, arg2));
4679 break;
4680 case NE_EXPR:
4681 break;
4682 default:
4683 gcc_unreachable ();
4686 return NULL_TREE;
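/* Two illustrative instances of the transformations above (invented
   source fragments), for a signed int x and y:

     x > 0 ? x : -x      folds to   ABS_EXPR <x>
     x < y ? x : y       folds to   MIN_EXPR <y, x>

   subject to the signed-zero and NaN caveats documented above, which
   is why the floating-point cases are guarded by the HONOR_NANS and
   flag_trapping_math checks.  */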
4691 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4692 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4693 #endif
4695 /* EXP is some logical combination of boolean tests. See if we can
4696 merge it into some range test. Return the new tree if so. */
4698 static tree
4699 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4701 int or_op = (code == TRUTH_ORIF_EXPR
4702 || code == TRUTH_OR_EXPR);
4703 int in0_p, in1_p, in_p;
4704 tree low0, low1, low, high0, high1, high;
4705 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4706 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4707 tree tem;
4709 /* If this is an OR operation, invert both sides; we will invert
4710 again at the end. */
4711 if (or_op)
4712 in0_p = ! in0_p, in1_p = ! in1_p;
4714 /* If both expressions are the same, if we can merge the ranges, and we
4715 can build the range test, return it or it inverted. If one of the
4716 ranges is always true or always false, consider it to be the same
4717 expression as the other. */
4718 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4719 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4720 in1_p, low1, high1)
4721 && 0 != (tem = (build_range_check (type,
4722 lhs != 0 ? lhs
4723 : rhs != 0 ? rhs : integer_zero_node,
4724 in_p, low, high))))
4725 return or_op ? invert_truthvalue (tem) : tem;
4727 /* On machines where the branch cost is expensive, if this is a
4728 short-circuited branch and the underlying object on both sides
4729 is the same, make a non-short-circuit operation. */
4730 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4731 && lhs != 0 && rhs != 0
4732 && (code == TRUTH_ANDIF_EXPR
4733 || code == TRUTH_ORIF_EXPR)
4734 && operand_equal_p (lhs, rhs, 0))
4736 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4737 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4738 which cases we can't do this. */
4739 if (simple_operand_p (lhs))
4740 return build2 (code == TRUTH_ANDIF_EXPR
4741 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4742 type, op0, op1);
4744 else if (lang_hooks.decls.global_bindings_p () == 0
4745 && ! CONTAINS_PLACEHOLDER_P (lhs))
4747 tree common = save_expr (lhs);
4749 if (0 != (lhs = build_range_check (type, common,
4750 or_op ? ! in0_p : in0_p,
4751 low0, high0))
4752 && (0 != (rhs = build_range_check (type, common,
4753 or_op ? ! in1_p : in1_p,
4754 low1, high1))))
4755 return build2 (code == TRUTH_ANDIF_EXPR
4756 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4757 type, lhs, rhs);
4761 return 0;
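/* Classic example of this merge (illustrative): the two tests in

     ch >= '0' && ch <= '9'

   come back from make_range as "+ ['0', -]" and "+ [-, '9']",
   merge_ranges intersects them into "+ ['0', '9']", and
   build_range_check then emits a single comparison of the form
   (unsigned) (ch - '0') <= 9.  */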
4764 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a
4765 P-bit value. Arrange things so the extra bits will be set to zero if
4766 and only if C is sign-extended to its full width. If MASK is nonzero,
4767 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4769 static tree
4770 unextend (tree c, int p, int unsignedp, tree mask)
4772 tree type = TREE_TYPE (c);
4773 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4774 tree temp;
4776 if (p == modesize || unsignedp)
4777 return c;
4779 /* We work by getting just the sign bit into the low-order bit, then
4780 into the high-order bit, then sign-extend. We then XOR that value
4781 with C. */
4782 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4783 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4785 /* We must use a signed type in order to get an arithmetic right shift.
4786 However, we must also avoid introducing accidental overflows, so that
4787 a subsequent call to integer_zerop will work. Hence we must
4788 do the type conversion here. At this point, the constant is either
4789 zero or one, and the conversion to a signed type can never overflow.
4790 We could get an overflow if this conversion is done anywhere else. */
4791 if (TYPE_UNSIGNED (type))
4792 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4794 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4795 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4796 if (mask != 0)
4797 temp = const_binop (BIT_AND_EXPR, temp,
4798 fold_convert (TREE_TYPE (c), mask), 0);
4799 /* If necessary, convert the type back to match the type of C. */
4800 if (TYPE_UNSIGNED (type))
4801 temp = fold_convert (type, temp);
4803 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
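/* Numeric illustration (invented values): in an 8-bit mode with
   p == 4 and unsignedp == 0, the constant 0xFA is the sign-extension
   of the 4-bit value -6, and unextend maps it to 0x0A (extra bits
   cleared), while the non-sign-extended 0x0A maps to 0xFA. The final
   XOR against the shifted-out sign bit is what flips the extra bits
   in exactly the sign-extended case.  */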
4806 /* Find ways of folding logical expressions of LHS and RHS:
4807 Try to merge two comparisons to the same innermost item.
4808 Look for range tests like "ch >= '0' && ch <= '9'".
4809 Look for combinations of simple terms on machines with expensive branches
4810 and evaluate the RHS unconditionally.
4812 For example, if we have p->a == 2 && p->b == 4 and we can make an
4813 object large enough to span both A and B, we can do this with a comparison
4814 against the object ANDed with the a mask.
4816 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4817 operations to do this with one comparison.
4819 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4820 function and the one above.
4822 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4823 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4825 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4826 two operands.
4828 We return the simplified tree or 0 if no optimization is possible. */
4830 static tree
4831 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4833 /* If this is the "or" of two comparisons, we can do something if
4834 the comparisons are NE_EXPR. If this is the "and", we can do something
4835 if the comparisons are EQ_EXPR. I.e.,
4836 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4838 WANTED_CODE is this operation code. For single bit fields, we can
4839 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4840 comparison for one-bit fields. */
4842 enum tree_code wanted_code;
4843 enum tree_code lcode, rcode;
4844 tree ll_arg, lr_arg, rl_arg, rr_arg;
4845 tree ll_inner, lr_inner, rl_inner, rr_inner;
4846 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4847 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4848 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4849 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4850 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4851 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4852 enum machine_mode lnmode, rnmode;
4853 tree ll_mask, lr_mask, rl_mask, rr_mask;
4854 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4855 tree l_const, r_const;
4856 tree lntype, rntype, result;
4857 int first_bit, end_bit;
4858 int volatilep;
4859 tree orig_lhs = lhs, orig_rhs = rhs;
4860 enum tree_code orig_code = code;
4862 /* Start by getting the comparison codes. Fail if anything is volatile.
4863 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4864 it were surrounded with a NE_EXPR. */
4866 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4867 return 0;
4869 lcode = TREE_CODE (lhs);
4870 rcode = TREE_CODE (rhs);
4872 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4874 lhs = build2 (NE_EXPR, truth_type, lhs,
4875 build_int_cst (TREE_TYPE (lhs), 0));
4876 lcode = NE_EXPR;
4879 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4881 rhs = build2 (NE_EXPR, truth_type, rhs,
4882 build_int_cst (TREE_TYPE (rhs), 0));
4883 rcode = NE_EXPR;
4886 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4887 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4888 return 0;
4890 ll_arg = TREE_OPERAND (lhs, 0);
4891 lr_arg = TREE_OPERAND (lhs, 1);
4892 rl_arg = TREE_OPERAND (rhs, 0);
4893 rr_arg = TREE_OPERAND (rhs, 1);
4895 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4896 if (simple_operand_p (ll_arg)
4897 && simple_operand_p (lr_arg))
4899 tree result;
4900 if (operand_equal_p (ll_arg, rl_arg, 0)
4901 && operand_equal_p (lr_arg, rr_arg, 0))
4903 result = combine_comparisons (code, lcode, rcode,
4904 truth_type, ll_arg, lr_arg);
4905 if (result)
4906 return result;
4908 else if (operand_equal_p (ll_arg, rr_arg, 0)
4909 && operand_equal_p (lr_arg, rl_arg, 0))
4911 result = combine_comparisons (code, lcode,
4912 swap_tree_comparison (rcode),
4913 truth_type, ll_arg, lr_arg);
4914 if (result)
4915 return result;
4919 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4920 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4922 /* If the RHS can be evaluated unconditionally and its operands are
4923 simple, it wins to evaluate the RHS unconditionally on machines
4924 with expensive branches. In this case, this isn't a comparison
4925 that can be merged. Avoid doing this if the RHS is a floating-point
4926 comparison since those can trap. */
4928 if (BRANCH_COST >= 2
4929 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4930 && simple_operand_p (rl_arg)
4931 && simple_operand_p (rr_arg))
4933 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4934 if (code == TRUTH_OR_EXPR
4935 && lcode == NE_EXPR && integer_zerop (lr_arg)
4936 && rcode == NE_EXPR && integer_zerop (rr_arg)
4937 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4938 return build2 (NE_EXPR, truth_type,
4939 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4940 ll_arg, rl_arg),
4941 build_int_cst (TREE_TYPE (ll_arg), 0));
4943 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4944 if (code == TRUTH_AND_EXPR
4945 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4946 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4947 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4948 return build2 (EQ_EXPR, truth_type,
4949 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4950 ll_arg, rl_arg),
4951 build_int_cst (TREE_TYPE (ll_arg), 0));
4953 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4955 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
4956 return build2 (code, truth_type, lhs, rhs);
4957 return NULL_TREE;
4961 /* See if the comparisons can be merged. Then get all the parameters for
4962 each side. */
4964 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4965 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4966 return 0;
4968 volatilep = 0;
4969 ll_inner = decode_field_reference (ll_arg,
4970 &ll_bitsize, &ll_bitpos, &ll_mode,
4971 &ll_unsignedp, &volatilep, &ll_mask,
4972 &ll_and_mask);
4973 lr_inner = decode_field_reference (lr_arg,
4974 &lr_bitsize, &lr_bitpos, &lr_mode,
4975 &lr_unsignedp, &volatilep, &lr_mask,
4976 &lr_and_mask);
4977 rl_inner = decode_field_reference (rl_arg,
4978 &rl_bitsize, &rl_bitpos, &rl_mode,
4979 &rl_unsignedp, &volatilep, &rl_mask,
4980 &rl_and_mask);
4981 rr_inner = decode_field_reference (rr_arg,
4982 &rr_bitsize, &rr_bitpos, &rr_mode,
4983 &rr_unsignedp, &volatilep, &rr_mask,
4984 &rr_and_mask);
4986 /* The inner operation on the lhs of each comparison must be the
4987 same if we are to be able to do anything.
4988 Then see if we have constants. If not, the same must be true for
4989 the rhs's. */
4990 if (volatilep || ll_inner == 0 || rl_inner == 0
4991 || ! operand_equal_p (ll_inner, rl_inner, 0))
4992 return 0;
4994 if (TREE_CODE (lr_arg) == INTEGER_CST
4995 && TREE_CODE (rr_arg) == INTEGER_CST)
4996 l_const = lr_arg, r_const = rr_arg;
4997 else if (lr_inner == 0 || rr_inner == 0
4998 || ! operand_equal_p (lr_inner, rr_inner, 0))
4999 return 0;
5000 else
5001 l_const = r_const = 0;
5003 /* If either comparison code is not correct for our logical operation,
5004 fail. However, we can convert a one-bit comparison against zero into
5005 the opposite comparison against that bit being set in the field. */
5007 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5008 if (lcode != wanted_code)
5010 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5012 /* Make the left operand unsigned, since we are only interested
5013 in the value of one bit. Otherwise we are doing the wrong
5014 thing below. */
5015 ll_unsignedp = 1;
5016 l_const = ll_mask;
5018 else
5019 return 0;
5022 /* This is analogous to the code for l_const above. */
5023 if (rcode != wanted_code)
5025 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5027 rl_unsignedp = 1;
5028 r_const = rl_mask;
5030 else
5031 return 0;
5034 /* After this point all optimizations will generate bit-field
5035 references, which we might not want. */
5036 if (! lang_hooks.can_use_bit_fields_p ())
5037 return 0;
5039 /* See if we can find a mode that contains both fields being compared on
5040 the left. If we can't, fail. Otherwise, update all constants and masks
5041 to be relative to a field of that size. */
5042 first_bit = MIN (ll_bitpos, rl_bitpos);
5043 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5044 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5045 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5046 volatilep);
5047 if (lnmode == VOIDmode)
5048 return 0;
5050 lnbitsize = GET_MODE_BITSIZE (lnmode);
5051 lnbitpos = first_bit & ~ (lnbitsize - 1);
5052 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5053 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5055 if (BYTES_BIG_ENDIAN)
5057 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5058 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5061 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5062 size_int (xll_bitpos), 0);
5063 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5064 size_int (xrl_bitpos), 0);
5066 if (l_const)
5068 l_const = fold_convert (lntype, l_const);
5069 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5070 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5071 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5072 fold_build1 (BIT_NOT_EXPR,
5073 lntype, ll_mask),
5074 0)))
5076 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5078 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5081 if (r_const)
5083 r_const = fold_convert (lntype, r_const);
5084 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5085 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5086 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5087 fold_build1 (BIT_NOT_EXPR,
5088 lntype, rl_mask),
5089 0)))
5091 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5093 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5097 /* If the right sides are not constant, do the same for them. Also,
5098 disallow this optimization if a size or signedness mismatch occurs
5099 between the left and right sides. */
5100 if (l_const == 0)
5102 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5103 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5104 /* Make sure the two fields on the right
5105 correspond to the left without being swapped. */
5106 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5107 return 0;
5109 first_bit = MIN (lr_bitpos, rr_bitpos);
5110 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5111 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5112 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5113 volatilep);
5114 if (rnmode == VOIDmode)
5115 return 0;
5117 rnbitsize = GET_MODE_BITSIZE (rnmode);
5118 rnbitpos = first_bit & ~ (rnbitsize - 1);
5119 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5120 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5122 if (BYTES_BIG_ENDIAN)
5124 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5125 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5128 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5129 size_int (xlr_bitpos), 0);
5130 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5131 size_int (xrr_bitpos), 0);
5133 /* Make a mask that corresponds to both fields being compared.
5134 Do this for both items being compared. If the operands are the
5135 same size and the bits being compared are in the same position
5136 then we can do this by masking both and comparing the masked
5137 results. */
5138 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5139 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5140 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5142 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5143 ll_unsignedp || rl_unsignedp);
5144 if (! all_ones_mask_p (ll_mask, lnbitsize))
5145 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5147 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5148 lr_unsignedp || rr_unsignedp);
5149 if (! all_ones_mask_p (lr_mask, rnbitsize))
5150 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5152 return build2 (wanted_code, truth_type, lhs, rhs);
5155 /* There is still another way we can do something: If both pairs of
5156 fields being compared are adjacent, we may be able to make a wider
5157 field containing them both.
5159 Note that we still must mask the lhs/rhs expressions. Furthermore,
5160 the mask must be shifted to account for the shift done by
5161 make_bit_field_ref. */
5162 if ((ll_bitsize + ll_bitpos == rl_bitpos
5163 && lr_bitsize + lr_bitpos == rr_bitpos)
5164 || (ll_bitpos == rl_bitpos + rl_bitsize
5165 && lr_bitpos == rr_bitpos + rr_bitsize))
5167 tree type;
5169 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5170 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5171 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5172 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5174 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5175 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5176 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5177 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5179 /* Convert to the smaller type before masking out unwanted bits. */
5180 type = lntype;
5181 if (lntype != rntype)
5183 if (lnbitsize > rnbitsize)
5185 lhs = fold_convert (rntype, lhs);
5186 ll_mask = fold_convert (rntype, ll_mask);
5187 type = rntype;
5189 else if (lnbitsize < rnbitsize)
5191 rhs = fold_convert (lntype, rhs);
5192 lr_mask = fold_convert (lntype, lr_mask);
5193 type = lntype;
5197 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5198 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5200 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5201 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5203 return build2 (wanted_code, truth_type, lhs, rhs);
5206 return 0;
5209 /* Handle the case of comparisons with constants. If there is something in
5210 common between the masks, those bits of the constants must be the same.
5211 If not, the condition is always false. Test for this to avoid generating
5212 incorrect code below. */
5213 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5214 if (! integer_zerop (result)
5215 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5216 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5218 if (wanted_code == NE_EXPR)
5220 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5221 return constant_boolean_node (true, truth_type);
5223 else
5225 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5226 return constant_boolean_node (false, truth_type);
5230 /* Construct the expression we will return. First get the component
5231 reference we will make. Unless the mask is all ones the width of
5232 that field, perform the mask operation. Then compare with the
5233 merged constant. */
5234 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5235 ll_unsignedp || rl_unsignedp);
5237 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5238 if (! all_ones_mask_p (ll_mask, lnbitsize))
5239 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5241 return build2 (wanted_code, truth_type, result,
5242 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
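/* End-to-end illustration (invented struct layout): given adjacent
   bit-fields

     struct s { unsigned a : 4; unsigned b : 4; } *p;

   the test "p->a == 2 && p->b == 4" can be folded by the code above
   into one load of the containing byte and one comparison against the
   merged constant, on a little-endian target conceptually

     *(unsigned char *) p == 0x42

   (the BIT_AND_EXPR is omitted when the mask covers every bit); the
   exact constant depends on BYTES_BIG_ENDIAN and on the mode chosen
   by get_best_mode.  */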
5245 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5246 constant. */
5248 static tree
5249 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5251 tree arg0 = op0;
5252 enum tree_code op_code;
5253 tree comp_const = op1;
5254 tree minmax_const;
5255 int consts_equal, consts_lt;
5256 tree inner;
5258 STRIP_SIGN_NOPS (arg0);
5260 op_code = TREE_CODE (arg0);
5261 minmax_const = TREE_OPERAND (arg0, 1);
5262 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5263 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5264 inner = TREE_OPERAND (arg0, 0);
5266 /* If something does not permit us to optimize, return the original tree. */
5267 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5268 || TREE_CODE (comp_const) != INTEGER_CST
5269 || TREE_CONSTANT_OVERFLOW (comp_const)
5270 || TREE_CODE (minmax_const) != INTEGER_CST
5271 || TREE_CONSTANT_OVERFLOW (minmax_const))
5272 return NULL_TREE;
5274 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5275 and GT_EXPR, doing the rest with recursive calls using logical
5276 simplifications. */
5277 switch (code)
5279 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5281 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5282 type, op0, op1);
5283 if (tem)
5284 return invert_truthvalue (tem);
5285 return NULL_TREE;
5288 case GE_EXPR:
5289 return
5290 fold_build2 (TRUTH_ORIF_EXPR, type,
5291 optimize_minmax_comparison
5292 (EQ_EXPR, type, arg0, comp_const),
5293 optimize_minmax_comparison
5294 (GT_EXPR, type, arg0, comp_const));
5296 case EQ_EXPR:
5297 if (op_code == MAX_EXPR && consts_equal)
5298 /* MAX (X, 0) == 0 -> X <= 0 */
5299 return fold_build2 (LE_EXPR, type, inner, comp_const);
5301 else if (op_code == MAX_EXPR && consts_lt)
5302 /* MAX (X, 0) == 5 -> X == 5 */
5303 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5305 else if (op_code == MAX_EXPR)
5306 /* MAX (X, 0) == -1 -> false */
5307 return omit_one_operand (type, integer_zero_node, inner);
5309 else if (consts_equal)
5310 /* MIN (X, 0) == 0 -> X >= 0 */
5311 return fold_build2 (GE_EXPR, type, inner, comp_const);
5313 else if (consts_lt)
5314 /* MIN (X, 0) == 5 -> false */
5315 return omit_one_operand (type, integer_zero_node, inner);
5317 else
5318 /* MIN (X, 0) == -1 -> X == -1 */
5319 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5321 case GT_EXPR:
5322 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5323 /* MAX (X, 0) > 0 -> X > 0
5324 MAX (X, 0) > 5 -> X > 5 */
5325 return fold_build2 (GT_EXPR, type, inner, comp_const);
5327 else if (op_code == MAX_EXPR)
5328 /* MAX (X, 0) > -1 -> true */
5329 return omit_one_operand (type, integer_one_node, inner);
5331 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5332 /* MIN (X, 0) > 0 -> false
5333 MIN (X, 0) > 5 -> false */
5334 return omit_one_operand (type, integer_zero_node, inner);
5336 else
5337 /* MIN (X, 0) > -1 -> X > -1 */
5338 return fold_build2 (GT_EXPR, type, inner, comp_const);
5340 default:
5341 return NULL_TREE;
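/* Example of the recursive strategy above (illustrative): the
   comparison "MIN (x, 10) < 5" is not handled directly; it is
   rewritten as the inversion of "MIN (x, 10) >= 5", the GE_EXPR case
   splits that into EQ and GT sub-problems yielding "x == 5 || x > 5",
   and inverting folds the whole thing back to the equivalent of
   "x < 5".  */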
5345 /* T is an integer expression that is being multiplied or divided by a
5346 constant C, or reduced modulo C (CODE says which operation, and what
5347 kind of divide or modulus). See if we can eliminate that operation by
5348 folding it with other operations already in T. WIDE_TYPE, if non-null,
5349 is a type that should be used for the computation if wider than our type.
5351 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5352 (X * 2) + (Y * 4). We must, however, be assured that either the original
5353 expression would not overflow or that overflow is undefined for the type
5354 in the language in question.
5356 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5357 the machine has a multiply-accumulate insn or that this is part of an
5358 addressing calculation.
5360 If we return a non-null expression, it is an equivalent form of the
5361 original computation, but need not be in the original type. */
5363 static tree
5364 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5366 /* To avoid exponential search depth, refuse to allow recursion past
5367 three levels. Beyond that (1) it's highly unlikely that we'll find
5368 something interesting and (2) we've probably processed it before
5369 when we built the inner expression. */
5371 static int depth;
5372 tree ret;
5374 if (depth > 3)
5375 return NULL;
5377 depth++;
5378 ret = extract_muldiv_1 (t, c, code, wide_type);
5379 depth--;
5381 return ret;
5384 static tree
5385 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5387 tree type = TREE_TYPE (t);
5388 enum tree_code tcode = TREE_CODE (t);
5389 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5390 > GET_MODE_SIZE (TYPE_MODE (type)))
5391 ? wide_type : type);
5392 tree t1, t2;
5393 int same_p = tcode == code;
5394 tree op0 = NULL_TREE, op1 = NULL_TREE;
5396 /* Don't deal with constants of zero here; they confuse the code below. */
5397 if (integer_zerop (c))
5398 return NULL_TREE;
5400 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5401 op0 = TREE_OPERAND (t, 0);
5403 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5404 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5406 /* Note that we need not handle conditional operations here since fold
5407 already handles those cases. So just do arithmetic here. */
5408 switch (tcode)
5410 case INTEGER_CST:
5411 /* For a constant, we can always simplify if we are a multiply
5412 or (for divide and modulus) if it is a multiple of our constant. */
5413 if (code == MULT_EXPR
5414 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5415 return const_binop (code, fold_convert (ctype, t),
5416 fold_convert (ctype, c), 0);
5417 break;
5419 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5420 /* If op0 is an expression ... */
5421 if ((COMPARISON_CLASS_P (op0)
5422 || UNARY_CLASS_P (op0)
5423 || BINARY_CLASS_P (op0)
5424 || EXPRESSION_CLASS_P (op0))
5425 /* ... and is unsigned, and its type is smaller than ctype,
5426 then we cannot pass through as widening. */
5427 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5428 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5429 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5430 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5431 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5432 /* ... or this is a truncation (t is narrower than op0),
5433 then we cannot pass through this narrowing. */
5434 || (GET_MODE_SIZE (TYPE_MODE (type))
5435 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5436 /* ... or signedness changes for division or modulus,
5437 then we cannot pass through this conversion. */
5438 || (code != MULT_EXPR
5439 && (TYPE_UNSIGNED (ctype)
5440 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5441 break;
5443 /* Pass the constant down and see if we can make a simplification. If
5444 we can, replace this expression with the inner simplification for
5445 possible later conversion to our or some other type. */
5446 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5447 && TREE_CODE (t2) == INTEGER_CST
5448 && ! TREE_CONSTANT_OVERFLOW (t2)
5449 && (0 != (t1 = extract_muldiv (op0, t2, code,
5450 code == MULT_EXPR
5451 ? ctype : NULL_TREE))))
5452 return t1;
5453 break;
5455 case ABS_EXPR:
5456 /* If widening the type changes it from signed to unsigned, then we
5457 must avoid building ABS_EXPR itself as unsigned. */
5458 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5460 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5461 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5463 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5464 return fold_convert (ctype, t1);
5466 break;
5468 /* FALLTHROUGH */
5469 case NEGATE_EXPR:
5470 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5471 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5472 break;
5474 case MIN_EXPR: case MAX_EXPR:
5475 /* If widening the type changes the signedness, then we can't perform
5476 this optimization as that changes the result. */
5477 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5478 break;
5480 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5481 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5482 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5484 if (tree_int_cst_sgn (c) < 0)
5485 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5487 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5488 fold_convert (ctype, t2));
5490 break;
5492 case LSHIFT_EXPR: case RSHIFT_EXPR:
5493 /* If the second operand is constant, this is a multiplication
5494 or floor division, by a power of two, so we can treat it that
5495 way unless the multiplier or divisor overflows. Signed
5496 left-shift overflow is implementation-defined rather than
5497 undefined in C90, so do not convert signed left shift into
5498 multiplication. */
5499 if (TREE_CODE (op1) == INTEGER_CST
5500 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5501 /* const_binop may not detect overflow correctly,
5502 so check for it explicitly here. */
5503 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5504 && TREE_INT_CST_HIGH (op1) == 0
5505 && 0 != (t1 = fold_convert (ctype,
5506 const_binop (LSHIFT_EXPR,
5507 size_one_node,
5508 op1, 0)))
5509 && ! TREE_OVERFLOW (t1))
5510 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5511 ? MULT_EXPR : FLOOR_DIV_EXPR,
5512 ctype, fold_convert (ctype, op0), t1),
5513 c, code, wide_type);
5514 break;
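/* Illustration of the shift rewrite above (invented values): for
   "(x << 3) / 4" with unsigned x, op1 == 3 is turned into the
   constant 8 and the expression is re-analyzed as "(x * 8) / 4",
   which the MULT_EXPR/division logic below reduces to "x * 2".  */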
5516 case PLUS_EXPR: case MINUS_EXPR:
5517 /* See if we can eliminate the operation on both sides. If we can, we
5518 can return a new PLUS or MINUS. If we can't, the only remaining
5519 case where we can do anything is when the second operand is a
5520 constant. */
5521 t1 = extract_muldiv (op0, c, code, wide_type);
5522 t2 = extract_muldiv (op1, c, code, wide_type);
5523 if (t1 != 0 && t2 != 0
5524 && (code == MULT_EXPR
5525 /* If not multiplication, we can only do this if both operands
5526 are divisible by c. */
5527 || (multiple_of_p (ctype, op0, c)
5528 && multiple_of_p (ctype, op1, c))))
5529 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5530 fold_convert (ctype, t2));
5532 /* If this was a subtraction, negate OP1 and set it to be an addition.
5533 This simplifies the logic below. */
5534 if (tcode == MINUS_EXPR)
5535 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5537 if (TREE_CODE (op1) != INTEGER_CST)
5538 break;
5540 /* If either OP1 or C is negative, this optimization is not safe for
5541 some of the division and remainder types, while for others we need
5542 to change the code. */
5543 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5545 if (code == CEIL_DIV_EXPR)
5546 code = FLOOR_DIV_EXPR;
5547 else if (code == FLOOR_DIV_EXPR)
5548 code = CEIL_DIV_EXPR;
5549 else if (code != MULT_EXPR
5550 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5551 break;
5554 /* If it's a multiply or a division/modulus operation of a multiple
5555 of our constant, do the operation and verify it doesn't overflow. */
5556 if (code == MULT_EXPR
5557 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5559 op1 = const_binop (code, fold_convert (ctype, op1),
5560 fold_convert (ctype, c), 0);
5561 /* We allow the constant to overflow with wrapping semantics. */
5562 if (op1 == 0
5563 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5564 break;
5566 else
5567 break;
5569 /* If we have an unsigned type that is not a sizetype, we cannot widen
5570 the operation since it will change the result if the original
5571 computation overflowed. */
5572 if (TYPE_UNSIGNED (ctype)
5573 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5574 && ctype != type)
5575 break;
5577 /* If we were able to eliminate our operation from the first side,
5578 apply our operation to the second side and reform the PLUS. */
5579 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5580 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5582 /* The last case is if we are a multiply. In that case, we can
5583 apply the distributive law to commute the multiply and addition
5584 if the multiplication of the constants doesn't overflow. */
5585 if (code == MULT_EXPR)
5586 return fold_build2 (tcode, ctype,
5587 fold_build2 (code, ctype,
5588 fold_convert (ctype, op0),
5589 fold_convert (ctype, c)),
5590 op1);
5592 break;
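/* Editorial example, not part of the original source: folding
   (a + 2) * 3 reaches this PLUS_EXPR case with tcode == PLUS_EXPR,
   code == MULT_EXPR, op1 == 2 and c == 3; the constant is multiplied
   up to 6 and the distributive-law branch rebuilds the tree as
   a * 3 + 6.  */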
5594 case MULT_EXPR:
5595 /* We have a special case here if we are doing something like
5596 (C * 8) % 4 since we know that's zero. */
5597 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5598 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5599 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5600 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5601 return omit_one_operand (type, integer_zero_node, op0);
5603 /* ... fall through ... */
5605 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5606 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5607 /* If we can extract our operation from the LHS, do so and return a
5608 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5609 do something only if the second operand is a constant. */
5610 if (same_p
5611 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5612 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5613 fold_convert (ctype, op1));
5614 else if (tcode == MULT_EXPR && code == MULT_EXPR
5615 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5616 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5617 fold_convert (ctype, t1));
5618 else if (TREE_CODE (op1) != INTEGER_CST)
5619 return 0;
5621 /* If these are the same operation types, we can associate them
5622 assuming no overflow. */
5623 if (tcode == code
5624 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5625 fold_convert (ctype, c), 0))
5626 && ! TREE_OVERFLOW (t1))
5627 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5629 /* If these operations "cancel" each other, we have the main
5630 optimizations of this pass, which occur when either constant is a
5631 multiple of the other, in which case we replace this with an
5632 operation of either CODE or TCODE.
5634 If we have an unsigned type that is not a sizetype, we cannot do
5635 this since it will change the result if the original computation
5636 overflowed. */
5637 if ((! TYPE_UNSIGNED (ctype)
5638 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5639 && ! flag_wrapv
5640 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5641 || (tcode == MULT_EXPR
5642 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5643 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5645 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5646 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5647 fold_convert (ctype,
5648 const_binop (TRUNC_DIV_EXPR,
5649 op1, c, 0)));
5650 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5651 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5652 fold_convert (ctype,
5653 const_binop (TRUNC_DIV_EXPR,
5654 c, op1, 0)));
5656 break;
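/* Editorial example, not part of the original source: for a signed
   int x (where overflow is undefined and -fwrapv is off), (x * 6) / 3
   matches tcode == MULT_EXPR, code == TRUNC_DIV_EXPR with op1 == 6
   and c == 3; since 6 % 3 == 0 the first branch returns x * 2.
   Conversely (x * 2) / 6 takes the second branch, as 6 % 2 == 0,
   and becomes x / 3.  */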
5658 default:
5659 break;
5662 return 0;
5665 /* Return a node which has the indicated constant VALUE (either 0 or
5666 1), and is of the indicated TYPE. */
5668 tree
5669 constant_boolean_node (int value, tree type)
5671 if (type == integer_type_node)
5672 return value ? integer_one_node : integer_zero_node;
5673 else if (type == boolean_type_node)
5674 return value ? boolean_true_node : boolean_false_node;
5675 else
5676 return build_int_cst (type, value);
5680 /* Return true if expr looks like an ARRAY_REF and set base and
5681 offset to the appropriate trees. If there is no offset,
5682 offset is set to NULL_TREE. Base will be canonicalized to
5683 something you can get the element type from using
5684 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5685 in bytes to the base. */
5687 static bool
5688 extract_array_ref (tree expr, tree *base, tree *offset)
5690 /* One canonical form is a PLUS_EXPR with the first
5691 argument being an ADDR_EXPR with a possible NOP_EXPR
5692 attached. */
5693 if (TREE_CODE (expr) == PLUS_EXPR)
5695 tree op0 = TREE_OPERAND (expr, 0);
5696 tree inner_base, dummy1;
5697 /* Strip NOP_EXPRs here because the C frontends and/or
5698 folders may present us with (int *)&x.a + 4B. */
5699 STRIP_NOPS (op0);
5700 if (extract_array_ref (op0, &inner_base, &dummy1))
5702 *base = inner_base;
5703 if (dummy1 == NULL_TREE)
5704 *offset = TREE_OPERAND (expr, 1);
5705 else
5706 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5707 dummy1, TREE_OPERAND (expr, 1));
5708 return true;
5711 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5712 which we transform into an ADDR_EXPR with appropriate
5713 offset. For other arguments to the ADDR_EXPR we assume
5714 zero offset and as such do not care about the ADDR_EXPR
5715 type and strip possible nops from it. */
5716 else if (TREE_CODE (expr) == ADDR_EXPR)
5718 tree op0 = TREE_OPERAND (expr, 0);
5719 if (TREE_CODE (op0) == ARRAY_REF)
5721 tree idx = TREE_OPERAND (op0, 1);
5722 *base = TREE_OPERAND (op0, 0);
5723 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5724 array_ref_element_size (op0));
5726 else
5728 /* Handle array-to-pointer decay as &a. */
5729 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5730 *base = TREE_OPERAND (expr, 0);
5731 else
5732 *base = expr;
5733 *offset = NULL_TREE;
5735 return true;
5737 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5738 else if (SSA_VAR_P (expr)
5739 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5741 *base = expr;
5742 *offset = NULL_TREE;
5743 return true;
5746 return false;
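/* Editorial example, not part of the original source: for
   `int a[10];', the expression &a[3] is matched by the ADDR_EXPR
   case and yields base a with offset 3 * sizeof (int), while a
   pointer-typed variable expression such as p + 8 recurses on p
   and contributes 8 to the resulting offset.  */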
5750 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5751 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5752 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5753 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5754 COND is the first argument to CODE; otherwise (as in the example
5755 given here), it is the second argument. TYPE is the type of the
5756 original expression. Return NULL_TREE if no simplification is
5757 possible. */
5759 static tree
5760 fold_binary_op_with_conditional_arg (enum tree_code code,
5761 tree type, tree op0, tree op1,
5762 tree cond, tree arg, int cond_first_p)
5764 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5765 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5766 tree test, true_value, false_value;
5767 tree lhs = NULL_TREE;
5768 tree rhs = NULL_TREE;
5770 /* This transformation is only worthwhile if we don't have to wrap
5771 arg in a SAVE_EXPR, and the operation can be simplified on at least
5772 one of the branches once it's pushed inside the COND_EXPR. */
5773 if (!TREE_CONSTANT (arg))
5774 return NULL_TREE;
5776 if (TREE_CODE (cond) == COND_EXPR)
5778 test = TREE_OPERAND (cond, 0);
5779 true_value = TREE_OPERAND (cond, 1);
5780 false_value = TREE_OPERAND (cond, 2);
5781 /* If this operand is an expression that throws, then it does not make
5782 sense to try to perform a logical or arithmetic operation
5783 involving it. */
5784 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5785 lhs = true_value;
5786 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5787 rhs = false_value;
5789 else
5791 tree testtype = TREE_TYPE (cond);
5792 test = cond;
5793 true_value = constant_boolean_node (true, testtype);
5794 false_value = constant_boolean_node (false, testtype);
5797 arg = fold_convert (arg_type, arg);
5798 if (lhs == 0)
5800 true_value = fold_convert (cond_type, true_value);
5801 if (cond_first_p)
5802 lhs = fold_build2 (code, type, true_value, arg);
5803 else
5804 lhs = fold_build2 (code, type, arg, true_value);
5806 if (rhs == 0)
5808 false_value = fold_convert (cond_type, false_value);
5809 if (cond_first_p)
5810 rhs = fold_build2 (code, type, false_value, arg);
5811 else
5812 rhs = fold_build2 (code, type, arg, false_value);
5815 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5816 return fold_convert (type, test);
5820 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5822 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5823 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5824 ADDEND is the same as X.
5826 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5827 and finite. The problematic cases are when X is zero, and its mode
5828 has signed zeros. In the case of rounding towards -infinity,
5829 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5830 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5832 static bool
5833 fold_real_zero_addition_p (tree type, tree addend, int negate)
5835 if (!real_zerop (addend))
5836 return false;
5838 /* Don't allow the fold with -fsignaling-nans. */
5839 if (HONOR_SNANS (TYPE_MODE (type)))
5840 return false;
5842 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5843 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5844 return true;
5846 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5847 if (TREE_CODE (addend) == REAL_CST
5848 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5849 negate = !negate;
5851 /* The mode has signed zeros, and we have to honor their sign.
5852 In this situation, there is only one case we can return true for.
5853 X - 0 is the same as X unless rounding towards -infinity is
5854 supported. */
5855 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
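/* Editorial example, not part of the original source: when signed
   zeros need not be honored, both x + 0.0 and x - 0.0 fold to x.
   With signed zeros honored, only the subtraction form x - 0.0 (or
   equivalently x + (-0.0)) survives, and then only when
   sign-dependent rounding is off, since rounding toward -infinity
   makes 0.0 - 0.0 yield -0.0.  */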
5858 /* Subroutine of fold() that checks comparisons of built-in math
5859 functions against real constants.
5861 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5862 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5863 is the type of the result and ARG0 and ARG1 are the operands of the
5864 comparison. ARG1 must be a TREE_REAL_CST.
5866 The function returns the constant folded tree if a simplification
5867 can be made, and NULL_TREE otherwise. */
5869 static tree
5870 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5871 tree type, tree arg0, tree arg1)
5873 REAL_VALUE_TYPE c;
5875 if (BUILTIN_SQRT_P (fcode))
5877 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5878 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5880 c = TREE_REAL_CST (arg1);
5881 if (REAL_VALUE_NEGATIVE (c))
5883 /* sqrt(x) < y is always false, if y is negative. */
5884 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5885 return omit_one_operand (type, integer_zero_node, arg);
5887 /* sqrt(x) > y is always true, if y is negative and we
5888 don't care about NaNs, i.e. negative values of x. */
5889 if (code == NE_EXPR || !HONOR_NANS (mode))
5890 return omit_one_operand (type, integer_one_node, arg);
5892 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5893 return fold_build2 (GE_EXPR, type, arg,
5894 build_real (TREE_TYPE (arg), dconst0));
5896 else if (code == GT_EXPR || code == GE_EXPR)
5898 REAL_VALUE_TYPE c2;
5900 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5901 real_convert (&c2, mode, &c2);
5903 if (REAL_VALUE_ISINF (c2))
5905 /* sqrt(x) > y is x == +Inf, when y is very large. */
5906 if (HONOR_INFINITIES (mode))
5907 return fold_build2 (EQ_EXPR, type, arg,
5908 build_real (TREE_TYPE (arg), c2));
5910 /* sqrt(x) > y is always false, when y is very large
5911 and we don't care about infinities. */
5912 return omit_one_operand (type, integer_zero_node, arg);
5915 /* sqrt(x) > c is the same as x > c*c. */
5916 return fold_build2 (code, type, arg,
5917 build_real (TREE_TYPE (arg), c2));
5919 else if (code == LT_EXPR || code == LE_EXPR)
5921 REAL_VALUE_TYPE c2;
5923 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5924 real_convert (&c2, mode, &c2);
5926 if (REAL_VALUE_ISINF (c2))
5928 /* sqrt(x) < y is always true, when y is a very large
5929 value and we don't care about NaNs or Infinities. */
5930 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5931 return omit_one_operand (type, integer_one_node, arg);
5933 /* sqrt(x) < y is x != +Inf when y is very large and we
5934 don't care about NaNs. */
5935 if (! HONOR_NANS (mode))
5936 return fold_build2 (NE_EXPR, type, arg,
5937 build_real (TREE_TYPE (arg), c2));
5939 /* sqrt(x) < y is x >= 0 when y is very large and we
5940 don't care about Infinities. */
5941 if (! HONOR_INFINITIES (mode))
5942 return fold_build2 (GE_EXPR, type, arg,
5943 build_real (TREE_TYPE (arg), dconst0));
5945 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5946 if (lang_hooks.decls.global_bindings_p () != 0
5947 || CONTAINS_PLACEHOLDER_P (arg))
5948 return NULL_TREE;
5950 arg = save_expr (arg);
5951 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5952 fold_build2 (GE_EXPR, type, arg,
5953 build_real (TREE_TYPE (arg),
5954 dconst0)),
5955 fold_build2 (NE_EXPR, type, arg,
5956 build_real (TREE_TYPE (arg),
5957 c2)));
5960 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5961 if (! HONOR_NANS (mode))
5962 return fold_build2 (code, type, arg,
5963 build_real (TREE_TYPE (arg), c2));
5965 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5966 if (lang_hooks.decls.global_bindings_p () == 0
5967 && ! CONTAINS_PLACEHOLDER_P (arg))
5969 arg = save_expr (arg);
5970 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5971 fold_build2 (GE_EXPR, type, arg,
5972 build_real (TREE_TYPE (arg),
5973 dconst0)),
5974 fold_build2 (code, type, arg,
5975 build_real (TREE_TYPE (arg),
5976 c2)));
5981 return NULL_TREE;
5984 /* Subroutine of fold() that optimizes comparisons against Infinities,
5985 either +Inf or -Inf.
5987 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5988 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5989 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5991 The function returns the constant folded tree if a simplification
5992 can be made, and NULL_TREE otherwise. */
5994 static tree
5995 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5997 enum machine_mode mode;
5998 REAL_VALUE_TYPE max;
5999 tree temp;
6000 bool neg;
6002 mode = TYPE_MODE (TREE_TYPE (arg0));
6004 /* For negative infinity swap the sense of the comparison. */
6005 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6006 if (neg)
6007 code = swap_tree_comparison (code);
6009 switch (code)
6011 case GT_EXPR:
6012 /* x > +Inf is always false, if we ignore sNaNs. */
6013 if (HONOR_SNANS (mode))
6014 return NULL_TREE;
6015 return omit_one_operand (type, integer_zero_node, arg0);
6017 case LE_EXPR:
6018 /* x <= +Inf is always true, if we don't care about NaNs. */
6019 if (! HONOR_NANS (mode))
6020 return omit_one_operand (type, integer_one_node, arg0);
6022 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6023 if (lang_hooks.decls.global_bindings_p () == 0
6024 && ! CONTAINS_PLACEHOLDER_P (arg0))
6026 arg0 = save_expr (arg0);
6027 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6029 break;
6031 case EQ_EXPR:
6032 case GE_EXPR:
6033 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6034 real_maxval (&max, neg, mode);
6035 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6036 arg0, build_real (TREE_TYPE (arg0), max));
6038 case LT_EXPR:
6039 /* x < +Inf is always equal to x <= DBL_MAX. */
6040 real_maxval (&max, neg, mode);
6041 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6042 arg0, build_real (TREE_TYPE (arg0), max));
6044 case NE_EXPR:
6045 /* x != +Inf is always equal to !(x > DBL_MAX). */
6046 real_maxval (&max, neg, mode);
6047 if (! HONOR_NANS (mode))
6048 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6049 arg0, build_real (TREE_TYPE (arg0), max));
6051 /* The transformation below creates non-gimple code and thus is
6052 not appropriate if we are in gimple form. */
6053 if (in_gimple_form)
6054 return NULL_TREE;
6056 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6057 arg0, build_real (TREE_TYPE (arg0), max));
6058 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6060 default:
6061 break;
6064 return NULL_TREE;
6067 /* Subroutine of fold() that optimizes comparisons of a division by
6068 a nonzero integer constant against an integer constant, i.e.
6069 X/C1 op C2.
6071 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6072 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6073 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6075 The function returns the constant folded tree if a simplification
6076 can be made, and NULL_TREE otherwise. */
6078 static tree
6079 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6081 tree prod, tmp, hi, lo;
6082 tree arg00 = TREE_OPERAND (arg0, 0);
6083 tree arg01 = TREE_OPERAND (arg0, 1);
6084 unsigned HOST_WIDE_INT lpart;
6085 HOST_WIDE_INT hpart;
6086 bool neg_overflow;
6087 int overflow;
6089 /* We have to do this the hard way to detect unsigned overflow.
6090 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6091 overflow = mul_double (TREE_INT_CST_LOW (arg01),
6092 TREE_INT_CST_HIGH (arg01),
6093 TREE_INT_CST_LOW (arg1),
6094 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
6095 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6096 prod = force_fit_type (prod, -1, overflow, false);
6097 neg_overflow = false;
6099 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
6101 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6102 lo = prod;
6104 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6105 overflow = add_double (TREE_INT_CST_LOW (prod),
6106 TREE_INT_CST_HIGH (prod),
6107 TREE_INT_CST_LOW (tmp),
6108 TREE_INT_CST_HIGH (tmp),
6109 &lpart, &hpart);
6110 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6111 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6112 TREE_CONSTANT_OVERFLOW (prod));
6114 else if (tree_int_cst_sgn (arg01) >= 0)
6116 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6117 switch (tree_int_cst_sgn (arg1))
6119 case -1:
6120 neg_overflow = true;
6121 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6122 hi = prod;
6123 break;
6125 case 0:
6126 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6127 hi = tmp;
6128 break;
6130 case 1:
6131 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6132 lo = prod;
6133 break;
6135 default:
6136 gcc_unreachable ();
6139 else
6141 /* A negative divisor reverses the relational operators. */
6142 code = swap_tree_comparison (code);
6144 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6145 switch (tree_int_cst_sgn (arg1))
6147 case -1:
6148 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6149 lo = prod;
6150 break;
6152 case 0:
6153 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6154 lo = tmp;
6155 break;
6157 case 1:
6158 neg_overflow = true;
6159 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6160 hi = prod;
6161 break;
6163 default:
6164 gcc_unreachable ();
6168 switch (code)
6170 case EQ_EXPR:
6171 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6172 return omit_one_operand (type, integer_zero_node, arg00);
6173 if (TREE_OVERFLOW (hi))
6174 return fold_build2 (GE_EXPR, type, arg00, lo);
6175 if (TREE_OVERFLOW (lo))
6176 return fold_build2 (LE_EXPR, type, arg00, hi);
6177 return build_range_check (type, arg00, 1, lo, hi);
6179 case NE_EXPR:
6180 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6181 return omit_one_operand (type, integer_one_node, arg00);
6182 if (TREE_OVERFLOW (hi))
6183 return fold_build2 (LT_EXPR, type, arg00, lo);
6184 if (TREE_OVERFLOW (lo))
6185 return fold_build2 (GT_EXPR, type, arg00, hi);
6186 return build_range_check (type, arg00, 0, lo, hi);
6188 case LT_EXPR:
6189 if (TREE_OVERFLOW (lo))
6191 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6192 return omit_one_operand (type, tmp, arg00);
6194 return fold_build2 (LT_EXPR, type, arg00, lo);
6196 case LE_EXPR:
6197 if (TREE_OVERFLOW (hi))
6199 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6200 return omit_one_operand (type, tmp, arg00);
6202 return fold_build2 (LE_EXPR, type, arg00, hi);
6204 case GT_EXPR:
6205 if (TREE_OVERFLOW (hi))
6207 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6208 return omit_one_operand (type, tmp, arg00);
6210 return fold_build2 (GT_EXPR, type, arg00, hi);
6212 case GE_EXPR:
6213 if (TREE_OVERFLOW (lo))
6215 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6216 return omit_one_operand (type, tmp, arg00);
6218 return fold_build2 (GE_EXPR, type, arg00, lo);
6220 default:
6221 break;
6224 return NULL_TREE;
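/* Editorial worked example, not part of the original source: for an
   unsigned x, the test x / 4 == 2 computes prod = 8, lo = 8 and
   hi = 8 + (4 - 1) = 11, so the EQ_EXPR case folds it into the
   range check 8 <= x && x <= 11.  */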
6228 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6229 equality/inequality test, then return a simplified form of the test
6230 using a sign test. Otherwise return NULL. RESULT_TYPE is the desired
6231 result type. */
6233 static tree
6234 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6235 tree result_type)
6237 /* If this is testing a single bit, we can optimize the test. */
6238 if ((code == NE_EXPR || code == EQ_EXPR)
6239 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6240 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6242 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6243 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6244 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6246 if (arg00 != NULL_TREE
6247 /* This is only a win if casting to a signed type is cheap,
6248 i.e. when arg00's type is not a partial mode. */
6249 && TYPE_PRECISION (TREE_TYPE (arg00))
6250 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6252 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6253 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6254 result_type, fold_convert (stype, arg00),
6255 build_int_cst (stype, 0));
6259 return NULL_TREE;
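/* Editorial example, not part of the original source: for a 32-bit
   unsigned x, (x & 0x80000000) != 0 tests exactly the sign bit, so
   it is rewritten above as (int) x < 0, and the == 0 form as
   (int) x >= 0.  */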
6262 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6263 equality/inequality test, then return a simplified form of
6264 the test using shifts and logical operations. Otherwise return
6265 NULL. RESULT_TYPE is the desired result type. */
6267 tree
6268 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6269 tree result_type)
6271 /* If this is testing a single bit, we can optimize the test. */
6272 if ((code == NE_EXPR || code == EQ_EXPR)
6273 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6274 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6276 tree inner = TREE_OPERAND (arg0, 0);
6277 tree type = TREE_TYPE (arg0);
6278 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6279 enum machine_mode operand_mode = TYPE_MODE (type);
6280 int ops_unsigned;
6281 tree signed_type, unsigned_type, intermediate_type;
6282 tree tem;
6284 /* First, see if we can fold the single bit test into a sign-bit
6285 test. */
6286 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6287 result_type);
6288 if (tem)
6289 return tem;
6291 /* Otherwise we have (A & C) != 0 where C is a single bit,
6292 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6293 Similarly for (A & C) == 0. */
6295 /* If INNER is a right shift by a constant and it plus BITNUM does
6296 not overflow, adjust BITNUM and INNER. */
6297 if (TREE_CODE (inner) == RSHIFT_EXPR
6298 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6299 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6300 && bitnum < TYPE_PRECISION (type)
6301 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6302 bitnum - TYPE_PRECISION (type)))
6304 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6305 inner = TREE_OPERAND (inner, 0);
6308 /* If we are going to be able to omit the AND below, we must do our
6309 operations as unsigned. If we must use the AND, we have a choice.
6310 Normally unsigned is faster, but for some machines signed is. */
6311 #ifdef LOAD_EXTEND_OP
6312 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6313 && !flag_syntax_only) ? 0 : 1;
6314 #else
6315 ops_unsigned = 1;
6316 #endif
6318 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6319 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6320 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6321 inner = fold_convert (intermediate_type, inner);
6323 if (bitnum != 0)
6324 inner = build2 (RSHIFT_EXPR, intermediate_type,
6325 inner, size_int (bitnum));
6327 if (code == EQ_EXPR)
6328 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6329 inner, integer_one_node);
6331 /* Put the AND last so it can combine with more things. */
6332 inner = build2 (BIT_AND_EXPR, intermediate_type,
6333 inner, integer_one_node);
6335 /* Make sure to return the proper type. */
6336 inner = fold_convert (result_type, inner);
6338 return inner;
6340 return NULL_TREE;
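/* Editorial example, not part of the original source: testing bit 3,
   (x & 8) != 0 becomes ((x >> 3) & 1) in an unsigned intermediate
   type, while the == 0 form gets an extra BIT_XOR_EXPR, so
   (x & 8) == 0 becomes (((x >> 3) ^ 1) & 1).  */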
6343 /* Check whether we are allowed to reorder operands arg0 and arg1,
6344 such that the evaluation of arg1 occurs before arg0. */
6346 static bool
6347 reorder_operands_p (tree arg0, tree arg1)
6349 if (! flag_evaluation_order)
6350 return true;
6351 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6352 return true;
6353 return ! TREE_SIDE_EFFECTS (arg0)
6354 && ! TREE_SIDE_EFFECTS (arg1);
6357 /* Test whether it is preferable to swap two operands, ARG0 and
6358 ARG1, for example because ARG0 is an integer constant and ARG1
6359 isn't. If REORDER is true, only recommend swapping if we can
6360 evaluate the operands in reverse order. */
6362 bool
6363 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6365 STRIP_SIGN_NOPS (arg0);
6366 STRIP_SIGN_NOPS (arg1);
6368 if (TREE_CODE (arg1) == INTEGER_CST)
6369 return 0;
6370 if (TREE_CODE (arg0) == INTEGER_CST)
6371 return 1;
6373 if (TREE_CODE (arg1) == REAL_CST)
6374 return 0;
6375 if (TREE_CODE (arg0) == REAL_CST)
6376 return 1;
6378 if (TREE_CODE (arg1) == COMPLEX_CST)
6379 return 0;
6380 if (TREE_CODE (arg0) == COMPLEX_CST)
6381 return 1;
6383 if (TREE_CONSTANT (arg1))
6384 return 0;
6385 if (TREE_CONSTANT (arg0))
6386 return 1;
6388 if (optimize_size)
6389 return 0;
6391 if (reorder && flag_evaluation_order
6392 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6393 return 0;
6395 if (DECL_P (arg1))
6396 return 0;
6397 if (DECL_P (arg0))
6398 return 1;
6400 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6401 for commutative and comparison operators. Ensuring a canonical
6402 form allows the optimizers to find additional redundancies without
6403 having to explicitly check for both orderings. */
6404 if (TREE_CODE (arg0) == SSA_NAME
6405 && TREE_CODE (arg1) == SSA_NAME
6406 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6407 return 1;
6409 return 0;
6412 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6413 ARG0 is extended to a wider type. */
6415 static tree
6416 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6418 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6419 tree arg1_unw;
6420 tree shorter_type, outer_type;
6421 tree min, max;
6422 bool above, below;
6424 if (arg0_unw == arg0)
6425 return NULL_TREE;
6426 shorter_type = TREE_TYPE (arg0_unw);
6428 #ifdef HAVE_canonicalize_funcptr_for_compare
6429 /* Disable this optimization if we're casting a function pointer
6430 type on targets that require function pointer canonicalization. */
6431 if (HAVE_canonicalize_funcptr_for_compare
6432 && TREE_CODE (shorter_type) == POINTER_TYPE
6433 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6434 return NULL_TREE;
6435 #endif
6437 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6438 return NULL_TREE;
6440 arg1_unw = get_unwidened (arg1, shorter_type);
6442 /* If possible, express the comparison in the shorter mode. */
6443 if ((code == EQ_EXPR || code == NE_EXPR
6444 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6445 && (TREE_TYPE (arg1_unw) == shorter_type
6446 || (TREE_CODE (arg1_unw) == INTEGER_CST
6447 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6448 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6449 && int_fits_type_p (arg1_unw, shorter_type))))
6450 return fold_build2 (code, type, arg0_unw,
6451 fold_convert (shorter_type, arg1_unw));
6453 if (TREE_CODE (arg1_unw) != INTEGER_CST
6454 || TREE_CODE (shorter_type) != INTEGER_TYPE
6455 || !int_fits_type_p (arg1_unw, shorter_type))
6456 return NULL_TREE;
6458 /* If we are comparing with an integer that does not fit into the range
6459 of the shorter type, the result is known. */
6460 outer_type = TREE_TYPE (arg1_unw);
6461 min = lower_bound_in_type (outer_type, shorter_type);
6462 max = upper_bound_in_type (outer_type, shorter_type);
6464 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6465 max, arg1_unw));
6466 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6467 arg1_unw, min));
6469 switch (code)
6471 case EQ_EXPR:
6472 if (above || below)
6473 return omit_one_operand (type, integer_zero_node, arg0);
6474 break;
6476 case NE_EXPR:
6477 if (above || below)
6478 return omit_one_operand (type, integer_one_node, arg0);
6479 break;
6481 case LT_EXPR:
6482 case LE_EXPR:
6483 if (above)
6484 return omit_one_operand (type, integer_one_node, arg0);
6485 else if (below)
6486 return omit_one_operand (type, integer_zero_node, arg0);
6488 case GT_EXPR:
6489 case GE_EXPR:
6490 if (above)
6491 return omit_one_operand (type, integer_zero_node, arg0);
6492 else if (below)
6493 return omit_one_operand (type, integer_one_node, arg0);
6495 default:
6496 break;
6499 return NULL_TREE;
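/* Editorial example, not part of the original source: for a
   signed char c, the comparison (int) c == 1000 is decided here;
   1000 lies above the upper bound 127 of the shorter type, so the
   EQ_EXPR case folds the whole comparison to 0 (via
   omit_one_operand, which preserves any side effects of the
   operand).  */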
6502 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6503 ARG0 just the signedness is changed. */
6505 static tree
6506 fold_sign_changed_comparison (enum tree_code code, tree type,
6507 tree arg0, tree arg1)
6509 tree arg0_inner, tmp;
6510 tree inner_type, outer_type;
6512 if (TREE_CODE (arg0) != NOP_EXPR
6513 && TREE_CODE (arg0) != CONVERT_EXPR)
6514 return NULL_TREE;
6516 outer_type = TREE_TYPE (arg0);
6517 arg0_inner = TREE_OPERAND (arg0, 0);
6518 inner_type = TREE_TYPE (arg0_inner);
6520 #ifdef HAVE_canonicalize_funcptr_for_compare
6521 /* Disable this optimization if we're casting a function pointer
6522 type on targets that require function pointer canonicalization. */
6523 if (HAVE_canonicalize_funcptr_for_compare
6524 && TREE_CODE (inner_type) == POINTER_TYPE
6525 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6526 return NULL_TREE;
6527 #endif
6529 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6530 return NULL_TREE;
6532 if (TREE_CODE (arg1) != INTEGER_CST
6533 && !((TREE_CODE (arg1) == NOP_EXPR
6534 || TREE_CODE (arg1) == CONVERT_EXPR)
6535 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6536 return NULL_TREE;
6538 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6539 && code != NE_EXPR
6540 && code != EQ_EXPR)
6541 return NULL_TREE;
6543 if (TREE_CODE (arg1) == INTEGER_CST)
6545 tmp = build_int_cst_wide (inner_type,
6546 TREE_INT_CST_LOW (arg1),
6547 TREE_INT_CST_HIGH (arg1));
6548 arg1 = force_fit_type (tmp, 0,
6549 TREE_OVERFLOW (arg1),
6550 TREE_CONSTANT_OVERFLOW (arg1));
6552 else
6553 arg1 = fold_convert (inner_type, arg1);
6555 return fold_build2 (code, type, arg0_inner, arg1);
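/* Editorial example, not part of the original source: for an int x,
   the test (unsigned int) x == 5u changes only the signedness and
   not the precision, so it is rewritten as x == 5 with the constant
   refitted into the inner signed type.  */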
6558 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6559 the step of the array. Reconstructs s and delta in the case of s * delta
6560 being an integer constant (and thus already folded).
6561 ADDR is the address. OP1 is the multiplicative expression.
6562 If the function succeeds, the new address expression is returned. Otherwise
6563 NULL_TREE is returned. */
6565 static tree
6566 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6568 tree s, delta, step;
6569 tree ref = TREE_OPERAND (addr, 0), pref;
6570 tree ret, pos;
6571 tree itype;
6573 /* Canonicalize op1 into a possibly non-constant delta
6574 and an INTEGER_CST s. */
6575 if (TREE_CODE (op1) == MULT_EXPR)
6577 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6579 STRIP_NOPS (arg0);
6580 STRIP_NOPS (arg1);
6582 if (TREE_CODE (arg0) == INTEGER_CST)
6584 s = arg0;
6585 delta = arg1;
6587 else if (TREE_CODE (arg1) == INTEGER_CST)
6589 s = arg1;
6590 delta = arg0;
6592 else
6593 return NULL_TREE;
6595 else if (TREE_CODE (op1) == INTEGER_CST)
6597 delta = op1;
6598 s = NULL_TREE;
6600 else
6602 /* Treat OP1 as delta * 1. */
6603 delta = op1;
6604 s = integer_one_node;
6607 for (;; ref = TREE_OPERAND (ref, 0))
6609 if (TREE_CODE (ref) == ARRAY_REF)
6611 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6612 if (! itype)
6613 continue;
6615 step = array_ref_element_size (ref);
6616 if (TREE_CODE (step) != INTEGER_CST)
6617 continue;
6619 if (s)
6621 if (! tree_int_cst_equal (step, s))
6622 continue;
6624 else
6626 /* Check whether delta is a multiple of step. */
6627 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6628 if (! tmp)
6629 continue;
6630 delta = tmp;
6633 break;
6636 if (!handled_component_p (ref))
6637 return NULL_TREE;
6640 /* We found a suitable array reference. Copy everything up to it,
6641 and replace the index. */
6643 pref = TREE_OPERAND (addr, 0);
6644 ret = copy_node (pref);
6645 pos = ret;
6647 while (pref != ref)
6649 pref = TREE_OPERAND (pref, 0);
6650 TREE_OPERAND (pos, 0) = copy_node (pref);
6651 pos = TREE_OPERAND (pos, 0);
6654 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6655 fold_convert (itype,
6656 TREE_OPERAND (pos, 1)),
6657 fold_convert (itype, delta));
6659 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
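/* Editorial example, not part of the original source: for
   `int a[n];' with 4-byte int, the address &a[i] + j * 4 matches an
   ARRAY_REF whose step is 4 and is rebuilt as &a[i + j]; a constant
   such as &a[i] + 8 is divided by the step instead and becomes
   &a[i + 2].  */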
6663 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6664 means A >= Y && A != MAX, but in this case we know that
6665 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6667 static tree
6668 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6670 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6672 if (TREE_CODE (bound) == LT_EXPR)
6673 a = TREE_OPERAND (bound, 0);
6674 else if (TREE_CODE (bound) == GT_EXPR)
6675 a = TREE_OPERAND (bound, 1);
6676 else
6677 return NULL_TREE;
6679 typea = TREE_TYPE (a);
6680 if (!INTEGRAL_TYPE_P (typea)
6681 && !POINTER_TYPE_P (typea))
6682 return NULL_TREE;
6684 if (TREE_CODE (ineq) == LT_EXPR)
6686 a1 = TREE_OPERAND (ineq, 1);
6687 y = TREE_OPERAND (ineq, 0);
6689 else if (TREE_CODE (ineq) == GT_EXPR)
6691 a1 = TREE_OPERAND (ineq, 0);
6692 y = TREE_OPERAND (ineq, 1);
6694 else
6695 return NULL_TREE;
6697 if (TREE_TYPE (a1) != typea)
6698 return NULL_TREE;
6700 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6701 if (!integer_onep (diff))
6702 return NULL_TREE;
6704 return fold_build2 (GE_EXPR, type, a, y);
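/* Editorial example, not part of the original source: with BOUND
   being i < n and INEQ being i + 1 > m, diff = (i + 1) - i folds to
   1, so the function returns the nonsharp form i >= m.  */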
6707 /* Fold a sum or difference of at least one multiplication.
6708 Returns the folded tree or NULL if no simplification could be made. */
6710 static tree
6711 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6713 tree arg00, arg01, arg10, arg11;
6714 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6716 /* (A * C) +- (B * C) -> (A+-B) * C.
6717 (A * C) +- A -> A * (C+-1).
6718 We are most concerned about the case where C is a constant,
6719 but other combinations show up during loop reduction. Since
6720 it is not difficult, try all four possibilities. */
6722 if (TREE_CODE (arg0) == MULT_EXPR)
6724 arg00 = TREE_OPERAND (arg0, 0);
6725 arg01 = TREE_OPERAND (arg0, 1);
6727 else
6729 arg00 = arg0;
6730 arg01 = build_one_cst (type);
6732 if (TREE_CODE (arg1) == MULT_EXPR)
6734 arg10 = TREE_OPERAND (arg1, 0);
6735 arg11 = TREE_OPERAND (arg1, 1);
6737 else
6739 arg10 = arg1;
6740 arg11 = build_one_cst (type);
6742 same = NULL_TREE;
6744 if (operand_equal_p (arg01, arg11, 0))
6745 same = arg01, alt0 = arg00, alt1 = arg10;
6746 else if (operand_equal_p (arg00, arg10, 0))
6747 same = arg00, alt0 = arg01, alt1 = arg11;
6748 else if (operand_equal_p (arg00, arg11, 0))
6749 same = arg00, alt0 = arg01, alt1 = arg10;
6750 else if (operand_equal_p (arg01, arg10, 0))
6751 same = arg01, alt0 = arg00, alt1 = arg11;
6753 /* No identical multiplicands; see if we can find a common
6754 power-of-two factor in non-power-of-two multiplies. This
6755 can help in multi-dimensional array access. */
6756 else if (host_integerp (arg01, 0)
6757 && host_integerp (arg11, 0))
6759 HOST_WIDE_INT int01, int11, tmp;
6760 bool swap = false;
6761 tree maybe_same;
6762 int01 = TREE_INT_CST_LOW (arg01);
6763 int11 = TREE_INT_CST_LOW (arg11);
6765 /* Move min of absolute values to int11. */
6766 if ((int01 >= 0 ? int01 : -int01)
6767 < (int11 >= 0 ? int11 : -int11))
6769 tmp = int01, int01 = int11, int11 = tmp;
6770 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6771 maybe_same = arg01;
6772 swap = true;
6774 else
6775 maybe_same = arg11;
6777 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6779 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6780 build_int_cst (TREE_TYPE (arg00),
6781 int01 / int11));
6782 alt1 = arg10;
6783 same = maybe_same;
6784 if (swap)
6785 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6789 if (same)
6790 return fold_build2 (MULT_EXPR, type,
6791 fold_build2 (code, type,
6792 fold_convert (type, alt0),
6793 fold_convert (type, alt1)),
6794 fold_convert (type, same));
6796 return NULL_TREE;
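/* Editorial examples, not part of the original source: x*4 + y*4 has
   the identical multiplicand 4 and factors directly as (x + y) * 4;
   x*12 + y*4 has none, but 4 is a power of two dividing 12, so the
   power-of-two path factors it as (x*3 + y) * 4.  */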
6799 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6800 specified by EXPR into the buffer PTR of length LEN bytes.
6801 Return the number of bytes placed in the buffer, or zero
6802 upon failure. */
6804 static int
6805 native_encode_int (tree expr, unsigned char *ptr, int len)
6807 tree type = TREE_TYPE (expr);
6808 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6809 int byte, offset, word, words;
6810 unsigned char value;
6812 if (total_bytes > len)
6813 return 0;
6814 words = total_bytes / UNITS_PER_WORD;
6816 for (byte = 0; byte < total_bytes; byte++)
6818 int bitpos = byte * BITS_PER_UNIT;
6819 if (bitpos < HOST_BITS_PER_WIDE_INT)
6820 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6821 else
6822 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6823 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6825 if (total_bytes > UNITS_PER_WORD)
6827 word = byte / UNITS_PER_WORD;
6828 if (WORDS_BIG_ENDIAN)
6829 word = (words - 1) - word;
6830 offset = word * UNITS_PER_WORD;
6831 if (BYTES_BIG_ENDIAN)
6832 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6833 else
6834 offset += byte % UNITS_PER_WORD;
6836 else
6837 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6838 ptr[offset] = value;
6840 return total_bytes;
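/* Editorial example, not part of the original source: encoding the
   32-bit INTEGER_CST 0x01020304 stores the bytes 04 03 02 01 into
   PTR on a little-endian target and 01 02 03 04 on a big-endian one;
   the word shuffling above only comes into play once the value is
   wider than UNITS_PER_WORD.  */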
6844 /* Subroutine of native_encode_expr. Encode the REAL_CST
6845 specified by EXPR into the buffer PTR of length LEN bytes.
6846 Return the number of bytes placed in the buffer, or zero
6847 upon failure. */
6849 static int
6850 native_encode_real (tree expr, unsigned char *ptr, int len)
6852 tree type = TREE_TYPE (expr);
6853 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6854 int byte, offset, word, words;
6855 unsigned char value;
6857 /* There are always 32 bits in each long, no matter the size of
6858 the host's long. We handle floating point representations with
6859 up to 192 bits. */
6860 long tmp[6];
6862 if (total_bytes > len)
6863 return 0;
6864 words = total_bytes / UNITS_PER_WORD;
6866 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6868 for (byte = 0; byte < total_bytes; byte++)
6870 int bitpos = byte * BITS_PER_UNIT;
6871 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6873 if (total_bytes > UNITS_PER_WORD)
6875 word = byte / UNITS_PER_WORD;
6876 if (FLOAT_WORDS_BIG_ENDIAN)
6877 word = (words - 1) - word;
6878 offset = word * UNITS_PER_WORD;
6879 if (BYTES_BIG_ENDIAN)
6880 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6881 else
6882 offset += byte % UNITS_PER_WORD;
6884 else
6885 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6886 ptr[offset] = value;
6888 return total_bytes;
6891 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6892 specified by EXPR into the buffer PTR of length LEN bytes.
6893 Return the number of bytes placed in the buffer, or zero
6894 upon failure. */
6896 static int
6897 native_encode_complex (tree expr, unsigned char *ptr, int len)
6899 int rsize, isize;
6900 tree part;
6902 part = TREE_REALPART (expr);
6903 rsize = native_encode_expr (part, ptr, len);
6904 if (rsize == 0)
6905 return 0;
6906 part = TREE_IMAGPART (expr);
6907 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6908 if (isize != rsize)
6909 return 0;
6910 return rsize + isize;
6914 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6915 specified by EXPR into the buffer PTR of length LEN bytes.
6916 Return the number of bytes placed in the buffer, or zero
6917 upon failure. */
6919 static int
6920 native_encode_vector (tree expr, unsigned char *ptr, int len)
6922 int i, size, offset, count;
6923 tree itype, elem, elements;
6925 offset = 0;
6926 elements = TREE_VECTOR_CST_ELTS (expr);
6927 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6928 itype = TREE_TYPE (TREE_TYPE (expr));
6929 size = GET_MODE_SIZE (TYPE_MODE (itype));
6930 for (i = 0; i < count; i++)
6932 if (elements)
6934 elem = TREE_VALUE (elements);
6935 elements = TREE_CHAIN (elements);
6937 else
6938 elem = NULL_TREE;
6940 if (elem)
6942 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
6943 return 0;
6945 else
6947 if (offset + size > len)
6948 return 0;
6949 memset (ptr+offset, 0, size);
6951 offset += size;
6953 return offset;
6957 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
6958 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
6959 buffer PTR of length LEN bytes. Return the number of bytes
6960 placed in the buffer, or zero upon failure. */
6962 static int
6963 native_encode_expr (tree expr, unsigned char *ptr, int len)
6965 switch (TREE_CODE (expr))
6967 case INTEGER_CST:
6968 return native_encode_int (expr, ptr, len);
6970 case REAL_CST:
6971 return native_encode_real (expr, ptr, len);
6973 case COMPLEX_CST:
6974 return native_encode_complex (expr, ptr, len);
6976 case VECTOR_CST:
6977 return native_encode_vector (expr, ptr, len);
6979 default:
6980 return 0;
6985 /* Subroutine of native_interpret_expr. Interpret the contents of
6986 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
6987 If the buffer cannot be interpreted, return NULL_TREE. */
6989 static tree
6990 native_interpret_int (tree type, unsigned char *ptr, int len)
6992 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6993 int byte, offset, word, words;
6994 unsigned char value;
6995 unsigned HOST_WIDE_INT lo = 0;
6996 HOST_WIDE_INT hi = 0;
6998 if (total_bytes > len)
6999 return NULL_TREE;
7000 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7001 return NULL_TREE;
7002 words = total_bytes / UNITS_PER_WORD;
7004 for (byte = 0; byte < total_bytes; byte++)
7006 int bitpos = byte * BITS_PER_UNIT;
7007 if (total_bytes > UNITS_PER_WORD)
7009 word = byte / UNITS_PER_WORD;
7010 if (WORDS_BIG_ENDIAN)
7011 word = (words - 1) - word;
7012 offset = word * UNITS_PER_WORD;
7013 if (BYTES_BIG_ENDIAN)
7014 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7015 else
7016 offset += byte % UNITS_PER_WORD;
7018 else
7019 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7020 value = ptr[offset];
7022 if (bitpos < HOST_BITS_PER_WIDE_INT)
7023 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7024 else
7025 hi |= (unsigned HOST_WIDE_INT) value
7026 << (bitpos - HOST_BITS_PER_WIDE_INT);
7029 return force_fit_type (build_int_cst_wide (type, lo, hi),
7030 0, false, false);
7034 /* Subroutine of native_interpret_expr. Interpret the contents of
7035 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7036 If the buffer cannot be interpreted, return NULL_TREE. */
7038 static tree
7039 native_interpret_real (tree type, unsigned char *ptr, int len)
7041 enum machine_mode mode = TYPE_MODE (type);
7042 int total_bytes = GET_MODE_SIZE (mode);
7043 int byte, offset, word, words;
7044 unsigned char value;
7045 /* There are always 32 bits in each long, no matter the size of
7046 the host's long. We handle floating point representations with
7047 up to 192 bits. */
7048 REAL_VALUE_TYPE r;
7049 long tmp[6];
7051 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7052 if (total_bytes > len || total_bytes > 24)
7053 return NULL_TREE;
7054 words = total_bytes / UNITS_PER_WORD;
7056 memset (tmp, 0, sizeof (tmp));
7057 for (byte = 0; byte < total_bytes; byte++)
7059 int bitpos = byte * BITS_PER_UNIT;
7060 if (total_bytes > UNITS_PER_WORD)
7062 word = byte / UNITS_PER_WORD;
7063 if (FLOAT_WORDS_BIG_ENDIAN)
7064 word = (words - 1) - word;
7065 offset = word * UNITS_PER_WORD;
7066 if (BYTES_BIG_ENDIAN)
7067 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7068 else
7069 offset += byte % UNITS_PER_WORD;
7071 else
7072 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7073 value = ptr[offset];
7075 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7078 real_from_target (&r, tmp, mode);
7079 return build_real (type, r);
7083 /* Subroutine of native_interpret_expr. Interpret the contents of
7084 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7085 If the buffer cannot be interpreted, return NULL_TREE. */
7087 static tree
7088 native_interpret_complex (tree type, unsigned char *ptr, int len)
7090 tree etype, rpart, ipart;
7091 int size;
7093 etype = TREE_TYPE (type);
7094 size = GET_MODE_SIZE (TYPE_MODE (etype));
7095 if (size * 2 > len)
7096 return NULL_TREE;
7097 rpart = native_interpret_expr (etype, ptr, size);
7098 if (!rpart)
7099 return NULL_TREE;
7100 ipart = native_interpret_expr (etype, ptr+size, size);
7101 if (!ipart)
7102 return NULL_TREE;
7103 return build_complex (type, rpart, ipart);
7107 /* Subroutine of native_interpret_expr. Interpret the contents of
7108 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7109 If the buffer cannot be interpreted, return NULL_TREE. */
7111 static tree
7112 native_interpret_vector (tree type, unsigned char *ptr, int len)
7114 tree etype, elem, elements;
7115 int i, size, count;
7117 etype = TREE_TYPE (type);
7118 size = GET_MODE_SIZE (TYPE_MODE (etype));
7119 count = TYPE_VECTOR_SUBPARTS (type);
7120 if (size * count > len)
7121 return NULL_TREE;
7123 elements = NULL_TREE;
7124 for (i = count - 1; i >= 0; i--)
7126 elem = native_interpret_expr (etype, ptr+(i*size), size);
7127 if (!elem)
7128 return NULL_TREE;
7129 elements = tree_cons (NULL_TREE, elem, elements);
7131 return build_vector (type, elements);
7135 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7136 the buffer PTR of length LEN as a constant of type TYPE. For
7137 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7138 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7139 return NULL_TREE. */
7141 static tree
7142 native_interpret_expr (tree type, unsigned char *ptr, int len)
7144 switch (TREE_CODE (type))
7146 case INTEGER_TYPE:
7147 case ENUMERAL_TYPE:
7148 case BOOLEAN_TYPE:
7149 return native_interpret_int (type, ptr, len);
7151 case REAL_TYPE:
7152 return native_interpret_real (type, ptr, len);
7154 case COMPLEX_TYPE:
7155 return native_interpret_complex (type, ptr, len);
7157 case VECTOR_TYPE:
7158 return native_interpret_vector (type, ptr, len);
7160 default:
7161 return NULL_TREE;
7166 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7167 TYPE at compile-time. If we're unable to perform the conversion
7168 return NULL_TREE. */
7170 static tree
7171 fold_view_convert_expr (tree type, tree expr)
7173 /* We support up to 512-bit values (for V8DFmode). */
7174 unsigned char buffer[64];
7175 int len;
7177 /* Check that the host and target are sane. */
7178 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7179 return NULL_TREE;
7181 len = native_encode_expr (expr, buffer, sizeof (buffer));
7182 if (len == 0)
7183 return NULL_TREE;
7185 return native_interpret_expr (type, buffer, len);
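/* Editorial example, not part of the original source: a
   VIEW_CONVERT_EXPR that reinterprets the float constant 1.0f as a
   32-bit integer round-trips through native_encode_real and
   native_interpret_int and folds to 0x3f800000 at compile time,
   provided host and target both use 8-bit bytes.  */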
7189 /* Fold a unary expression of code CODE and type TYPE with operand
7190 OP0. Return the folded expression if folding is successful.
7191 Otherwise, return NULL_TREE. */
7193 tree
7194 fold_unary (enum tree_code code, tree type, tree op0)
7196 tree tem;
7197 tree arg0;
7198 enum tree_code_class kind = TREE_CODE_CLASS (code);
7200 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7201 && TREE_CODE_LENGTH (code) == 1);
7203 arg0 = op0;
7204 if (arg0)
7206 if (code == NOP_EXPR || code == CONVERT_EXPR
7207 || code == FLOAT_EXPR || code == ABS_EXPR)
7209 /* Don't use STRIP_NOPS, because signedness of argument type
7210 matters. */
7211 STRIP_SIGN_NOPS (arg0);
7213 else
7215 /* Strip any conversions that don't change the mode. This
7216 is safe for every expression, except for a comparison
7217 expression because its signedness is derived from its
7218 operands.
7220 Note that this is done as an internal manipulation within
7221 the constant folder, in order to find the simplest
7222 representation of the arguments so that their form can be
7223 studied. In any cases, the appropriate type conversions
7224 should be put back in the tree that will get out of the
7225 constant folder. */
7226 STRIP_NOPS (arg0);
7230 if (TREE_CODE_CLASS (code) == tcc_unary)
7232 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7233 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7234 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7235 else if (TREE_CODE (arg0) == COND_EXPR)
7237 tree arg01 = TREE_OPERAND (arg0, 1);
7238 tree arg02 = TREE_OPERAND (arg0, 2);
7239 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7240 arg01 = fold_build1 (code, type, arg01);
7241 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7242 arg02 = fold_build1 (code, type, arg02);
7243 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7244 arg01, arg02);
7246 /* If this was a conversion, and all we did was move it
7247 inside the COND_EXPR, bring it back out. But leave it if
7248 it is a conversion from integer to integer and the
7249 result precision is no wider than a word since such a
7250 conversion is cheap and may be optimized away by combine,
7251 while it couldn't if it were outside the COND_EXPR. Then return
7252 so we don't get into an infinite recursion loop taking the
7253 conversion out and then back in. */
7255 if ((code == NOP_EXPR || code == CONVERT_EXPR
7256 || code == NON_LVALUE_EXPR)
7257 && TREE_CODE (tem) == COND_EXPR
7258 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7259 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7260 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
7261 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
7262 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7263 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7264 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7265 && (INTEGRAL_TYPE_P
7266 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7267 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7268 || flag_syntax_only))
7269 tem = build1 (code, type,
7270 build3 (COND_EXPR,
7271 TREE_TYPE (TREE_OPERAND
7272 (TREE_OPERAND (tem, 1), 0)),
7273 TREE_OPERAND (tem, 0),
7274 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7275 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7276 return tem;
7278 else if (COMPARISON_CLASS_P (arg0))
7280 if (TREE_CODE (type) == BOOLEAN_TYPE)
7282 arg0 = copy_node (arg0);
7283 TREE_TYPE (arg0) = type;
7284 return arg0;
7286 else if (TREE_CODE (type) != INTEGER_TYPE)
7287 return fold_build3 (COND_EXPR, type, arg0,
7288 fold_build1 (code, type,
7289 integer_one_node),
7290 fold_build1 (code, type,
7291 integer_zero_node));
7295 switch (code)
7297 case NOP_EXPR:
7298 case FLOAT_EXPR:
7299 case CONVERT_EXPR:
7300 case FIX_TRUNC_EXPR:
7301 case FIX_CEIL_EXPR:
7302 case FIX_FLOOR_EXPR:
7303 case FIX_ROUND_EXPR:
7304 if (TREE_TYPE (op0) == type)
7305 return op0;
7307 /* If we have (type) (a CMP b) and type is an integral type, return
7308 new expression involving the new type. */
7309 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7310 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7311 TREE_OPERAND (op0, 1));
7313 /* Handle cases of two conversions in a row. */
7314 if (TREE_CODE (op0) == NOP_EXPR
7315 || TREE_CODE (op0) == CONVERT_EXPR)
7317 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7318 tree inter_type = TREE_TYPE (op0);
7319 int inside_int = INTEGRAL_TYPE_P (inside_type);
7320 int inside_ptr = POINTER_TYPE_P (inside_type);
7321 int inside_float = FLOAT_TYPE_P (inside_type);
7322 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7323 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7324 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7325 int inter_int = INTEGRAL_TYPE_P (inter_type);
7326 int inter_ptr = POINTER_TYPE_P (inter_type);
7327 int inter_float = FLOAT_TYPE_P (inter_type);
7328 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7329 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7330 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7331 int final_int = INTEGRAL_TYPE_P (type);
7332 int final_ptr = POINTER_TYPE_P (type);
7333 int final_float = FLOAT_TYPE_P (type);
7334 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7335 unsigned int final_prec = TYPE_PRECISION (type);
7336 int final_unsignedp = TYPE_UNSIGNED (type);
7338 /* In addition to the cases of two conversions in a row
7339 handled below, if we are converting something to its own
7340 type via an object of identical or wider precision, neither
7341 conversion is needed. */
7342 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7343 && (((inter_int || inter_ptr) && final_int)
7344 || (inter_float && final_float))
7345 && inter_prec >= final_prec)
7346 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7348 /* Likewise, if the intermediate and final types are either both
7349 float or both integer, we don't need the middle conversion if
7350 it is wider than the final type and doesn't change the signedness
7351 (for integers). Avoid this if the final type is a pointer
7352 since then we sometimes need the inner conversion. Likewise if
7353 the outer has a precision not equal to the size of its mode. */
7354 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7355 || (inter_float && inside_float)
7356 || (inter_vec && inside_vec))
7357 && inter_prec >= inside_prec
7358 && (inter_float || inter_vec
7359 || inter_unsignedp == inside_unsignedp)
7360 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7361 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7362 && ! final_ptr
7363 && (! final_vec || inter_prec == inside_prec))
7364 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7366 /* If we have a sign-extension of a zero-extended value, we can
7367 replace that by a single zero-extension. */
7368 if (inside_int && inter_int && final_int
7369 && inside_prec < inter_prec && inter_prec < final_prec
7370 && inside_unsignedp && !inter_unsignedp)
7371 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
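          /* Editorial example (illustrative only, not from the original
             source): for "unsigned char c", the chain (long) (short) c
             zero-extends to short and then sign-extends to long; the
             intermediate value is known non-negative, so this folds to
             the single zero-extension (long) c.  */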
7373 /* Two conversions in a row are not needed unless:
7374 - some conversion is floating-point (overstrict for now), or
7375 - some conversion is a vector (overstrict for now), or
7376 - the intermediate type is narrower than both initial and
7377 final, or
7378 - the intermediate type and innermost type differ in signedness,
7379 and the outermost type is wider than the intermediate, or
7380 - the initial type is a pointer type and the precisions of the
7381 intermediate and final types differ, or
7382 - the final type is a pointer type and the precisions of the
7384        initial and intermediate types differ, or
7384        - the final type is a pointer type and the initial type is not, or
7385        - the initial type is a pointer to an array and the final type
7386          is not.  */
7387 if (! inside_float && ! inter_float && ! final_float
7388 && ! inside_vec && ! inter_vec && ! final_vec
7389 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7390 && ! (inside_int && inter_int
7391 && inter_unsignedp != inside_unsignedp
7392 && inter_prec < final_prec)
7393 && ((inter_unsignedp && inter_prec > inside_prec)
7394 == (final_unsignedp && final_prec > inter_prec))
7395 && ! (inside_ptr && inter_prec != final_prec)
7396 && ! (final_ptr && inside_prec != inter_prec)
7397 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7398 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7399 && final_ptr == inside_ptr
7400 && ! (inside_ptr
7401 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7402 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7403 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
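          /* Editorial example (illustrative only): for "signed char c",
             (int) (short) c meets all of the conditions above -- nothing
             is float, vector or pointer, the intermediate type is not
             narrower than both ends, and the signedness agrees -- so the
             pair folds to the single conversion (int) c.  */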
7406 /* Handle (T *)&A.B.C for A being of type T and B and C
7407 living at offset zero. This occurs frequently in
7408 C++ upcasting and then accessing the base. */
7409 if (TREE_CODE (op0) == ADDR_EXPR
7410 && POINTER_TYPE_P (type)
7411 && handled_component_p (TREE_OPERAND (op0, 0)))
7413 HOST_WIDE_INT bitsize, bitpos;
7414 tree offset;
7415 enum machine_mode mode;
7416 int unsignedp, volatilep;
7417 tree base = TREE_OPERAND (op0, 0);
7418 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7419 &mode, &unsignedp, &volatilep, false);
7420 /* If the reference was to a (constant) zero offset, we can use
7421 the address of the base if it has the same base type
7422 as the result type. */
7423 if (! offset && bitpos == 0
7424 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7425 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7426 return fold_convert (type, build_fold_addr_expr (base));
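          /* Editorial example (hypothetical types, illustrative only):
             given "struct S { struct T t; } s;" with t the first member
             and x the first field of t, the upcast-style expression
             (struct S *) &s.t.x folds to &s: get_inner_reference reports
             a zero bit position and no variable offset, and the base s
             already has the pointed-to type.  */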
7429 if (TREE_CODE (op0) == MODIFY_EXPR
7430 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7431 /* Detect assigning a bitfield. */
7432 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7433 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7435 /* Don't leave an assignment inside a conversion
7436 unless assigning a bitfield. */
7437 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7439          /* First do the assignment, then return the converted constant.  */
7439 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7440 TREE_NO_WARNING (tem) = 1;
7441 TREE_USED (tem) = 1;
7442 return tem;
7445 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7446         constant (if x has a signed type, the sign bit cannot be set
7447 in c). This folds extension into the BIT_AND_EXPR. */
7448 if (INTEGRAL_TYPE_P (type)
7449 && TREE_CODE (type) != BOOLEAN_TYPE
7450 && TREE_CODE (op0) == BIT_AND_EXPR
7451 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7453 tree and = op0;
7454 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7455 int change = 0;
7457 if (TYPE_UNSIGNED (TREE_TYPE (and))
7458 || (TYPE_PRECISION (type)
7459 <= TYPE_PRECISION (TREE_TYPE (and))))
7460 change = 1;
7461 else if (TYPE_PRECISION (TREE_TYPE (and1))
7462 <= HOST_BITS_PER_WIDE_INT
7463 && host_integerp (and1, 1))
7465 unsigned HOST_WIDE_INT cst;
7467 cst = tree_low_cst (and1, 1);
7468 cst &= (HOST_WIDE_INT) -1
7469 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7470 change = (cst == 0);
7471 #ifdef LOAD_EXTEND_OP
7472 if (change
7473 && !flag_syntax_only
7474 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7475 == ZERO_EXTEND))
7477 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7478 and0 = fold_convert (uns, and0);
7479 and1 = fold_convert (uns, and1);
7481 #endif
7483 if (change)
7485 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7486 TREE_INT_CST_HIGH (and1));
7487 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7488 TREE_CONSTANT_OVERFLOW (and1));
7489 return fold_build2 (BIT_AND_EXPR, type,
7490 fold_convert (type, and0), tem);
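          /* Editorial example (illustrative only): for "unsigned char x",
             (unsigned int) (x & 0x7f) is rewritten here as
             (unsigned int) x & 0x7f, i.e. the widening is folded into
             the BIT_AND_EXPR and the mask constant is re-fitted to the
             wider type.  */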
7494 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7495 T2 being pointers to types of the same size. */
7496 if (POINTER_TYPE_P (type)
7497 && BINARY_CLASS_P (arg0)
7498 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7499 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7501 tree arg00 = TREE_OPERAND (arg0, 0);
7502 tree t0 = type;
7503 tree t1 = TREE_TYPE (arg00);
7504 tree tt0 = TREE_TYPE (t0);
7505 tree tt1 = TREE_TYPE (t1);
7506 tree s0 = TYPE_SIZE (tt0);
7507 tree s1 = TYPE_SIZE (tt1);
7509 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7510 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7511 TREE_OPERAND (arg0, 1));
7514 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7515         of the same precision, and X is an integer type not narrower than
7516 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7517 if (INTEGRAL_TYPE_P (type)
7518 && TREE_CODE (op0) == BIT_NOT_EXPR
7519 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7520 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7521 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7522 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7524 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7525 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7526 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7527 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
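          /* Editorial example (illustrative only, assuming int and
             unsigned int share a precision): for "int x", the expression
             (int) ~(unsigned int) x folds to ~x, since the inner cast is
             not an extension.  */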
7530 tem = fold_convert_const (code, type, arg0);
7531 return tem ? tem : NULL_TREE;
7533 case VIEW_CONVERT_EXPR:
7534 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7535 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7536 return fold_view_convert_expr (type, op0);
7538 case NEGATE_EXPR:
7539 tem = fold_negate_expr (arg0);
7540 if (tem)
7541 return fold_convert (type, tem);
7542 return NULL_TREE;
7544 case ABS_EXPR:
7545 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7546 return fold_abs_const (arg0, type);
7547 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7548 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7549 /* Convert fabs((double)float) into (double)fabsf(float). */
7550 else if (TREE_CODE (arg0) == NOP_EXPR
7551 && TREE_CODE (type) == REAL_TYPE)
7553 tree targ0 = strip_float_extensions (arg0);
7554 if (targ0 != arg0)
7555 return fold_convert (type, fold_build1 (ABS_EXPR,
7556 TREE_TYPE (targ0),
7557 targ0));
7559 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7560 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7561 return arg0;
7563 /* Strip sign ops from argument. */
7564 if (TREE_CODE (type) == REAL_TYPE)
7566 tem = fold_strip_sign_ops (arg0);
7567 if (tem)
7568 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7570 return NULL_TREE;
7572 case CONJ_EXPR:
7573 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7574 return fold_convert (type, arg0);
7575 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7577 tree itype = TREE_TYPE (type);
7578 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7579 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7580 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7582 if (TREE_CODE (arg0) == COMPLEX_CST)
7584 tree itype = TREE_TYPE (type);
7585 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7586 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7587 return build_complex (type, rpart, negate_expr (ipart));
7589 if (TREE_CODE (arg0) == CONJ_EXPR)
7590 return fold_convert (type, TREE_OPERAND (arg0, 0));
7591 return NULL_TREE;
7593 case BIT_NOT_EXPR:
7594 if (TREE_CODE (arg0) == INTEGER_CST)
7595 return fold_not_const (arg0, type);
7596 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7597 return TREE_OPERAND (arg0, 0);
7598 /* Convert ~ (-A) to A - 1. */
7599 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7600 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7601 build_int_cst (type, 1));
7602 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7603 else if (INTEGRAL_TYPE_P (type)
7604 && ((TREE_CODE (arg0) == MINUS_EXPR
7605 && integer_onep (TREE_OPERAND (arg0, 1)))
7606 || (TREE_CODE (arg0) == PLUS_EXPR
7607 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7608 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
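      /* Editorial examples (illustrative only): for "int i", ~-i folds
         to i - 1 and ~(i - 1) folds to -i, both instances of the two's
         complement identity ~X == -X - 1.  */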
7609 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7610 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7611 && (tem = fold_unary (BIT_NOT_EXPR, type,
7612 fold_convert (type,
7613 TREE_OPERAND (arg0, 0)))))
7614 return fold_build2 (BIT_XOR_EXPR, type, tem,
7615 fold_convert (type, TREE_OPERAND (arg0, 1)));
7616 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7617 && (tem = fold_unary (BIT_NOT_EXPR, type,
7618 fold_convert (type,
7619 TREE_OPERAND (arg0, 1)))))
7620 return fold_build2 (BIT_XOR_EXPR, type,
7621 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7623 return NULL_TREE;
7625 case TRUTH_NOT_EXPR:
7626 /* The argument to invert_truthvalue must have Boolean type. */
7627 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7628 arg0 = fold_convert (boolean_type_node, arg0);
7630 /* Note that the operand of this must be an int
7631 and its values must be 0 or 1.
7632 ("true" is a fixed value perhaps depending on the language,
7633 but we don't handle values other than 1 correctly yet.) */
7634 tem = fold_truth_not_expr (arg0);
7635 if (!tem)
7636 return NULL_TREE;
7637 return fold_convert (type, tem);
7639 case REALPART_EXPR:
7640 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7641 return fold_convert (type, arg0);
7642 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7643 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7644 TREE_OPERAND (arg0, 1));
7645 if (TREE_CODE (arg0) == COMPLEX_CST)
7646 return fold_convert (type, TREE_REALPART (arg0));
7647 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7649 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7650 tem = fold_build2 (TREE_CODE (arg0), itype,
7651 fold_build1 (REALPART_EXPR, itype,
7652 TREE_OPERAND (arg0, 0)),
7653 fold_build1 (REALPART_EXPR, itype,
7654 TREE_OPERAND (arg0, 1)));
7655 return fold_convert (type, tem);
7657 if (TREE_CODE (arg0) == CONJ_EXPR)
7659 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7660 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7661 return fold_convert (type, tem);
7663 return NULL_TREE;
7665 case IMAGPART_EXPR:
7666 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7667 return fold_convert (type, integer_zero_node);
7668 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7669 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7670 TREE_OPERAND (arg0, 0));
7671 if (TREE_CODE (arg0) == COMPLEX_CST)
7672 return fold_convert (type, TREE_IMAGPART (arg0));
7673 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7675 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7676 tem = fold_build2 (TREE_CODE (arg0), itype,
7677 fold_build1 (IMAGPART_EXPR, itype,
7678 TREE_OPERAND (arg0, 0)),
7679 fold_build1 (IMAGPART_EXPR, itype,
7680 TREE_OPERAND (arg0, 1)));
7681 return fold_convert (type, tem);
7683 if (TREE_CODE (arg0) == CONJ_EXPR)
7685 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7686 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7687 return fold_convert (type, negate_expr (tem));
7689 return NULL_TREE;
7691 default:
7692 return NULL_TREE;
7693 } /* switch (code) */
7696 /* Fold a binary expression of code CODE and type TYPE with operands
7697 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7698 Return the folded expression if folding is successful. Otherwise,
7699 return NULL_TREE. */
7701 static tree
7702 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7704 enum tree_code compl_code;
7706 if (code == MIN_EXPR)
7707 compl_code = MAX_EXPR;
7708 else if (code == MAX_EXPR)
7709 compl_code = MIN_EXPR;
7710 else
7711 gcc_unreachable ();
7713   /* MIN (MAX (a, b), b) == b.  */
7714 if (TREE_CODE (op0) == compl_code
7715 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7716 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7718   /* MIN (MAX (b, a), b) == b.  */
7719 if (TREE_CODE (op0) == compl_code
7720 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7721 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7722 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7724   /* MIN (a, MAX (a, b)) == a.  */
7725 if (TREE_CODE (op1) == compl_code
7726 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7727 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7728 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7730   /* MIN (a, MAX (b, a)) == a.  */
7731 if (TREE_CODE (op1) == compl_code
7732 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7733 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7734 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7736 return NULL_TREE;
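/* Editorial sketch (hypothetical caller, illustrative only):

     tree t = fold_minmax (MIN_EXPR, integer_type_node, max_ab, b);

   yields B when max_ab is MAX_EXPR <a, b> -- wrapped by omit_one_operand
   so any side effects of A are preserved -- and NULL_TREE otherwise.  */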
7739 /* Subroutine of fold_binary. This routine performs all of the
7740 transformations that are common to the equality/inequality
7741 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7742 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7743    fold_binary itself should use fold_binary rather than calling this
        function directly.  Fold a comparison with
7744 tree code CODE and type TYPE with operands OP0 and OP1. Return
7745 the folded comparison or NULL_TREE. */
7747 static tree
7748 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7750 tree arg0, arg1, tem;
7752 arg0 = op0;
7753 arg1 = op1;
7755 STRIP_SIGN_NOPS (arg0);
7756 STRIP_SIGN_NOPS (arg1);
7758 tem = fold_relational_const (code, type, arg0, arg1);
7759 if (tem != NULL_TREE)
7760 return tem;
7762 /* If one arg is a real or integer constant, put it last. */
7763 if (tree_swap_operands_p (arg0, arg1, true))
7764 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7766 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7767 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7768 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7769 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7770 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7771 && !(flag_wrapv || flag_trapv))
7772 && (TREE_CODE (arg1) == INTEGER_CST
7773 && !TREE_OVERFLOW (arg1)))
7775 tree const1 = TREE_OPERAND (arg0, 1);
7776 tree const2 = arg1;
7777 tree variable = TREE_OPERAND (arg0, 0);
7778 tree lhs;
7779 int lhs_add;
7780 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7782 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7783 TREE_TYPE (arg1), const2, const1);
7784 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7785 && (TREE_CODE (lhs) != INTEGER_CST
7786 || !TREE_OVERFLOW (lhs)))
7787 return fold_build2 (code, type, variable, lhs);
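      /* Editorial example (illustrative only, signed arithmetic without
         -fwrapv/-ftrapv): x + 2 < 5 is rewritten as x < 3, moving the
         constants to one side as long as 5 - 2 does not overflow.  */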
7790 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
7791 same object, then we can fold this to a comparison of the two offsets in
7792 signed size type. This is possible because pointer arithmetic is
7793     restricted to remain within an object and overflow on pointer differences
7794     is undefined behavior per C99 6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
7795 if (POINTER_TYPE_P (TREE_TYPE (arg0))
7796 && !flag_wrapv && !flag_trapv)
7798 tree base0, offset0, base1, offset1;
7800 if (extract_array_ref (arg0, &base0, &offset0)
7801 && extract_array_ref (arg1, &base1, &offset1)
7802 && operand_equal_p (base0, base1, 0))
7804 tree signed_size_type_node;
7805 signed_size_type_node = signed_type_for (size_type_node);
7807          /* By converting to the signed size type we cover middle-end pointer
7808             arithmetic, which operates on unsigned pointer types of size-type
7809             width, and ARRAY_REF offsets, which are properly sign- or
7810             zero-extended from their type in case it is narrower than
7811             size type.  */
7812 if (offset0 == NULL_TREE)
7813 offset0 = build_int_cst (signed_size_type_node, 0);
7814 else
7815 offset0 = fold_convert (signed_size_type_node, offset0);
7816 if (offset1 == NULL_TREE)
7817 offset1 = build_int_cst (signed_size_type_node, 0);
7818 else
7819 offset1 = fold_convert (signed_size_type_node, offset1);
7821 return fold_build2 (code, type, offset0, offset1);
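          /* Editorial example (illustrative only): for "int a[10]",
             &a[i] < &a[j] compares two ARRAY_REFs of the same base and
             therefore folds to a comparison of the offsets, i < j, in
             the signed variant of the size type.  */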
7825 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7827 tree targ0 = strip_float_extensions (arg0);
7828 tree targ1 = strip_float_extensions (arg1);
7829 tree newtype = TREE_TYPE (targ0);
7831 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7832 newtype = TREE_TYPE (targ1);
7834 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7835 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7836 return fold_build2 (code, type, fold_convert (newtype, targ0),
7837 fold_convert (newtype, targ1));
7839 /* (-a) CMP (-b) -> b CMP a */
7840 if (TREE_CODE (arg0) == NEGATE_EXPR
7841 && TREE_CODE (arg1) == NEGATE_EXPR)
7842 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
7843 TREE_OPERAND (arg0, 0));
7845 if (TREE_CODE (arg1) == REAL_CST)
7847 REAL_VALUE_TYPE cst;
7848 cst = TREE_REAL_CST (arg1);
7850 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7851 if (TREE_CODE (arg0) == NEGATE_EXPR)
7852 return fold_build2 (swap_tree_comparison (code), type,
7853 TREE_OPERAND (arg0, 0),
7854 build_real (TREE_TYPE (arg1),
7855 REAL_VALUE_NEGATE (cst)));
7857 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7858 /* a CMP (-0) -> a CMP 0 */
7859 if (REAL_VALUE_MINUS_ZERO (cst))
7860 return fold_build2 (code, type, arg0,
7861 build_real (TREE_TYPE (arg1), dconst0));
7863 /* x != NaN is always true, other ops are always false. */
7864 if (REAL_VALUE_ISNAN (cst)
7865 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7867 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7868 return omit_one_operand (type, tem, arg0);
7871 /* Fold comparisons against infinity. */
7872 if (REAL_VALUE_ISINF (cst))
7874 tem = fold_inf_compare (code, type, arg0, arg1);
7875 if (tem != NULL_TREE)
7876 return tem;
7880 /* If this is a comparison of a real constant with a PLUS_EXPR
7881 or a MINUS_EXPR of a real constant, we can convert it into a
7882 comparison with a revised real constant as long as no overflow
7883 occurs when unsafe_math_optimizations are enabled. */
7884 if (flag_unsafe_math_optimizations
7885 && TREE_CODE (arg1) == REAL_CST
7886 && (TREE_CODE (arg0) == PLUS_EXPR
7887 || TREE_CODE (arg0) == MINUS_EXPR)
7888 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7889 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7890 ? MINUS_EXPR : PLUS_EXPR,
7891 arg1, TREE_OPERAND (arg0, 1), 0))
7892 && ! TREE_CONSTANT_OVERFLOW (tem))
7893 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
7895 /* Likewise, we can simplify a comparison of a real constant with
7896 a MINUS_EXPR whose first operand is also a real constant, i.e.
7897 (c1 - x) < c2 becomes x > c1-c2. */
7898 if (flag_unsafe_math_optimizations
7899 && TREE_CODE (arg1) == REAL_CST
7900 && TREE_CODE (arg0) == MINUS_EXPR
7901 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7902 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7903 arg1, 0))
7904 && ! TREE_CONSTANT_OVERFLOW (tem))
7905 return fold_build2 (swap_tree_comparison (code), type,
7906 TREE_OPERAND (arg0, 1), tem);
7908 /* Fold comparisons against built-in math functions. */
7909 if (TREE_CODE (arg1) == REAL_CST
7910 && flag_unsafe_math_optimizations
7911 && ! flag_errno_math)
7913 enum built_in_function fcode = builtin_mathfn_code (arg0);
7915 if (fcode != END_BUILTINS)
7917 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7918 if (tem != NULL_TREE)
7919 return tem;
7924 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7925 if (TREE_CONSTANT (arg1)
7926 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7927 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7928 /* This optimization is invalid for ordered comparisons
7929 if CONST+INCR overflows or if foo+incr might overflow.
7930 This optimization is invalid for floating point due to rounding.
7931 For pointer types we assume overflow doesn't happen. */
7932 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7933 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7934 && (code == EQ_EXPR || code == NE_EXPR))))
7936 tree varop, newconst;
7938 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7940 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
7941 arg1, TREE_OPERAND (arg0, 1));
7942 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7943 TREE_OPERAND (arg0, 0),
7944 TREE_OPERAND (arg0, 1));
7946 else
7948 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
7949 arg1, TREE_OPERAND (arg0, 1));
7950 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7951 TREE_OPERAND (arg0, 0),
7952 TREE_OPERAND (arg0, 1));
7956 /* If VAROP is a reference to a bitfield, we must mask
7957 the constant by the width of the field. */
7958 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7959 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
7960 && host_integerp (DECL_SIZE (TREE_OPERAND
7961 (TREE_OPERAND (varop, 0), 1)), 1))
7963 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7964 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
7965 tree folded_compare, shift;
7967 /* First check whether the comparison would come out
7968 always the same. If we don't do that we would
7969 change the meaning with the masking. */
7970 folded_compare = fold_build2 (code, type,
7971 TREE_OPERAND (varop, 0), arg1);
7972 if (TREE_CODE (folded_compare) == INTEGER_CST)
7973 return omit_one_operand (type, folded_compare, varop);
7975 shift = build_int_cst (NULL_TREE,
7976 TYPE_PRECISION (TREE_TYPE (varop)) - size);
7977 shift = fold_convert (TREE_TYPE (varop), shift);
7978 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7979 newconst, shift);
7980 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7981 newconst, shift);
7984 return fold_build2 (code, type, varop, newconst);
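      /* Editorial example (illustrative only): x++ == 5 becomes
         ++x == 6 here; the post-increment is turned into a
         pre-increment and the constant is bumped by the same
         increment.  */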
7987 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7988 && (TREE_CODE (arg0) == NOP_EXPR
7989 || TREE_CODE (arg0) == CONVERT_EXPR))
7991 /* If we are widening one operand of an integer comparison,
7992 see if the other operand is similarly being widened. Perhaps we
7993 can do the comparison in the narrower type. */
7994 tem = fold_widened_comparison (code, type, arg0, arg1);
7995 if (tem)
7996 return tem;
7998 /* Or if we are changing signedness. */
7999 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8000 if (tem)
8001 return tem;
8004 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8005 constant, we can simplify it. */
8006 if (TREE_CODE (arg1) == INTEGER_CST
8007 && (TREE_CODE (arg0) == MIN_EXPR
8008 || TREE_CODE (arg0) == MAX_EXPR)
8009 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8011 tem = optimize_minmax_comparison (code, type, op0, op1);
8012 if (tem)
8013 return tem;
8016 /* Simplify comparison of something with itself. (For IEEE
8017 floating-point, we can only do some of these simplifications.) */
8018 if (operand_equal_p (arg0, arg1, 0))
8020 switch (code)
8022 case EQ_EXPR:
8023 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8024 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8025 return constant_boolean_node (1, type);
8026 break;
8028 case GE_EXPR:
8029 case LE_EXPR:
8030 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8031 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8032 return constant_boolean_node (1, type);
8033 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8035 case NE_EXPR:
8036      /* For NE, we can only do this simplification if the operands
8037         are integral or we don't honor IEEE floating point NaNs.  */
8038 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8039 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8040 break;
8041 /* ... fall through ... */
8042 case GT_EXPR:
8043 case LT_EXPR:
8044 return constant_boolean_node (0, type);
8045 default:
8046 gcc_unreachable ();
8050 /* If we are comparing an expression that just has comparisons
8051 of two integer values, arithmetic expressions of those comparisons,
8052 and constants, we can simplify it. There are only three cases
8053 to check: the two values can either be equal, the first can be
8054 greater, or the second can be greater. Fold the expression for
8055 those three values. Since each value must be 0 or 1, we have
8056 eight possibilities, each of which corresponds to the constant 0
8057 or 1 or one of the six possible comparisons.
8059 This handles common cases like (a > b) == 0 but also handles
8060 expressions like ((x > y) - (y > x)) > 0, which supposedly
8061 occur in macroized code. */
8063 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8065 tree cval1 = 0, cval2 = 0;
8066 int save_p = 0;
8068 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8069 /* Don't handle degenerate cases here; they should already
8070 have been handled anyway. */
8071 && cval1 != 0 && cval2 != 0
8072 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8073 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8074 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8075 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8076 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8077 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8078 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8080 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8081 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8083 /* We can't just pass T to eval_subst in case cval1 or cval2
8084 was the same as ARG1. */
8086 tree high_result
8087 = fold_build2 (code, type,
8088 eval_subst (arg0, cval1, maxval,
8089 cval2, minval),
8090 arg1);
8091 tree equal_result
8092 = fold_build2 (code, type,
8093 eval_subst (arg0, cval1, maxval,
8094 cval2, maxval),
8095 arg1);
8096 tree low_result
8097 = fold_build2 (code, type,
8098 eval_subst (arg0, cval1, minval,
8099 cval2, maxval),
8100 arg1);
8102 /* All three of these results should be 0 or 1. Confirm they are.
8103 Then use those values to select the proper code to use. */
8105 if (TREE_CODE (high_result) == INTEGER_CST
8106 && TREE_CODE (equal_result) == INTEGER_CST
8107 && TREE_CODE (low_result) == INTEGER_CST)
8109 /* Make a 3-bit mask with the high-order bit being the
8110 value for `>', the next for '=', and the low for '<'. */
8111 switch ((integer_onep (high_result) * 4)
8112 + (integer_onep (equal_result) * 2)
8113 + integer_onep (low_result))
8115 case 0:
8116 /* Always false. */
8117 return omit_one_operand (type, integer_zero_node, arg0);
8118 case 1:
8119 code = LT_EXPR;
8120 break;
8121 case 2:
8122 code = EQ_EXPR;
8123 break;
8124 case 3:
8125 code = LE_EXPR;
8126 break;
8127 case 4:
8128 code = GT_EXPR;
8129 break;
8130 case 5:
8131 code = NE_EXPR;
8132 break;
8133 case 6:
8134 code = GE_EXPR;
8135 break;
8136 case 7:
8137 /* Always true. */
8138 return omit_one_operand (type, integer_one_node, arg0);
8141 if (save_p)
8142 return save_expr (build2 (code, type, cval1, cval2));
8143 return fold_build2 (code, type, cval1, cval2);
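          /* Editorial example (illustrative only): for (a > b) == 0 the
             three probes yield high_result 0, equal_result 1 and
             low_result 1, i.e. mask 3, so the whole expression folds to
             a <= b.  */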
8148 /* Fold a comparison of the address of COMPONENT_REFs with the same
8149 type and component to a comparison of the address of the base
8150 object. In short, &x->a OP &y->a to x OP y and
8151 &x->a OP &y.a to x OP &y */
8152 if (TREE_CODE (arg0) == ADDR_EXPR
8153 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8154 && TREE_CODE (arg1) == ADDR_EXPR
8155 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8157 tree cref0 = TREE_OPERAND (arg0, 0);
8158 tree cref1 = TREE_OPERAND (arg1, 0);
8159 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8161 tree op0 = TREE_OPERAND (cref0, 0);
8162 tree op1 = TREE_OPERAND (cref1, 0);
8163 return fold_build2 (code, type,
8164 build_fold_addr_expr (op0),
8165 build_fold_addr_expr (op1));
8169 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8170 into a single range test. */
8171 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8172 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8173 && TREE_CODE (arg1) == INTEGER_CST
8174 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8175 && !integer_zerop (TREE_OPERAND (arg0, 1))
8176 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8177 && !TREE_OVERFLOW (arg1))
8179 tem = fold_div_compare (code, type, arg0, arg1);
8180 if (tem != NULL_TREE)
8181 return tem;
8184 return NULL_TREE;
8188 /* Subroutine of fold_binary. Optimize complex multiplications of the
8189 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8190 argument EXPR represents the expression "z" of type TYPE. */
8192 static tree
8193 fold_mult_zconjz (tree type, tree expr)
8195 tree itype = TREE_TYPE (type);
8196 tree rpart, ipart, tem;
8198 if (TREE_CODE (expr) == COMPLEX_EXPR)
8200 rpart = TREE_OPERAND (expr, 0);
8201 ipart = TREE_OPERAND (expr, 1);
8203 else if (TREE_CODE (expr) == COMPLEX_CST)
8205 rpart = TREE_REALPART (expr);
8206 ipart = TREE_IMAGPART (expr);
8208 else
8210 expr = save_expr (expr);
8211 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8212 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8215 rpart = save_expr (rpart);
8216 ipart = save_expr (ipart);
8217 tem = fold_build2 (PLUS_EXPR, itype,
8218 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8219 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8220 return fold_build2 (COMPLEX_EXPR, type, tem,
8221 fold_convert (itype, integer_zero_node));
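/* Editorial example (illustrative only): for a complex integer z with
   parts r and i, fold_mult_zconjz rewrites z * conj(z) as the
   COMPLEX_EXPR (r*r + i*i, 0); the cross terms of
   (r + i*I) * (r - i*I) cancel, leaving no imaginary part.  */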
8225 /* Fold a binary expression of code CODE and type TYPE with operands
8226 OP0 and OP1. Return the folded expression if folding is
8227 successful. Otherwise, return NULL_TREE. */
8229 tree
8230 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8232 enum tree_code_class kind = TREE_CODE_CLASS (code);
8233 tree arg0, arg1, tem;
8234 tree t1 = NULL_TREE;
8236 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8237 && TREE_CODE_LENGTH (code) == 2
8238 && op0 != NULL_TREE
8239 && op1 != NULL_TREE);
8241 arg0 = op0;
8242 arg1 = op1;
8244 /* Strip any conversions that don't change the mode. This is
8245 safe for every expression, except for a comparison expression
8246 because its signedness is derived from its operands. So, in
8247 the latter case, only strip conversions that don't change the
8248 signedness.
8250 Note that this is done as an internal manipulation within the
8251 constant folder, in order to find the simplest representation
8252 of the arguments so that their form can be studied. In any
8253     case, the appropriate type conversions should be put back in
8254 the tree that will get out of the constant folder. */
8256 if (kind == tcc_comparison)
8258 STRIP_SIGN_NOPS (arg0);
8259 STRIP_SIGN_NOPS (arg1);
8261 else
8263 STRIP_NOPS (arg0);
8264 STRIP_NOPS (arg1);
8267 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8268 constant but we can't do arithmetic on them. */
8269 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8270 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8271 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8272 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8274 if (kind == tcc_binary)
8275 tem = const_binop (code, arg0, arg1, 0);
8276 else if (kind == tcc_comparison)
8277 tem = fold_relational_const (code, type, arg0, arg1);
8278 else
8279 tem = NULL_TREE;
8281 if (tem != NULL_TREE)
8283 if (TREE_TYPE (tem) != type)
8284 tem = fold_convert (type, tem);
8285 return tem;
8289 /* If this is a commutative operation, and ARG0 is a constant, move it
8290 to ARG1 to reduce the number of tests below. */
8291 if (commutative_tree_code (code)
8292 && tree_swap_operands_p (arg0, arg1, true))
8293 return fold_build2 (code, type, op1, op0);
8295 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8297 First check for cases where an arithmetic operation is applied to a
8298 compound, conditional, or comparison operation. Push the arithmetic
8299 operation inside the compound or conditional to see if any folding
8300 can then be done. Convert comparison to conditional for this purpose.
8301     This also optimizes non-constant cases that used to be done in
8302 expand_expr.
8304     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
8305     where one of the operands is a comparison and the other is a comparison, a
8306 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8307 code below would make the expression more complex. Change it to a
8308 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8309 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8311 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8312 || code == EQ_EXPR || code == NE_EXPR)
8313 && ((truth_value_p (TREE_CODE (arg0))
8314 && (truth_value_p (TREE_CODE (arg1))
8315 || (TREE_CODE (arg1) == BIT_AND_EXPR
8316 && integer_onep (TREE_OPERAND (arg1, 1)))))
8317 || (truth_value_p (TREE_CODE (arg1))
8318 && (truth_value_p (TREE_CODE (arg0))
8319 || (TREE_CODE (arg0) == BIT_AND_EXPR
8320 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8322 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8323 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8324 : TRUTH_XOR_EXPR,
8325 boolean_type_node,
8326 fold_convert (boolean_type_node, arg0),
8327 fold_convert (boolean_type_node, arg1));
8329 if (code == EQ_EXPR)
8330 tem = invert_truthvalue (tem);
8332 return fold_convert (type, tem);
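      /* Editorial example (illustrative only): (a < b) & (c < d), both
         operands truth values, is rewritten as a TRUTH_AND_EXPR of the
         two comparisons in boolean_type_node, and (a < b) == (c < d)
         becomes the inversion of the corresponding TRUTH_XOR_EXPR.  */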
8335 if (TREE_CODE_CLASS (code) == tcc_binary
8336 || TREE_CODE_CLASS (code) == tcc_comparison)
8338 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8339 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8340 fold_build2 (code, type,
8341 TREE_OPERAND (arg0, 1), op1));
8342 if (TREE_CODE (arg1) == COMPOUND_EXPR
8343 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8344 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8345 fold_build2 (code, type,
8346 op0, TREE_OPERAND (arg1, 1)));
8348 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8350 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8351 arg0, arg1,
8352 /*cond_first_p=*/1);
8353 if (tem != NULL_TREE)
8354 return tem;
8357 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8359 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8360 arg1, arg0,
8361 /*cond_first_p=*/0);
8362 if (tem != NULL_TREE)
8363 return tem;
8367 switch (code)
8369 case PLUS_EXPR:
8370 /* A + (-B) -> A - B */
8371 if (TREE_CODE (arg1) == NEGATE_EXPR)
8372 return fold_build2 (MINUS_EXPR, type,
8373 fold_convert (type, arg0),
8374 fold_convert (type, TREE_OPERAND (arg1, 0)));
8375 /* (-A) + B -> B - A */
8376 if (TREE_CODE (arg0) == NEGATE_EXPR
8377 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8378 return fold_build2 (MINUS_EXPR, type,
8379 fold_convert (type, arg1),
8380 fold_convert (type, TREE_OPERAND (arg0, 0)));
8381 /* Convert ~A + 1 to -A. */
8382 if (INTEGRAL_TYPE_P (type)
8383 && TREE_CODE (arg0) == BIT_NOT_EXPR
8384 && integer_onep (arg1))
8385 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8387 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8388 same or one. */
8389 if ((TREE_CODE (arg0) == MULT_EXPR
8390 || TREE_CODE (arg1) == MULT_EXPR)
8391 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8393 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8394 if (tem)
8395 return tem;
8398 if (! FLOAT_TYPE_P (type))
8400 if (integer_zerop (arg1))
8401 return non_lvalue (fold_convert (type, arg0));
8403 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8404 with a constant, and the two constants have no bits in common,
8405 we should treat this as a BIT_IOR_EXPR since this may produce more
8406 simplifications. */
8407 if (TREE_CODE (arg0) == BIT_AND_EXPR
8408 && TREE_CODE (arg1) == BIT_AND_EXPR
8409 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8410 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8411 && integer_zerop (const_binop (BIT_AND_EXPR,
8412 TREE_OPERAND (arg0, 1),
8413 TREE_OPERAND (arg1, 1), 0)))
8415 code = BIT_IOR_EXPR;
8416 goto bit_ior;
8419 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8420 (plus (plus (mult) (mult)) (foo)) so that we can
8421 take advantage of the factoring cases below. */
8422 if (((TREE_CODE (arg0) == PLUS_EXPR
8423 || TREE_CODE (arg0) == MINUS_EXPR)
8424 && TREE_CODE (arg1) == MULT_EXPR)
8425 || ((TREE_CODE (arg1) == PLUS_EXPR
8426 || TREE_CODE (arg1) == MINUS_EXPR)
8427 && TREE_CODE (arg0) == MULT_EXPR))
8429 tree parg0, parg1, parg, marg;
8430 enum tree_code pcode;
8432 if (TREE_CODE (arg1) == MULT_EXPR)
8433 parg = arg0, marg = arg1;
8434 else
8435 parg = arg1, marg = arg0;
8436 pcode = TREE_CODE (parg);
8437 parg0 = TREE_OPERAND (parg, 0);
8438 parg1 = TREE_OPERAND (parg, 1);
8439 STRIP_NOPS (parg0);
8440 STRIP_NOPS (parg1);
8442 if (TREE_CODE (parg0) == MULT_EXPR
8443 && TREE_CODE (parg1) != MULT_EXPR)
8444 return fold_build2 (pcode, type,
8445 fold_build2 (PLUS_EXPR, type,
8446 fold_convert (type, parg0),
8447 fold_convert (type, marg)),
8448 fold_convert (type, parg1));
8449 if (TREE_CODE (parg0) != MULT_EXPR
8450 && TREE_CODE (parg1) == MULT_EXPR)
8451 return fold_build2 (PLUS_EXPR, type,
8452 fold_convert (type, parg0),
8453 fold_build2 (pcode, type,
8454 fold_convert (type, marg),
8455 fold_convert (type,
8456 parg1)));
8459      /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
8460         of the array.  The loop optimizer sometimes produces this type of
8461         expression.  */
8462 if (TREE_CODE (arg0) == ADDR_EXPR)
8464 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8465 if (tem)
8466 return fold_convert (type, tem);
8468 else if (TREE_CODE (arg1) == ADDR_EXPR)
8470 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8471 if (tem)
8472 return fold_convert (type, tem);
8475 else
8477 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8478 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8479 return non_lvalue (fold_convert (type, arg0));
8481 /* Likewise if the operands are reversed. */
8482 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8483 return non_lvalue (fold_convert (type, arg1));
8485 /* Convert X + -C into X - C. */
8486 if (TREE_CODE (arg1) == REAL_CST
8487 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8489 tem = fold_negate_const (arg1, type);
8490 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8491 return fold_build2 (MINUS_EXPR, type,
8492 fold_convert (type, arg0),
8493 fold_convert (type, tem));
8496 if (flag_unsafe_math_optimizations
8497 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8498 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8499 && (tem = distribute_real_division (code, type, arg0, arg1)))
8500 return tem;
8502 /* Convert x+x into x*2.0. */
8503 if (operand_equal_p (arg0, arg1, 0)
8504 && SCALAR_FLOAT_TYPE_P (type))
8505 return fold_build2 (MULT_EXPR, type, arg0,
8506 build_real (type, dconst2));
8508 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8509 if (flag_unsafe_math_optimizations
8510 && TREE_CODE (arg1) == PLUS_EXPR
8511 && TREE_CODE (arg0) != MULT_EXPR)
8513 tree tree10 = TREE_OPERAND (arg1, 0);
8514 tree tree11 = TREE_OPERAND (arg1, 1);
8515 if (TREE_CODE (tree11) == MULT_EXPR
8516 && TREE_CODE (tree10) == MULT_EXPR)
8518 tree tree0;
8519 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8520 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8523      /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
8524 if (flag_unsafe_math_optimizations
8525 && TREE_CODE (arg0) == PLUS_EXPR
8526 && TREE_CODE (arg1) != MULT_EXPR)
8528 tree tree00 = TREE_OPERAND (arg0, 0);
8529 tree tree01 = TREE_OPERAND (arg0, 1);
8530 if (TREE_CODE (tree01) == MULT_EXPR
8531 && TREE_CODE (tree00) == MULT_EXPR)
8533 tree tree0;
8534 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8535 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8540 bit_rotate:
8541 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8542 is a rotate of A by C1 bits. */
8543 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8544 is a rotate of A by B bits. */
8546 enum tree_code code0, code1;
8547 code0 = TREE_CODE (arg0);
8548 code1 = TREE_CODE (arg1);
8549 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8550 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8551 && operand_equal_p (TREE_OPERAND (arg0, 0),
8552 TREE_OPERAND (arg1, 0), 0)
8553 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8555 tree tree01, tree11;
8556 enum tree_code code01, code11;
8558 tree01 = TREE_OPERAND (arg0, 1);
8559 tree11 = TREE_OPERAND (arg1, 1);
8560 STRIP_NOPS (tree01);
8561 STRIP_NOPS (tree11);
8562 code01 = TREE_CODE (tree01);
8563 code11 = TREE_CODE (tree11);
8564 if (code01 == INTEGER_CST
8565 && code11 == INTEGER_CST
8566 && TREE_INT_CST_HIGH (tree01) == 0
8567 && TREE_INT_CST_HIGH (tree11) == 0
8568 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8569 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8570 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8571 code0 == LSHIFT_EXPR ? tree01 : tree11);
8572 else if (code11 == MINUS_EXPR)
8574 tree tree110, tree111;
8575 tree110 = TREE_OPERAND (tree11, 0);
8576 tree111 = TREE_OPERAND (tree11, 1);
8577 STRIP_NOPS (tree110);
8578 STRIP_NOPS (tree111);
8579 if (TREE_CODE (tree110) == INTEGER_CST
8580 && 0 == compare_tree_int (tree110,
8581 TYPE_PRECISION
8582 (TREE_TYPE (TREE_OPERAND
8583 (arg0, 0))))
8584 && operand_equal_p (tree01, tree111, 0))
8585 return build2 ((code0 == LSHIFT_EXPR
8586 ? LROTATE_EXPR
8587 : RROTATE_EXPR),
8588 type, TREE_OPERAND (arg0, 0), tree01);
8590 else if (code01 == MINUS_EXPR)
8592 tree tree010, tree011;
8593 tree010 = TREE_OPERAND (tree01, 0);
8594 tree011 = TREE_OPERAND (tree01, 1);
8595 STRIP_NOPS (tree010);
8596 STRIP_NOPS (tree011);
8597 if (TREE_CODE (tree010) == INTEGER_CST
8598 && 0 == compare_tree_int (tree010,
8599 TYPE_PRECISION
8600 (TREE_TYPE (TREE_OPERAND
8601 (arg0, 0))))
8602 && operand_equal_p (tree11, tree011, 0))
8603 return build2 ((code0 != LSHIFT_EXPR
8604 ? LROTATE_EXPR
8605 : RROTATE_EXPR),
8606 type, TREE_OPERAND (arg0, 0), tree11);
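            /* Editorial example (illustrative only, 32-bit unsigned x):
               the idiom (x << n) + (x >> (32 - n)) is matched by the
               MINUS_EXPR arm above and rewritten as a single
               LROTATE_EXPR of x by n bits.  */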
8611 associate:
8612      /* In most languages, we can't associate operations on floats through
8613 parentheses. Rather than remember where the parentheses were, we
8614 don't associate floats at all, unless the user has specified
8615 -funsafe-math-optimizations. */
8617 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8619 tree var0, con0, lit0, minus_lit0;
8620 tree var1, con1, lit1, minus_lit1;
8622 /* Split both trees into variables, constants, and literals. Then
8623 associate each group together, the constants with literals,
8624 then the result with variables. This increases the chances of
8625 literals being recombined later and of generating relocatable
8626 expressions for the sum of a constant and literal. */
8627 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8628 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8629 code == MINUS_EXPR);
8631 /* Only do something if we found more than two objects. Otherwise,
8632 nothing has changed and we risk infinite recursion. */
8633 if (2 < ((var0 != 0) + (var1 != 0)
8634 + (con0 != 0) + (con1 != 0)
8635 + (lit0 != 0) + (lit1 != 0)
8636 + (minus_lit0 != 0) + (minus_lit1 != 0)))
8638 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8639 if (code == MINUS_EXPR)
8640 code = PLUS_EXPR;
8642 var0 = associate_trees (var0, var1, code, type);
8643 con0 = associate_trees (con0, con1, code, type);
8644 lit0 = associate_trees (lit0, lit1, code, type);
8645 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8647 /* Preserve the MINUS_EXPR if the negative part of the literal is
8648 greater than the positive part. Otherwise, the multiplicative
8649           folding code (i.e. extract_muldiv) may be fooled when
8650           unsigned constants are subtracted, as in the following
8651 example: ((X*2 + 4) - 8U)/2. */
8652 if (minus_lit0 && lit0)
8654 if (TREE_CODE (lit0) == INTEGER_CST
8655 && TREE_CODE (minus_lit0) == INTEGER_CST
8656 && tree_int_cst_lt (lit0, minus_lit0))
8658 minus_lit0 = associate_trees (minus_lit0, lit0,
8659 MINUS_EXPR, type);
8660 lit0 = 0;
8662 else
8664 lit0 = associate_trees (lit0, minus_lit0,
8665 MINUS_EXPR, type);
8666 minus_lit0 = 0;
8669 if (minus_lit0)
8671 if (con0 == 0)
8672 return fold_convert (type,
8673 associate_trees (var0, minus_lit0,
8674 MINUS_EXPR, type));
8675 else
8677 con0 = associate_trees (con0, minus_lit0,
8678 MINUS_EXPR, type);
8679 return fold_convert (type,
8680 associate_trees (var0, con0,
8681 PLUS_EXPR, type));
8685 con0 = associate_trees (con0, lit0, code, type);
8686 return fold_convert (type, associate_trees (var0, con0,
8687 code, type));
8691 return NULL_TREE;
8693 case MINUS_EXPR:
8694 /* A - (-B) -> A + B */
8695 if (TREE_CODE (arg1) == NEGATE_EXPR)
8696 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8697 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8698 if (TREE_CODE (arg0) == NEGATE_EXPR
8699 && (FLOAT_TYPE_P (type)
8700 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8701 && negate_expr_p (arg1)
8702 && reorder_operands_p (arg0, arg1))
8703 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8704 TREE_OPERAND (arg0, 0));
8705 /* Convert -A - 1 to ~A. */
8706 if (INTEGRAL_TYPE_P (type)
8707 && TREE_CODE (arg0) == NEGATE_EXPR
8708 && integer_onep (arg1))
8709 return fold_build1 (BIT_NOT_EXPR, type,
8710 fold_convert (type, TREE_OPERAND (arg0, 0)));
8712 /* Convert -1 - A to ~A. */
8713 if (INTEGRAL_TYPE_P (type)
8714 && integer_all_onesp (arg0))
8715 return fold_build1 (BIT_NOT_EXPR, type, arg1);
8717 if (! FLOAT_TYPE_P (type))
8719 if (integer_zerop (arg0))
8720 return negate_expr (fold_convert (type, arg1));
8721 if (integer_zerop (arg1))
8722 return non_lvalue (fold_convert (type, arg0));
8724 /* Fold A - (A & B) into ~B & A. */
8725 if (!TREE_SIDE_EFFECTS (arg0)
8726 && TREE_CODE (arg1) == BIT_AND_EXPR)
8728 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8729 return fold_build2 (BIT_AND_EXPR, type,
8730 fold_build1 (BIT_NOT_EXPR, type,
8731 TREE_OPERAND (arg1, 0)),
8732 arg0);
8733 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8734 return fold_build2 (BIT_AND_EXPR, type,
8735 fold_build1 (BIT_NOT_EXPR, type,
8736 TREE_OPERAND (arg1, 1)),
8737 arg0);
8740 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8741 any power of 2 minus 1. */
8742 if (TREE_CODE (arg0) == BIT_AND_EXPR
8743 && TREE_CODE (arg1) == BIT_AND_EXPR
8744 && operand_equal_p (TREE_OPERAND (arg0, 0),
8745 TREE_OPERAND (arg1, 0), 0))
8747 tree mask0 = TREE_OPERAND (arg0, 1);
8748 tree mask1 = TREE_OPERAND (arg1, 1);
8749 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
8751 if (operand_equal_p (tem, mask1, 0))
8753 tem = fold_build2 (BIT_XOR_EXPR, type,
8754 TREE_OPERAND (arg0, 0), mask1);
8755 return fold_build2 (MINUS_EXPR, type, tem, mask1);
8760 /* See if ARG1 is zero and X - ARG1 reduces to X. */
8761 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
8762 return non_lvalue (fold_convert (type, arg0));
8764 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
8765 ARG0 is zero and X + ARG0 reduces to X, since that would mean
8766 (-ARG1 + ARG0) reduces to -ARG1. */
8767 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8768 return negate_expr (fold_convert (type, arg1));
8770 /* Fold &x - &x. This can happen from &x.foo - &x.
8771 This is unsafe for certain floats even in non-IEEE formats.
8772         In IEEE, it is unsafe because it gives the wrong result for NaNs.
8773 Also note that operand_equal_p is always false if an operand
8774 is volatile. */
8776 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8777 && operand_equal_p (arg0, arg1, 0))
8778 return fold_convert (type, integer_zero_node);
8780 /* A - B -> A + (-B) if B is easily negatable. */
8781 if (negate_expr_p (arg1)
8782 && ((FLOAT_TYPE_P (type)
8783 /* Avoid this transformation if B is a positive REAL_CST. */
8784 && (TREE_CODE (arg1) != REAL_CST
8785 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
8786 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
8787 return fold_build2 (PLUS_EXPR, type,
8788 fold_convert (type, arg0),
8789 fold_convert (type, negate_expr (arg1)));
8791 /* Try folding difference of addresses. */
8793 HOST_WIDE_INT diff;
8795 if ((TREE_CODE (arg0) == ADDR_EXPR
8796 || TREE_CODE (arg1) == ADDR_EXPR)
8797 && ptr_difference_const (arg0, arg1, &diff))
8798 return build_int_cst_type (type, diff);
8801 /* Fold &a[i] - &a[j] to i-j. */
8802 if (TREE_CODE (arg0) == ADDR_EXPR
8803 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
8804 && TREE_CODE (arg1) == ADDR_EXPR
8805 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
8807 tree aref0 = TREE_OPERAND (arg0, 0);
8808 tree aref1 = TREE_OPERAND (arg1, 0);
8809 if (operand_equal_p (TREE_OPERAND (aref0, 0),
8810 TREE_OPERAND (aref1, 0), 0))
8812 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
8813 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
8814 tree esz = array_ref_element_size (aref0);
8815 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8816 return fold_build2 (MULT_EXPR, type, diff,
8817 fold_convert (type, esz));
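              /* Editorial example (illustrative only): for "int a[10]",
                 the address difference &a[i] - &a[j] folds here to
                 (i - j) * sizeof (int), the index difference scaled by
                 the element size.  */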
8822      /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
8823         of the array.  The loop optimizer sometimes produces this type of
8824         expression.  */
8825 if (TREE_CODE (arg0) == ADDR_EXPR)
8827 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
8828 if (tem)
8829 return fold_convert (type, tem);
8832 if (flag_unsafe_math_optimizations
8833 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8834 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8835 && (tem = distribute_real_division (code, type, arg0, arg1)))
8836 return tem;
8838 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
8839 same or one. */
8840 if ((TREE_CODE (arg0) == MULT_EXPR
8841 || TREE_CODE (arg1) == MULT_EXPR)
8842 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8844 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8845 if (tem)
8846 return tem;
8849 goto associate;
8851 case MULT_EXPR:
8852 /* (-A) * (-B) -> A * B */
8853 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8854 return fold_build2 (MULT_EXPR, type,
8855 fold_convert (type, TREE_OPERAND (arg0, 0)),
8856 fold_convert (type, negate_expr (arg1)));
8857 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8858 return fold_build2 (MULT_EXPR, type,
8859 fold_convert (type, negate_expr (arg0)),
8860 fold_convert (type, TREE_OPERAND (arg1, 0)));
8862 if (! FLOAT_TYPE_P (type))
8864 if (integer_zerop (arg1))
8865 return omit_one_operand (type, arg1, arg0);
8866 if (integer_onep (arg1))
8867 return non_lvalue (fold_convert (type, arg0));
8868 /* Transform x * -1 into -x. */
8869 if (integer_all_onesp (arg1))
8870 return fold_convert (type, negate_expr (arg0));
8872 /* (a * (1 << b)) is (a << b) */
8873 if (TREE_CODE (arg1) == LSHIFT_EXPR
8874 && integer_onep (TREE_OPERAND (arg1, 0)))
8875 return fold_build2 (LSHIFT_EXPR, type, arg0,
8876 TREE_OPERAND (arg1, 1));
8877 if (TREE_CODE (arg0) == LSHIFT_EXPR
8878 && integer_onep (TREE_OPERAND (arg0, 0)))
8879 return fold_build2 (LSHIFT_EXPR, type, arg1,
8880 TREE_OPERAND (arg0, 1));
8882 if (TREE_CODE (arg1) == INTEGER_CST
8883 && 0 != (tem = extract_muldiv (op0,
8884 fold_convert (type, arg1),
8885 code, NULL_TREE)))
8886 return fold_convert (type, tem);
8888 /* Optimize z * conj(z) for integer complex numbers. */
8889 if (TREE_CODE (arg0) == CONJ_EXPR
8890 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8891 return fold_mult_zconjz (type, arg1);
8892 if (TREE_CODE (arg1) == CONJ_EXPR
8893 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8894 return fold_mult_zconjz (type, arg0);
8896 else
8898 /* Maybe fold x * 0 to 0. The expressions aren't the same
8899 when x is NaN, since x * 0 is also NaN. Nor are they the
8900 same in modes with signed zeros, since multiplying a
8901 negative value by 0 gives -0, not +0. */
8902 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8903 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8904 && real_zerop (arg1))
8905 return omit_one_operand (type, arg1, arg0);
8906 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
8907 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8908 && real_onep (arg1))
8909 return non_lvalue (fold_convert (type, arg0));
8911 /* Transform x * -1.0 into -x. */
8912 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8913 && real_minus_onep (arg1))
8914 return fold_convert (type, negate_expr (arg0));
8916 /* Convert (C1/X)*C2 into (C1*C2)/X. */
8917 if (flag_unsafe_math_optimizations
8918 && TREE_CODE (arg0) == RDIV_EXPR
8919 && TREE_CODE (arg1) == REAL_CST
8920 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
8922 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
8923 arg1, 0);
8924 if (tem)
8925 return fold_build2 (RDIV_EXPR, type, tem,
8926 TREE_OPERAND (arg0, 1));
8929 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
8930 if (operand_equal_p (arg0, arg1, 0))
8932 tree tem = fold_strip_sign_ops (arg0);
8933 if (tem != NULL_TREE)
8935 tem = fold_convert (type, tem);
8936 return fold_build2 (MULT_EXPR, type, tem, tem);
8940 /* Optimize z * conj(z) for floating point complex numbers.
8941 Guarded by flag_unsafe_math_optimizations as non-finite
8942 imaginary components don't produce scalar results. */
8943 if (flag_unsafe_math_optimizations
8944 && TREE_CODE (arg0) == CONJ_EXPR
8945 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8946 return fold_mult_zconjz (type, arg1);
8947 if (flag_unsafe_math_optimizations
8948 && TREE_CODE (arg1) == CONJ_EXPR
8949 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8950 return fold_mult_zconjz (type, arg0);
8952 if (flag_unsafe_math_optimizations)
8954 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8955 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8957 /* Optimizations of root(...)*root(...). */
8958 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
8960 tree rootfn, arg, arglist;
8961 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8962 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8964 /* Optimize sqrt(x)*sqrt(x) as x. */
8965 if (BUILTIN_SQRT_P (fcode0)
8966 && operand_equal_p (arg00, arg10, 0)
8967 && ! HONOR_SNANS (TYPE_MODE (type)))
8968 return arg00;
8970 /* Optimize root(x)*root(y) as root(x*y). */
8971 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8972 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8973 arglist = build_tree_list (NULL_TREE, arg);
8974 return build_function_call_expr (rootfn, arglist);
8977 /* Optimize expN(x)*expN(y) as expN(x+y). */
8978 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
8980 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8981 tree arg = fold_build2 (PLUS_EXPR, type,
8982 TREE_VALUE (TREE_OPERAND (arg0, 1)),
8983 TREE_VALUE (TREE_OPERAND (arg1, 1)));
8984 tree arglist = build_tree_list (NULL_TREE, arg);
8985 return build_function_call_expr (expfn, arglist);
8988 /* Optimizations of pow(...)*pow(...). */
8989 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
8990 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
8991 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
8993 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8994 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8995 1)));
8996 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8997 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8998 1)));
9000 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9001 if (operand_equal_p (arg01, arg11, 0))
9003 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9004 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9005 tree arglist = tree_cons (NULL_TREE, arg,
9006 build_tree_list (NULL_TREE,
9007 arg01));
9008 return build_function_call_expr (powfn, arglist);
9011 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9012 if (operand_equal_p (arg00, arg10, 0))
9014 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9015 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9016 tree arglist = tree_cons (NULL_TREE, arg00,
9017 build_tree_list (NULL_TREE,
9018 arg));
9019 return build_function_call_expr (powfn, arglist);
9023 /* Optimize tan(x)*cos(x) as sin(x). */
9024 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9025 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9026 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9027 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9028 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9029 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9030 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9031 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9033 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9035 if (sinfn != NULL_TREE)
9036 return build_function_call_expr (sinfn,
9037 TREE_OPERAND (arg0, 1));
9040 /* Optimize x*pow(x,c) as pow(x,c+1). */
9041 if (fcode1 == BUILT_IN_POW
9042 || fcode1 == BUILT_IN_POWF
9043 || fcode1 == BUILT_IN_POWL)
9045 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9046 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9047 1)));
9048 if (TREE_CODE (arg11) == REAL_CST
9049 && ! TREE_CONSTANT_OVERFLOW (arg11)
9050 && operand_equal_p (arg0, arg10, 0))
9052 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9053 REAL_VALUE_TYPE c;
9054 tree arg, arglist;
9056 c = TREE_REAL_CST (arg11);
9057 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9058 arg = build_real (type, c);
9059 arglist = build_tree_list (NULL_TREE, arg);
9060 arglist = tree_cons (NULL_TREE, arg0, arglist);
9061 return build_function_call_expr (powfn, arglist);
9065 /* Optimize pow(x,c)*x as pow(x,c+1). */
9066 if (fcode0 == BUILT_IN_POW
9067 || fcode0 == BUILT_IN_POWF
9068 || fcode0 == BUILT_IN_POWL)
9070 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9071 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9072 1)));
9073 if (TREE_CODE (arg01) == REAL_CST
9074 && ! TREE_CONSTANT_OVERFLOW (arg01)
9075 && operand_equal_p (arg1, arg00, 0))
9077 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9078 REAL_VALUE_TYPE c;
9079 tree arg, arglist;
9081 c = TREE_REAL_CST (arg01);
9082 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9083 arg = build_real (type, c);
9084 arglist = build_tree_list (NULL_TREE, arg);
9085 arglist = tree_cons (NULL_TREE, arg1, arglist);
9086 return build_function_call_expr (powfn, arglist);
9090 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9091 if (! optimize_size
9092 && operand_equal_p (arg0, arg1, 0))
9094 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9096 if (powfn)
9098 tree arg = build_real (type, dconst2);
9099 tree arglist = build_tree_list (NULL_TREE, arg);
9100 arglist = tree_cons (NULL_TREE, arg0, arglist);
9101 return build_function_call_expr (powfn, arglist);
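	 /* A note on the rewrite above: representing x*x as pow (x, 2.0)
	    costs nothing at expansion time (the constant power is
	    expanded back into x*x, per the comment above), but the
	    canonical pow form exposes the product to the pow-combining
	    folds earlier in this case.  It is skipped when optimizing
	    for size, presumably so that a pow libcall can never survive
	    into the output.  */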
9106 goto associate;
9108 case BIT_IOR_EXPR:
9109 bit_ior:
9110 if (integer_all_onesp (arg1))
9111 return omit_one_operand (type, arg1, arg0);
9112 if (integer_zerop (arg1))
9113 return non_lvalue (fold_convert (type, arg0));
9114 if (operand_equal_p (arg0, arg1, 0))
9115 return non_lvalue (fold_convert (type, arg0));
9117 /* ~X | X is -1. */
9118 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9119 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9121 t1 = build_int_cst (type, -1);
9122 t1 = force_fit_type (t1, 0, false, false);
9123 return omit_one_operand (type, t1, arg1);
9126 /* X | ~X is -1. */
9127 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9128 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9130 t1 = build_int_cst (type, -1);
9131 t1 = force_fit_type (t1, 0, false, false);
9132 return omit_one_operand (type, t1, arg0);
9135 /* Canonicalize (X & C1) | C2. */
9136 if (TREE_CODE (arg0) == BIT_AND_EXPR
9137 && TREE_CODE (arg1) == INTEGER_CST
9138 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9140 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9141 int width = TYPE_PRECISION (type);
9142 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9143 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9144 hi2 = TREE_INT_CST_HIGH (arg1);
9145 lo2 = TREE_INT_CST_LOW (arg1);
9147 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9148 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9149 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9151 if (width > HOST_BITS_PER_WIDE_INT)
9153 mhi = (unsigned HOST_WIDE_INT) -1
9154 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9155 mlo = -1;
9157 else
9159 mhi = 0;
9160 mlo = (unsigned HOST_WIDE_INT) -1
9161 >> (HOST_BITS_PER_WIDE_INT - width);
9164 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9165 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9166 return fold_build2 (BIT_IOR_EXPR, type,
9167 TREE_OPERAND (arg0, 0), arg1);
9169 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9170 hi1 &= mhi;
9171 lo1 &= mlo;
9172 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9173 return fold_build2 (BIT_IOR_EXPR, type,
9174 fold_build2 (BIT_AND_EXPR, type,
9175 TREE_OPERAND (arg0, 0),
9176 build_int_cst_wide (type,
9177 lo1 & ~lo2,
9178 hi1 & ~hi2)),
9179 arg1);
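	 /* Worked example for the canonicalization above, with 8-bit X:
	    in (X & 0xF0) | 0x3C we have (C1 & C2) == 0x30 != C1 and
	    (C1 | C2) == 0xFC != ~0, so C1 is trimmed to C1 & ~C2 == 0xC0,
	    giving (X & 0xC0) | 0x3C.  */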
9182 /* (X & Y) | Y is (X, Y). */
9183 if (TREE_CODE (arg0) == BIT_AND_EXPR
9184 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9185 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9186 /* (X & Y) | X is (Y, X). */
9187 if (TREE_CODE (arg0) == BIT_AND_EXPR
9188 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9189 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9190 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9191 /* X | (X & Y) is (Y, X). */
9192 if (TREE_CODE (arg1) == BIT_AND_EXPR
9193 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9194 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9195 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9196 /* X | (Y & X) is (Y, X). */
9197 if (TREE_CODE (arg1) == BIT_AND_EXPR
9198 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9199 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9200 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9202 t1 = distribute_bit_expr (code, type, arg0, arg1);
9203 if (t1 != NULL_TREE)
9204 return t1;
9206 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9208 This results in more efficient code for machines without a NAND
9209 instruction. Combine will canonicalize to the first form
9210 which will allow use of NAND instructions provided by the
9211 backend if they exist. */
9212 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9213 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9215 return fold_build1 (BIT_NOT_EXPR, type,
9216 build2 (BIT_AND_EXPR, type,
9217 TREE_OPERAND (arg0, 0),
9218 TREE_OPERAND (arg1, 0)));
9221 /* See if this can be simplified into a rotate first. If that
9222 is unsuccessful continue in the association code. */
9223 goto bit_rotate;
9225 case BIT_XOR_EXPR:
9226 if (integer_zerop (arg1))
9227 return non_lvalue (fold_convert (type, arg0));
9228 if (integer_all_onesp (arg1))
9229 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9230 if (operand_equal_p (arg0, arg1, 0))
9231 return omit_one_operand (type, integer_zero_node, arg0);
9233 /* ~X ^ X is -1. */
9234 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9235 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9237 t1 = build_int_cst (type, -1);
9238 t1 = force_fit_type (t1, 0, false, false);
9239 return omit_one_operand (type, t1, arg1);
9242 /* X ^ ~X is -1. */
9243 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9244 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9246 t1 = build_int_cst (type, -1);
9247 t1 = force_fit_type (t1, 0, false, false);
9248 return omit_one_operand (type, t1, arg0);
9251 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9252 with a constant, and the two constants have no bits in common,
9253 we should treat this as a BIT_IOR_EXPR since this may produce more
9254 simplifications. */
9255 if (TREE_CODE (arg0) == BIT_AND_EXPR
9256 && TREE_CODE (arg1) == BIT_AND_EXPR
9257 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9258 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9259 && integer_zerop (const_binop (BIT_AND_EXPR,
9260 TREE_OPERAND (arg0, 1),
9261 TREE_OPERAND (arg1, 1), 0)))
9263 code = BIT_IOR_EXPR;
9264 goto bit_ior;
9267 /* (X | Y) ^ X -> Y & ~X. */
9268 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9269 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9271 tree t2 = TREE_OPERAND (arg0, 1);
9272 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9273 arg1);
9274 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9275 fold_convert (type, t1));
9276 return t1;
9279 /* (Y | X) ^ X -> Y & ~X. */
9280 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9281 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9283 tree t2 = TREE_OPERAND (arg0, 0);
9284 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9285 arg1);
9286 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9287 fold_convert (type, t1));
9288 return t1;
9291 /* X ^ (X | Y) -> Y & ~X. */
9292 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9293 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9295 tree t2 = TREE_OPERAND (arg1, 1);
9296 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9297 arg0);
9298 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9299 fold_convert (type, t1));
9300 return t1;
9303 /* X ^ (Y | X) -> Y & ~X. */
9304 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9305 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9307 tree t2 = TREE_OPERAND (arg1, 0);
9308 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9309 arg0);
9310 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9311 fold_convert (type, t1));
9312 return t1;
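	 /* Why the four folds above are valid: on bits where X is 1 the
	    inner IOR yields 1 and XORing with X clears it, while on bits
	    where X is 0 the result is just the corresponding bit of Y.
	    Hence (X | Y) ^ X == Y & ~X; the other three forms are the
	    same identity modulo commutativity.  */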
9315 /* Convert ~X ^ ~Y to X ^ Y. */
9316 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9317 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9318 return fold_build2 (code, type,
9319 fold_convert (type, TREE_OPERAND (arg0, 0)),
9320 fold_convert (type, TREE_OPERAND (arg1, 0)));
9322 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9323 if (TREE_CODE (arg0) == BIT_AND_EXPR
9324 && integer_onep (TREE_OPERAND (arg0, 1))
9325 && integer_onep (arg1))
9326 return fold_build2 (EQ_EXPR, type, arg0,
9327 build_int_cst (TREE_TYPE (arg0), 0));
9329 /* Fold (X & Y) ^ Y as ~X & Y. */
9330 if (TREE_CODE (arg0) == BIT_AND_EXPR
9331 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9333 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9334 return fold_build2 (BIT_AND_EXPR, type,
9335 fold_build1 (BIT_NOT_EXPR, type, tem),
9336 fold_convert (type, arg1));
9338 /* Fold (X & Y) ^ X as ~Y & X. */
9339 if (TREE_CODE (arg0) == BIT_AND_EXPR
9340 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9341 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9343 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9344 return fold_build2 (BIT_AND_EXPR, type,
9345 fold_build1 (BIT_NOT_EXPR, type, tem),
9346 fold_convert (type, arg1));
9348 /* Fold X ^ (X & Y) as X & ~Y. */
9349 if (TREE_CODE (arg1) == BIT_AND_EXPR
9350 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9352 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9353 return fold_build2 (BIT_AND_EXPR, type,
9354 fold_convert (type, arg0),
9355 fold_build1 (BIT_NOT_EXPR, type, tem));
9357 /* Fold X ^ (Y & X) as ~Y & X. */
9358 if (TREE_CODE (arg1) == BIT_AND_EXPR
9359 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9360 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9362 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9363 return fold_build2 (BIT_AND_EXPR, type,
9364 fold_build1 (BIT_NOT_EXPR, type, tem),
9365 fold_convert (type, arg0));
9368 /* See if this can be simplified into a rotate first. If that
9369 is unsuccessful continue in the association code. */
9370 goto bit_rotate;
9372 case BIT_AND_EXPR:
9373 if (integer_all_onesp (arg1))
9374 return non_lvalue (fold_convert (type, arg0));
9375 if (integer_zerop (arg1))
9376 return omit_one_operand (type, arg1, arg0);
9377 if (operand_equal_p (arg0, arg1, 0))
9378 return non_lvalue (fold_convert (type, arg0));
9380 /* ~X & X is always zero. */
9381 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9382 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9383 return omit_one_operand (type, integer_zero_node, arg1);
9385 /* X & ~X is always zero. */
9386 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9387 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9388 return omit_one_operand (type, integer_zero_node, arg0);
9390 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9391 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9392 && TREE_CODE (arg1) == INTEGER_CST
9393 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9394 return fold_build2 (BIT_IOR_EXPR, type,
9395 fold_build2 (BIT_AND_EXPR, type,
9396 TREE_OPERAND (arg0, 0), arg1),
9397 fold_build2 (BIT_AND_EXPR, type,
9398 TREE_OPERAND (arg0, 1), arg1));
9400 /* (X | Y) & Y is (X, Y). */
9401 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9402 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9403 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9404 /* (X | Y) & X is (Y, X). */
9405 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9406 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9407 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9408 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9409 /* X & (X | Y) is (Y, X). */
9410 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9411 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9412 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9413 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9414 /* X & (Y | X) is (Y, X). */
9415 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9416 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9417 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9418 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9420 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9421 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9422 && integer_onep (TREE_OPERAND (arg0, 1))
9423 && integer_onep (arg1))
9425 tem = TREE_OPERAND (arg0, 0);
9426 return fold_build2 (EQ_EXPR, type,
9427 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9428 build_int_cst (TREE_TYPE (tem), 1)),
9429 build_int_cst (TREE_TYPE (tem), 0));
9431 /* Fold ~X & 1 as (X & 1) == 0. */
9432 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9433 && integer_onep (arg1))
9435 tem = TREE_OPERAND (arg0, 0);
9436 return fold_build2 (EQ_EXPR, type,
9437 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9438 build_int_cst (TREE_TYPE (tem), 1)),
9439 build_int_cst (TREE_TYPE (tem), 0));
9442 /* Fold (X ^ Y) & Y as ~X & Y. */
9443 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9444 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9446 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9447 return fold_build2 (BIT_AND_EXPR, type,
9448 fold_build1 (BIT_NOT_EXPR, type, tem),
9449 fold_convert (type, arg1));
9451 /* Fold (X ^ Y) & X as ~Y & X. */
9452 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9453 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9454 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9456 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9457 return fold_build2 (BIT_AND_EXPR, type,
9458 fold_build1 (BIT_NOT_EXPR, type, tem),
9459 fold_convert (type, arg1));
9461 /* Fold X & (X ^ Y) as X & ~Y. */
9462 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9463 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9465 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9466 return fold_build2 (BIT_AND_EXPR, type,
9467 fold_convert (type, arg0),
9468 fold_build1 (BIT_NOT_EXPR, type, tem));
9470 /* Fold X & (Y ^ X) as ~Y & X. */
9471 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9472 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9473 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9475 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9476 return fold_build2 (BIT_AND_EXPR, type,
9477 fold_build1 (BIT_NOT_EXPR, type, tem),
9478 fold_convert (type, arg0));
9481 t1 = distribute_bit_expr (code, type, arg0, arg1);
9482 if (t1 != NULL_TREE)
9483 return t1;
9484 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9485 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9486 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9488 unsigned int prec
9489 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9491 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9492 && (~TREE_INT_CST_LOW (arg1)
9493 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9494 return fold_convert (type, TREE_OPERAND (arg0, 0));
9497 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9499 This results in more efficient code for machines without a NOR
9500 instruction. Combine will canonicalize to the first form
9501 which will allow use of NOR instructions provided by the
9502 backend if they exist. */
9503 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9504 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9506 return fold_build1 (BIT_NOT_EXPR, type,
9507 build2 (BIT_IOR_EXPR, type,
9508 TREE_OPERAND (arg0, 0),
9509 TREE_OPERAND (arg1, 0)));
9512 goto associate;
9514 case RDIV_EXPR:
9515 /* Don't touch a floating-point divide by zero unless the mode
9516 of the constant can represent infinity. */
9517 if (TREE_CODE (arg1) == REAL_CST
9518 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9519 && real_zerop (arg1))
9520 return NULL_TREE;
9522 /* Optimize A / A to 1.0 if we don't care about
9523 NaNs or Infinities. Skip the transformation
9524 for non-real operands. */
9525 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9526 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9527 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9528 && operand_equal_p (arg0, arg1, 0))
9530 tree r = build_real (TREE_TYPE (arg0), dconst1);
9532 return omit_two_operands (type, r, arg0, arg1);
9535 /* The complex version of the above A / A optimization. */
9536 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9537 && operand_equal_p (arg0, arg1, 0))
9539 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9540 if (! HONOR_NANS (TYPE_MODE (elem_type))
9541 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9543 tree r = build_real (elem_type, dconst1);
9544 /* omit_two_operands will call fold_convert for us. */
9545 return omit_two_operands (type, r, arg0, arg1);
9549 /* (-A) / (-B) -> A / B */
9550 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9551 return fold_build2 (RDIV_EXPR, type,
9552 TREE_OPERAND (arg0, 0),
9553 negate_expr (arg1));
9554 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9555 return fold_build2 (RDIV_EXPR, type,
9556 negate_expr (arg0),
9557 TREE_OPERAND (arg1, 0));
9559 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9560 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9561 && real_onep (arg1))
9562 return non_lvalue (fold_convert (type, arg0));
9564 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9565 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9566 && real_minus_onep (arg1))
9567 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9569 /* If ARG1 is a constant, we can convert this to a multiply by the
9570 reciprocal. This does not have the same rounding properties,
9571 so only do this if -funsafe-math-optimizations. We can actually
9572 always safely do it if ARG1 is a power of two, but it's hard to
9573 tell if it is or not in a portable manner. */
9574 if (TREE_CODE (arg1) == REAL_CST)
9576 if (flag_unsafe_math_optimizations
9577 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9578 arg1, 0)))
9579 return fold_build2 (MULT_EXPR, type, arg0, tem);
9580 /* Find the reciprocal if optimizing and the result is exact. */
9581 if (optimize)
9583 REAL_VALUE_TYPE r;
9584 r = TREE_REAL_CST (arg1);
9585 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
9587 tem = build_real (type, r);
9588 return fold_build2 (MULT_EXPR, type,
9589 fold_convert (type, arg0), tem);
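	 /* For illustration: whenever we are optimizing, x / 4.0 is
	    rewritten as x * 0.25, since 0.25 is an exact inverse and no
	    rounding changes.  x / 3.0 becomes x * (1.0/3.0) only under
	    -funsafe-math-optimizations, because that product can round
	    differently from the division.  */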
9593 /* Convert A/B/C to A/(B*C). */
9594 if (flag_unsafe_math_optimizations
9595 && TREE_CODE (arg0) == RDIV_EXPR)
9596 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9597 fold_build2 (MULT_EXPR, type,
9598 TREE_OPERAND (arg0, 1), arg1));
9600 /* Convert A/(B/C) to (A/B)*C. */
9601 if (flag_unsafe_math_optimizations
9602 && TREE_CODE (arg1) == RDIV_EXPR)
9603 return fold_build2 (MULT_EXPR, type,
9604 fold_build2 (RDIV_EXPR, type, arg0,
9605 TREE_OPERAND (arg1, 0)),
9606 TREE_OPERAND (arg1, 1));
9608 /* Convert C1/(X*C2) into (C1/C2)/X. */
9609 if (flag_unsafe_math_optimizations
9610 && TREE_CODE (arg1) == MULT_EXPR
9611 && TREE_CODE (arg0) == REAL_CST
9612 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9614 tree tem = const_binop (RDIV_EXPR, arg0,
9615 TREE_OPERAND (arg1, 1), 0);
9616 if (tem)
9617 return fold_build2 (RDIV_EXPR, type, tem,
9618 TREE_OPERAND (arg1, 0));
9621 if (flag_unsafe_math_optimizations)
9623 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9624 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9626 /* Optimize sin(x)/cos(x) as tan(x). */
9627 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9628 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9629 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9630 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9631 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9633 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9635 if (tanfn != NULL_TREE)
9636 return build_function_call_expr (tanfn,
9637 TREE_OPERAND (arg0, 1));
9640 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9641 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9642 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9643 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9644 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9645 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9647 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9649 if (tanfn != NULL_TREE)
9651 tree tmp = TREE_OPERAND (arg0, 1);
9652 tmp = build_function_call_expr (tanfn, tmp);
9653 return fold_build2 (RDIV_EXPR, type,
9654 build_real (type, dconst1), tmp);
9658 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9659 NaNs or Infinities. */
9660 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9661 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9662 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9664 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9665 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9667 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9668 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9669 && operand_equal_p (arg00, arg01, 0))
9671 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9673 if (cosfn != NULL_TREE)
9674 return build_function_call_expr (cosfn,
9675 TREE_OPERAND (arg0, 1));
9679 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9680 NaNs or Infinities. */
9681 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9682 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9683 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9685 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9686 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9688 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9689 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9690 && operand_equal_p (arg00, arg01, 0))
9692 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9694 if (cosfn != NULL_TREE)
9696 tree tmp = TREE_OPERAND (arg0, 1);
9697 tmp = build_function_call_expr (cosfn, tmp);
9698 return fold_build2 (RDIV_EXPR, type,
9699 build_real (type, dconst1),
9700 tmp);
9705 /* Optimize pow(x,c)/x as pow(x,c-1). */
9706 if (fcode0 == BUILT_IN_POW
9707 || fcode0 == BUILT_IN_POWF
9708 || fcode0 == BUILT_IN_POWL)
9710 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9711 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9712 if (TREE_CODE (arg01) == REAL_CST
9713 && ! TREE_CONSTANT_OVERFLOW (arg01)
9714 && operand_equal_p (arg1, arg00, 0))
9716 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9717 REAL_VALUE_TYPE c;
9718 tree arg, arglist;
9720 c = TREE_REAL_CST (arg01);
9721 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9722 arg = build_real (type, c);
9723 arglist = build_tree_list (NULL_TREE, arg);
9724 arglist = tree_cons (NULL_TREE, arg1, arglist);
9725 return build_function_call_expr (powfn, arglist);
9729 /* Optimize x/expN(y) into x*expN(-y). */
9730 if (BUILTIN_EXPONENT_P (fcode1))
9732 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9733 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
9734 tree arglist = build_tree_list (NULL_TREE,
9735 fold_convert (type, arg));
9736 arg1 = build_function_call_expr (expfn, arglist);
9737 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9740 /* Optimize x/pow(y,z) into x*pow(y,-z). */
9741 if (fcode1 == BUILT_IN_POW
9742 || fcode1 == BUILT_IN_POWF
9743 || fcode1 == BUILT_IN_POWL)
9745 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9746 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9747 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
9748 tree neg11 = fold_convert (type, negate_expr (arg11));
9749 tree arglist = tree_cons (NULL_TREE, arg10,
9750 build_tree_list (NULL_TREE, neg11));
9751 arg1 = build_function_call_expr (powfn, arglist);
9752 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9755 return NULL_TREE;
9757 case TRUNC_DIV_EXPR:
9758 case FLOOR_DIV_EXPR:
9759 /* Simplify A / (B << N) where A and B are positive and B is
9760 a power of 2, to A >> (N + log2(B)). */
9761 if (TREE_CODE (arg1) == LSHIFT_EXPR
9762 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9764 tree sval = TREE_OPERAND (arg1, 0);
9765 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
9767 tree sh_cnt = TREE_OPERAND (arg1, 1);
9768 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
9770 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
9771 sh_cnt, build_int_cst (NULL_TREE, pow2));
9772 return fold_build2 (RSHIFT_EXPR, type,
9773 fold_convert (type, arg0), sh_cnt);
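	 /* For illustration: with unsigned A, A / (8 << n) folds to
	    A >> (n + 3); dividing a nonnegative value by 2**(n+3) and
	    shifting it right by n + 3 are the same operation.  */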
9776 /* Fall thru */
9778 case ROUND_DIV_EXPR:
9779 case CEIL_DIV_EXPR:
9780 case EXACT_DIV_EXPR:
9781 if (integer_onep (arg1))
9782 return non_lvalue (fold_convert (type, arg0));
9783 if (integer_zerop (arg1))
9784 return NULL_TREE;
9785 /* X / -1 is -X. */
9786 if (!TYPE_UNSIGNED (type)
9787 && TREE_CODE (arg1) == INTEGER_CST
9788 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9789 && TREE_INT_CST_HIGH (arg1) == -1)
9790 return fold_convert (type, negate_expr (arg0));
9792 /* Convert -A / -B to A / B when the type is signed and overflow is
9793 undefined. */
9794 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9795 && TREE_CODE (arg0) == NEGATE_EXPR
9796 && negate_expr_p (arg1))
9797 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9798 negate_expr (arg1));
9799 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9800 && TREE_CODE (arg1) == NEGATE_EXPR
9801 && negate_expr_p (arg0))
9802 return fold_build2 (code, type, negate_expr (arg0),
9803 TREE_OPERAND (arg1, 0));
9805 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
9806 operation, EXACT_DIV_EXPR.
9808 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
9809 At one time others generated faster code; it's not clear if they do
9810 after the last round of changes to the DIV code in expmed.c. */
9811 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
9812 && multiple_of_p (type, arg0, arg1))
9813 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
9815 if (TREE_CODE (arg1) == INTEGER_CST
9816 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9817 return fold_convert (type, tem);
9819 return NULL_TREE;
9821 case CEIL_MOD_EXPR:
9822 case FLOOR_MOD_EXPR:
9823 case ROUND_MOD_EXPR:
9824 case TRUNC_MOD_EXPR:
9825 /* X % 1 is always zero, but be sure to preserve any side
9826 effects in X. */
9827 if (integer_onep (arg1))
9828 return omit_one_operand (type, integer_zero_node, arg0);
9830 /* For X % 0, return X % 0 unchanged so that we get the
9831 proper warnings and errors. */
9832 if (integer_zerop (arg1))
9833 return NULL_TREE;
9835 /* 0 % X is always zero, but be sure to preserve any side
9836 effects in X. Place this after checking for X == 0. */
9837 if (integer_zerop (arg0))
9838 return omit_one_operand (type, integer_zero_node, arg1);
9840 /* X % -1 is zero. */
9841 if (!TYPE_UNSIGNED (type)
9842 && TREE_CODE (arg1) == INTEGER_CST
9843 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9844 && TREE_INT_CST_HIGH (arg1) == -1)
9845 return omit_one_operand (type, integer_zero_node, arg0);
9847 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
9848 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
9849 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
9850 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9852 tree c = arg1;
9853 /* Also optimize A % (C << N) where C is a power of 2,
9854 to A & ((C << N) - 1). */
9855 if (TREE_CODE (arg1) == LSHIFT_EXPR)
9856 c = TREE_OPERAND (arg1, 0);
9858 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
9860 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
9861 arg1, integer_one_node);
9862 return fold_build2 (BIT_AND_EXPR, type,
9863 fold_convert (type, arg0),
9864 fold_convert (type, mask));
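	 /* For illustration: unsigned X % 8 folds to X & 7 here, and
	    X % (4 << n) folds to X & ((4 << n) - 1); for a power-of-two
	    modulus of a nonnegative value, the remainder is just the
	    low-order bits.  */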
9868 /* X % -C is the same as X % C. */
9869 if (code == TRUNC_MOD_EXPR
9870 && !TYPE_UNSIGNED (type)
9871 && TREE_CODE (arg1) == INTEGER_CST
9872 && !TREE_CONSTANT_OVERFLOW (arg1)
9873 && TREE_INT_CST_HIGH (arg1) < 0
9874 && !flag_trapv
9875 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
9876 && !sign_bit_p (arg1, arg1))
9877 return fold_build2 (code, type, fold_convert (type, arg0),
9878 fold_convert (type, negate_expr (arg1)));
9880 /* X % -Y is the same as X % Y. */
9881 if (code == TRUNC_MOD_EXPR
9882 && !TYPE_UNSIGNED (type)
9883 && TREE_CODE (arg1) == NEGATE_EXPR
9884 && !flag_trapv)
9885 return fold_build2 (code, type, fold_convert (type, arg0),
9886 fold_convert (type, TREE_OPERAND (arg1, 0)));
9888 if (TREE_CODE (arg1) == INTEGER_CST
9889 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9890 return fold_convert (type, tem);
9892 return NULL_TREE;
9894 case LROTATE_EXPR:
9895 case RROTATE_EXPR:
9896 if (integer_all_onesp (arg0))
9897 return omit_one_operand (type, arg0, arg1);
9898 goto shift;
9900 case RSHIFT_EXPR:
9901 /* Optimize -1 >> x for arithmetic right shifts. */
9902 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
9903 return omit_one_operand (type, arg0, arg1);
9904 /* ... fall through ... */
9906 case LSHIFT_EXPR:
9907 shift:
9908 if (integer_zerop (arg1))
9909 return non_lvalue (fold_convert (type, arg0));
9910 if (integer_zerop (arg0))
9911 return omit_one_operand (type, arg0, arg1);
9913 /* Since a negative shift count is not well-defined,
9914 don't try to compute it in the compiler. */
9915 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
9916 return NULL_TREE;
9918 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
9919 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
9920 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9921 && host_integerp (TREE_OPERAND (arg0, 1), false)
9922 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9924 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
9925 + TREE_INT_CST_LOW (arg1));
9927 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
9928 being well defined. */
9929 if (low >= TYPE_PRECISION (type))
9931 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
9932 low = low % TYPE_PRECISION (type);
9933 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
9934 return build_int_cst (type, 0);
9935 else
9936 low = TYPE_PRECISION (type) - 1;
9939 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9940 build_int_cst (type, low));
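	 /* For illustration: (x >> 3) >> 2 folds to x >> 5.  When the
	    combined count reaches the type precision, the clamping above
	    yields the constant 0 for left shifts and unsigned right
	    shifts, keeps precision - 1 for arithmetic right shifts
	    (preserving the sign fill), and reduces the count modulo the
	    precision for rotates.  */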
9943 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
9944 into x & ((unsigned)-1 >> c) for unsigned types. */
9945 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
9946 || (TYPE_UNSIGNED (type)
9947 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
9948 && host_integerp (arg1, false)
9949 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9950 && host_integerp (TREE_OPERAND (arg0, 1), false)
9951 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9953 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9954 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
9955 tree lshift;
9956 tree arg00;
9958 if (low0 == low1)
9960 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9962 lshift = build_int_cst (type, -1);
9963 lshift = int_const_binop (code, lshift, arg1, 0);
9965 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
9969 /* Rewrite an LROTATE_EXPR by a constant into an
9970 RROTATE_EXPR by a new constant. */
9971 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
9973 tree tem = build_int_cst (NULL_TREE,
9974 GET_MODE_BITSIZE (TYPE_MODE (type)));
9975 tem = fold_convert (TREE_TYPE (arg1), tem);
9976 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
9977 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
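	 /* For illustration: an LROTATE_EXPR of a 32-bit value by 5
	    becomes an RROTATE_EXPR by 27, so later passes only need to
	    handle one rotate direction for constant counts.  */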
9980 /* If we have a rotate of a bit operation with the rotate count and
9981 the second operand of the bit operation both constant,
9982 permute the two operations. */
9983 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9984 && (TREE_CODE (arg0) == BIT_AND_EXPR
9985 || TREE_CODE (arg0) == BIT_IOR_EXPR
9986 || TREE_CODE (arg0) == BIT_XOR_EXPR)
9987 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9988 return fold_build2 (TREE_CODE (arg0), type,
9989 fold_build2 (code, type,
9990 TREE_OPERAND (arg0, 0), arg1),
9991 fold_build2 (code, type,
9992 TREE_OPERAND (arg0, 1), arg1));
9994 /* Two consecutive rotates adding up to the width of the mode can
9995 be ignored. */
9996 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9997 && TREE_CODE (arg0) == RROTATE_EXPR
9998 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9999 && TREE_INT_CST_HIGH (arg1) == 0
10000 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10001 && ((TREE_INT_CST_LOW (arg1)
10002 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10003 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10004 return TREE_OPERAND (arg0, 0);
10006 return NULL_TREE;
10008 case MIN_EXPR:
10009 if (operand_equal_p (arg0, arg1, 0))
10010 return omit_one_operand (type, arg0, arg1);
10011 if (INTEGRAL_TYPE_P (type)
10012 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10013 return omit_one_operand (type, arg1, arg0);
10014 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10015 if (tem)
10016 return tem;
10017 goto associate;
10019 case MAX_EXPR:
10020 if (operand_equal_p (arg0, arg1, 0))
10021 return omit_one_operand (type, arg0, arg1);
10022 if (INTEGRAL_TYPE_P (type)
10023 && TYPE_MAX_VALUE (type)
10024 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10025 return omit_one_operand (type, arg1, arg0);
10026 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10027 if (tem)
10028 return tem;
10029 goto associate;
10031 case TRUTH_ANDIF_EXPR:
10032 /* Note that the operands of this must be ints
10033 and their values must be 0 or 1.
10034 ("true" is a fixed value perhaps depending on the language.) */
10035 /* If first arg is constant zero, return it. */
10036 if (integer_zerop (arg0))
10037 return fold_convert (type, arg0);
10038 case TRUTH_AND_EXPR:
10039 /* If either arg is constant true, drop it. */
10040 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10041 return non_lvalue (fold_convert (type, arg1));
10042 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10043 /* Preserve sequence points. */
10044 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10045 return non_lvalue (fold_convert (type, arg0));
10046 /* If second arg is constant zero, result is zero, but first arg
10047 must be evaluated. */
10048 if (integer_zerop (arg1))
10049 return omit_one_operand (type, arg1, arg0);
10050 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10051 case will be handled here. */
10052 if (integer_zerop (arg0))
10053 return omit_one_operand (type, arg0, arg1);
10055 /* !X && X is always false. */
10056 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10057 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10058 return omit_one_operand (type, integer_zero_node, arg1);
10059 /* X && !X is always false. */
10060 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10061 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10062 return omit_one_operand (type, integer_zero_node, arg0);
10064 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10065 means A >= Y && A != MAX, but in this case we know that
10066 A < X <= MAX. */
10068 if (!TREE_SIDE_EFFECTS (arg0)
10069 && !TREE_SIDE_EFFECTS (arg1))
10071 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10072 if (tem && !operand_equal_p (tem, arg0, 0))
10073 return fold_build2 (code, type, tem, arg1);
10075 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10076 if (tem && !operand_equal_p (tem, arg1, 0))
10077 return fold_build2 (code, type, arg0, tem);
10080 truth_andor:
10081 /* We only do these simplifications if we are optimizing. */
10082 if (!optimize)
10083 return NULL_TREE;
10085 /* Check for things like (A || B) && (A || C). We can convert this
10086 to A || (B && C). Note that either operator can be any of the four
10087 truth and/or operations and the transformation will still be
10088 valid. Also note that we only care about order for the
10089 ANDIF and ORIF operators. If B contains side effects, this
10090 might change the truth-value of A. */
10091 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10092 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10093 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10094 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10095 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10096 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10098 tree a00 = TREE_OPERAND (arg0, 0);
10099 tree a01 = TREE_OPERAND (arg0, 1);
10100 tree a10 = TREE_OPERAND (arg1, 0);
10101 tree a11 = TREE_OPERAND (arg1, 1);
10102 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10103 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10104 && (code == TRUTH_AND_EXPR
10105 || code == TRUTH_OR_EXPR));
10107 if (operand_equal_p (a00, a10, 0))
10108 return fold_build2 (TREE_CODE (arg0), type, a00,
10109 fold_build2 (code, type, a01, a11));
10110 else if (commutative && operand_equal_p (a00, a11, 0))
10111 return fold_build2 (TREE_CODE (arg0), type, a00,
10112 fold_build2 (code, type, a01, a10));
10113 else if (commutative && operand_equal_p (a01, a10, 0))
10114 return fold_build2 (TREE_CODE (arg0), type, a01,
10115 fold_build2 (code, type, a00, a11));
10117 /* This case is tricky because we must either have commutative
10118 operators or else A10 must not have side-effects. */
10120 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10121 && operand_equal_p (a01, a11, 0))
10122 return fold_build2 (TREE_CODE (arg0), type,
10123 fold_build2 (code, type, a00, a10),
10124 a01);
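	 /* For illustration: the distribution above rewrites
	    (a || b) && (a || c) as a || (b && c), and the final case
	    rewrites (a && b) || (c && b) as (a || c) && b, saving one
	    truth operation whenever two operands match.  */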
10127 /* See if we can build a range comparison. */
10128 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10129 return tem;
10131 /* Check for the possibility of merging component references. If our
10132 lhs is another similar operation, try to merge its rhs with our
10133 rhs. Then try to merge our lhs and rhs. */
10134 if (TREE_CODE (arg0) == code
10135 && 0 != (tem = fold_truthop (code, type,
10136 TREE_OPERAND (arg0, 1), arg1)))
10137 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10139 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10140 return tem;
10142 return NULL_TREE;
10144 case TRUTH_ORIF_EXPR:
10145 /* Note that the operands of this must be ints
10146 and their values must be 0 or true.
10147 ("true" is a fixed value perhaps depending on the language.) */
10148 /* If first arg is constant true, return it. */
10149 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10150 return fold_convert (type, arg0);
10151 case TRUTH_OR_EXPR:
10152 /* If either arg is constant zero, drop it. */
10153 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10154 return non_lvalue (fold_convert (type, arg1));
10155 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10156 /* Preserve sequence points. */
10157 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10158 return non_lvalue (fold_convert (type, arg0));
10159 /* If second arg is constant true, result is true, but we must
10160 evaluate first arg. */
10161 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10162 return omit_one_operand (type, arg1, arg0);
10163 /* Likewise for first arg, but note this only occurs here for
10164 TRUTH_OR_EXPR. */
10165 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10166 return omit_one_operand (type, arg0, arg1);
10168 /* !X || X is always true. */
10169 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10170 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10171 return omit_one_operand (type, integer_one_node, arg1);
10172 /* X || !X is always true. */
10173 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10174 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10175 return omit_one_operand (type, integer_one_node, arg0);
10177 goto truth_andor;
10179 case TRUTH_XOR_EXPR:
10180 /* If the second arg is constant zero, drop it. */
10181 if (integer_zerop (arg1))
10182 return non_lvalue (fold_convert (type, arg0));
10183 /* If the second arg is constant true, this is a logical inversion. */
10184 if (integer_onep (arg1))
10186 /* Only call invert_truthvalue if operand is a truth value. */
10187 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10188 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10189 else
10190 tem = invert_truthvalue (arg0);
10191 return non_lvalue (fold_convert (type, tem));
10193 /* Identical arguments cancel to zero. */
10194 if (operand_equal_p (arg0, arg1, 0))
10195 return omit_one_operand (type, integer_zero_node, arg0);
10197 /* !X ^ X is always true. */
10198 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10199 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10200 return omit_one_operand (type, integer_one_node, arg1);
10202 /* X ^ !X is always true. */
10203 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10204 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10205 return omit_one_operand (type, integer_one_node, arg0);
10207 return NULL_TREE;
10209 case EQ_EXPR:
10210 case NE_EXPR:
10211 tem = fold_comparison (code, type, op0, op1);
10212 if (tem != NULL_TREE)
10213 return tem;
10215 /* bool_var != 0 becomes bool_var. */
10216 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10217 && code == NE_EXPR)
10218 return non_lvalue (fold_convert (type, arg0));
10220 /* bool_var == 1 becomes bool_var. */
10221 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10222 && code == EQ_EXPR)
10223 return non_lvalue (fold_convert (type, arg0));
10225 /* bool_var != 1 becomes !bool_var. */
10226 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10227 && code == NE_EXPR)
10228 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10230 /* bool_var == 0 becomes !bool_var. */
10231 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10232 && code == EQ_EXPR)
10233 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10235 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
10236 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10237 && TREE_CODE (arg1) == INTEGER_CST)
10238 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10239 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10240 arg1));
10242 /* If this is an equality comparison of the address of a non-weak
10243 object against zero, then we know the result. */
10244 if (TREE_CODE (arg0) == ADDR_EXPR
10245 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10246 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10247 && integer_zerop (arg1))
10248 return constant_boolean_node (code != EQ_EXPR, type);
10250 /* If this is an equality comparison of the address of two non-weak,
10251 unaliased symbols neither of which is extern (since we do not
10252 have access to attributes for externs), then we know the result. */
10253 if (TREE_CODE (arg0) == ADDR_EXPR
10254 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10255 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10256 && ! lookup_attribute ("alias",
10257 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10258 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10259 && TREE_CODE (arg1) == ADDR_EXPR
10260 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10261 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10262 && ! lookup_attribute ("alias",
10263 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10264 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10266 /* We know that we're looking at the address of two
10267 non-weak, unaliased, static _DECL nodes.
10269 It is both wasteful and incorrect to call operand_equal_p
10270 to compare the two ADDR_EXPR nodes. It is wasteful in that
10271 all we need to do is test pointer equality for the arguments
10272 to the two ADDR_EXPR nodes. It is incorrect to use
10273 operand_equal_p as that function is NOT equivalent to a
10274 C equality test. It can in fact return false for two
10275 objects which would test as equal using the C equality
10276 operator. */
10277 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10278 return constant_boolean_node (equal
10279 ? code == EQ_EXPR : code != EQ_EXPR,
10280 type);
10283 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10284 a MINUS_EXPR of a constant, we can convert it into a comparison with
10285 a revised constant as long as no overflow occurs. */
10286 if (TREE_CODE (arg1) == INTEGER_CST
10287 && (TREE_CODE (arg0) == PLUS_EXPR
10288 || TREE_CODE (arg0) == MINUS_EXPR)
10289 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10290 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10291 ? MINUS_EXPR : PLUS_EXPR,
10292 fold_convert (TREE_TYPE (arg0), arg1),
10293 TREE_OPERAND (arg0, 1), 0))
10294 && ! TREE_CONSTANT_OVERFLOW (tem))
10295 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10297 /* Similarly for a NEGATE_EXPR. */
10298 if (TREE_CODE (arg0) == NEGATE_EXPR
10299 && TREE_CODE (arg1) == INTEGER_CST
10300 && 0 != (tem = negate_expr (arg1))
10301 && TREE_CODE (tem) == INTEGER_CST
10302 && ! TREE_CONSTANT_OVERFLOW (tem))
10303 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10305 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10306 for !=. Don't do this for ordered comparisons due to overflow. */
10307 if (TREE_CODE (arg0) == MINUS_EXPR
10308 && integer_zerop (arg1))
10309 return fold_build2 (code, type,
10310 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10312 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10313 if (TREE_CODE (arg0) == ABS_EXPR
10314 && (integer_zerop (arg1) || real_zerop (arg1)))
10315 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10317 /* If this is an EQ or NE comparison with zero and ARG0 is
10318 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10319 two operations, but the latter can be done in one less insn
10320 on machines that have only two-operand insns or on which a
10321 constant cannot be the first operand. */
10322 if (TREE_CODE (arg0) == BIT_AND_EXPR
10323 && integer_zerop (arg1))
10325 tree arg00 = TREE_OPERAND (arg0, 0);
10326 tree arg01 = TREE_OPERAND (arg0, 1);
10327 if (TREE_CODE (arg00) == LSHIFT_EXPR
10328 && integer_onep (TREE_OPERAND (arg00, 0)))
10329 return
10330 fold_build2 (code, type,
10331 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10332 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10333 arg01, TREE_OPERAND (arg00, 1)),
10334 fold_convert (TREE_TYPE (arg0),
10335 integer_one_node)),
10336 arg1);
10337 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10338 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10339 return
10340 fold_build2 (code, type,
10341 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10342 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10343 arg00, TREE_OPERAND (arg01, 1)),
10344 fold_convert (TREE_TYPE (arg0),
10345 integer_one_node)),
10346 arg1);
10349 /* If this is an NE or EQ comparison of zero against the result of a
10350 signed MOD operation whose second operand is a power of 2, make
10351 the MOD operation unsigned since it is simpler and equivalent. */
10352 if (integer_zerop (arg1)
10353 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10354 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10355 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10356 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10357 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10358 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10360 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10361 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10362 fold_convert (newtype,
10363 TREE_OPERAND (arg0, 0)),
10364 fold_convert (newtype,
10365 TREE_OPERAND (arg0, 1)));
10367 return fold_build2 (code, type, newmod,
10368 fold_convert (newtype, arg1));
10371 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10372 C1 is a valid shift constant, and C2 is a power of two, i.e.
10373 a single bit. */
10374 if (TREE_CODE (arg0) == BIT_AND_EXPR
10375 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10376 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10377 == INTEGER_CST
10378 && integer_pow2p (TREE_OPERAND (arg0, 1))
10379 && integer_zerop (arg1))
10381 tree itype = TREE_TYPE (arg0);
10382 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10383 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10385 /* Check for a valid shift count. */
10386 if (TREE_INT_CST_HIGH (arg001) == 0
10387 && TREE_INT_CST_LOW (arg001) < prec)
10389 tree arg01 = TREE_OPERAND (arg0, 1);
10390 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10391 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10392 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10393 can be rewritten as (X & (C2 << C1)) != 0. */
10394 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10396 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10397 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10398 return fold_build2 (code, type, tem, arg1);
10400 /* Otherwise, for signed (arithmetic) shifts,
10401 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10402 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10403 else if (!TYPE_UNSIGNED (itype))
10404 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10405 arg000, build_int_cst (itype, 0));
10406 /* Otherwise, for unsigned (logical) shifts,
10407 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10408 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10409 else
10410 return omit_one_operand (type,
10411 code == EQ_EXPR ? integer_one_node
10412 : integer_zero_node,
10413 arg000);
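	 /* For illustration: ((x >> 3) & 4) != 0 tests bit 5 of x and is
	    rewritten above as (x & 32) != 0.  When C2 << C1 would
	    overflow the precision, the tested bit can only come from the
	    fill of the shift, hence the x < 0 / x >= 0 rewrite for
	    arithmetic shifts and the constant result for logical ones.  */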
10417 /* If this is an NE comparison of zero with an AND of one, remove the
10418 comparison since the AND will give the correct value. */
10419 if (code == NE_EXPR
10420 && integer_zerop (arg1)
10421 && TREE_CODE (arg0) == BIT_AND_EXPR
10422 && integer_onep (TREE_OPERAND (arg0, 1)))
10423 return fold_convert (type, arg0);
10425 /* If we have (A & C) == C where C is a power of 2, convert this into
10426 (A & C) != 0. Similarly for NE_EXPR. */
10427 if (TREE_CODE (arg0) == BIT_AND_EXPR
10428 && integer_pow2p (TREE_OPERAND (arg0, 1))
10429 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10430 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10431 arg0, fold_convert (TREE_TYPE (arg0),
10432 integer_zero_node));
10434 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10435 bit, then fold the expression into A < 0 or A >= 0. */
10436 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10437 if (tem)
10438 return tem;
10440 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10441 Similarly for NE_EXPR. */
10442 if (TREE_CODE (arg0) == BIT_AND_EXPR
10443 && TREE_CODE (arg1) == INTEGER_CST
10444 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10446 tree notc = fold_build1 (BIT_NOT_EXPR,
10447 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10448 TREE_OPERAND (arg0, 1));
10449 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10450 arg1, notc);
10451 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10452 if (integer_nonzerop (dandnotc))
10453 return omit_one_operand (type, rslt, arg0);
10456 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10457 Similarly for NE_EXPR. */
10458 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10459 && TREE_CODE (arg1) == INTEGER_CST
10460 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10462 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10463 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10464 TREE_OPERAND (arg0, 1), notd);
10465 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10466 if (integer_nonzerop (candnotd))
10467 return omit_one_operand (type, rslt, arg0);
10470 /* If this is a comparison of a field, we may be able to simplify it. */
10471 if (((TREE_CODE (arg0) == COMPONENT_REF
10472 && lang_hooks.can_use_bit_fields_p ())
10473 || TREE_CODE (arg0) == BIT_FIELD_REF)
10474 /* Handle the constant case even without -O
10475 to make sure the warnings are given. */
10476 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10478 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10479 if (t1)
10480 return t1;
10483 /* Optimize comparisons of strlen vs zero to a compare of the
10484 first character of the string vs zero. To wit,
10485 strlen(ptr) == 0 => *ptr == 0
10486 strlen(ptr) != 0 => *ptr != 0
10487 Other cases should reduce to one of these two (or a constant)
10488 due to the return value of strlen being unsigned. */
10489 if (TREE_CODE (arg0) == CALL_EXPR
10490 && integer_zerop (arg1))
10492 tree fndecl = get_callee_fndecl (arg0);
10493 tree arglist;
10495 if (fndecl
10496 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10497 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10498 && (arglist = TREE_OPERAND (arg0, 1))
10499 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10500 && ! TREE_CHAIN (arglist))
10502 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10503 return fold_build2 (code, type, iref,
10504 build_int_cst (TREE_TYPE (iref), 0));
10508 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10509 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10510 if (TREE_CODE (arg0) == RSHIFT_EXPR
10511 && integer_zerop (arg1)
10512 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10514 tree arg00 = TREE_OPERAND (arg0, 0);
10515 tree arg01 = TREE_OPERAND (arg0, 1);
10516 tree itype = TREE_TYPE (arg00);
10517 if (TREE_INT_CST_HIGH (arg01) == 0
10518 && TREE_INT_CST_LOW (arg01)
10519 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10521 if (TYPE_UNSIGNED (itype))
10523 itype = lang_hooks.types.signed_type (itype);
10524 arg00 = fold_convert (itype, arg00);
10526 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10527 type, arg00, build_int_cst (itype, 0));
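	 /* For illustration: for 32-bit x, (x >> 31) != 0 folds to x < 0
	    and (x >> 31) == 0 folds to x >= 0; an unsigned x is first
	    converted to the corresponding signed type so that the
	    comparison is a plain sign test.  */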
10531 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10532 if (integer_zerop (arg1)
10533 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10534 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10535 TREE_OPERAND (arg0, 1));
10537 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10538 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10539 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10540 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10541 build_int_cst (TREE_TYPE (arg1), 0));
10542 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10543 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10544 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10545 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10546 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10547 build_int_cst (TREE_TYPE (arg1), 0));
10549 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10550 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10551 && TREE_CODE (arg1) == INTEGER_CST
10552 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10553 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10554 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10555 TREE_OPERAND (arg0, 1), arg1));
10557 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10558 (X & C) == 0 when C is a single bit. */
10559 if (TREE_CODE (arg0) == BIT_AND_EXPR
10560 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10561 && integer_zerop (arg1)
10562 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10564 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10565 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10566 TREE_OPERAND (arg0, 1));
10567 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10568 type, tem, arg1);
10571 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10572 constant C is a power of two, i.e. a single bit. */
10573 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10574 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10575 && integer_zerop (arg1)
10576 && integer_pow2p (TREE_OPERAND (arg0, 1))
10577 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10578 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10580 tree arg00 = TREE_OPERAND (arg0, 0);
10581 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10582 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10585 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10586 when C is a power of two, i.e. a single bit. */
10587 if (TREE_CODE (arg0) == BIT_AND_EXPR
10588 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10589 && integer_zerop (arg1)
10590 && integer_pow2p (TREE_OPERAND (arg0, 1))
10591 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10592 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10594 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10595 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10596 arg000, TREE_OPERAND (arg0, 1));
10597 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10598 tem, build_int_cst (TREE_TYPE (tem), 0));
10601 if (integer_zerop (arg1)
10602 && tree_expr_nonzero_p (arg0))
10604 tree res = constant_boolean_node (code==NE_EXPR, type);
10605 return omit_one_operand (type, res, arg0);
10607 return NULL_TREE;
10609 case LT_EXPR:
10610 case GT_EXPR:
10611 case LE_EXPR:
10612 case GE_EXPR:
10613 tem = fold_comparison (code, type, op0, op1);
10614 if (tem != NULL_TREE)
10615 return tem;
10617 /* Transform comparisons of the form X +- C CMP X. */
10618 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10619 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10620 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10621 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10622 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10623 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
10624 && !(flag_wrapv || flag_trapv))))
10626 tree arg01 = TREE_OPERAND (arg0, 1);
10627 enum tree_code code0 = TREE_CODE (arg0);
10628 int is_positive;
10630 if (TREE_CODE (arg01) == REAL_CST)
10631 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10632 else
10633 is_positive = tree_int_cst_sgn (arg01);
10635 /* (X - c) > X becomes false. */
10636 if (code == GT_EXPR
10637 && ((code0 == MINUS_EXPR && is_positive >= 0)
10638 || (code0 == PLUS_EXPR && is_positive <= 0)))
10639 return constant_boolean_node (0, type);
10641 /* Likewise (X + c) < X becomes false. */
10642 if (code == LT_EXPR
10643 && ((code0 == PLUS_EXPR && is_positive >= 0)
10644 || (code0 == MINUS_EXPR && is_positive <= 0)))
10645 return constant_boolean_node (0, type);
10647 /* Convert (X - c) <= X to true. */
10648 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10649 && code == LE_EXPR
10650 && ((code0 == MINUS_EXPR && is_positive >= 0)
10651 || (code0 == PLUS_EXPR && is_positive <= 0)))
10652 return constant_boolean_node (1, type);
10654 /* Convert (X + c) >= X to true. */
10655 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10656 && code == GE_EXPR
10657 && ((code0 == PLUS_EXPR && is_positive >= 0)
10658 || (code0 == MINUS_EXPR && is_positive <= 0)))
10659 return constant_boolean_node (1, type);
10661 if (TREE_CODE (arg01) == INTEGER_CST)
10663 /* Convert X + c > X and X - c < X to true for integers. */
10664 if (code == GT_EXPR
10665 && ((code0 == PLUS_EXPR && is_positive > 0)
10666 || (code0 == MINUS_EXPR && is_positive < 0)))
10667 return constant_boolean_node (1, type);
10669 if (code == LT_EXPR
10670 && ((code0 == MINUS_EXPR && is_positive > 0)
10671 || (code0 == PLUS_EXPR && is_positive < 0)))
10672 return constant_boolean_node (1, type);
10674 /* Convert X + c <= X and X - c >= X to false for integers. */
10675 if (code == LE_EXPR
10676 && ((code0 == PLUS_EXPR && is_positive > 0)
10677 || (code0 == MINUS_EXPR && is_positive < 0)))
10678 return constant_boolean_node (0, type);
10680 if (code == GE_EXPR
10681 && ((code0 == MINUS_EXPR && is_positive > 0)
10682 || (code0 == PLUS_EXPR && is_positive < 0)))
10683 return constant_boolean_node (0, type);
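          /* For example, for signed int x without -fwrapv or -ftrapv,
             x + 1 > x folds to true and x + 1 <= x folds to false, because
             signed overflow is assumed not to occur.  The TYPE_UNSIGNED
             guard above excludes unsigned types, where x + 1 > x is false
             for x == ~0.  */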
10687 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10688 This transformation affects the cases which are handled in later
10689 optimizations involving comparisons with non-negative constants. */
10690 if (TREE_CODE (arg1) == INTEGER_CST
10691 && TREE_CODE (arg0) != INTEGER_CST
10692 && tree_int_cst_sgn (arg1) > 0)
10694 if (code == GE_EXPR)
10696 arg1 = const_binop (MINUS_EXPR, arg1,
10697 build_int_cst (TREE_TYPE (arg1), 1), 0);
10698 return fold_build2 (GT_EXPR, type, arg0,
10699 fold_convert (TREE_TYPE (arg0), arg1));
10701 if (code == LT_EXPR)
10703 arg1 = const_binop (MINUS_EXPR, arg1,
10704 build_int_cst (TREE_TYPE (arg1), 1), 0);
10705 return fold_build2 (LE_EXPR, type, arg0,
10706 fold_convert (TREE_TYPE (arg0), arg1));
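          /* For example, x >= 5 becomes x > 4 and x < 5 becomes x <= 4,
             so the later folds only need to recognize the GT/LE forms.  */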
10710 /* Comparisons with the highest or lowest possible integer of
10711 the specified size will have known values. */
10713 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
10715 if (TREE_CODE (arg1) == INTEGER_CST
10716 && ! TREE_CONSTANT_OVERFLOW (arg1)
10717 && width <= 2 * HOST_BITS_PER_WIDE_INT
10718 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10719 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10721 HOST_WIDE_INT signed_max_hi;
10722 unsigned HOST_WIDE_INT signed_max_lo;
10723 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
10725 if (width <= HOST_BITS_PER_WIDE_INT)
10727 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10728 - 1;
10729 signed_max_hi = 0;
10730 max_hi = 0;
10732 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10734 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10735 min_lo = 0;
10736 min_hi = 0;
10738 else
10740 max_lo = signed_max_lo;
10741 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10742 min_hi = -1;
10745 else
10747 width -= HOST_BITS_PER_WIDE_INT;
10748 signed_max_lo = -1;
10749 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10750 - 1;
10751 max_lo = -1;
10752 min_lo = 0;
10754 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10756 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10757 min_hi = 0;
10759 else
10761 max_hi = signed_max_hi;
10762 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10766 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
10767 && TREE_INT_CST_LOW (arg1) == max_lo)
10768 switch (code)
10770 case GT_EXPR:
10771 return omit_one_operand (type, integer_zero_node, arg0);
10773 case GE_EXPR:
10774 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10776 case LE_EXPR:
10777 return omit_one_operand (type, integer_one_node, arg0);
10779 case LT_EXPR:
10780 return fold_build2 (NE_EXPR, type, arg0, arg1);
10782 /* The GE_EXPR and LT_EXPR cases above are not normally
10783 reached because of previous transformations. */
10785 default:
10786 break;
10788 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10789 == max_hi
10790 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
10791 switch (code)
10793 case GT_EXPR:
10794 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10795 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10796 case LE_EXPR:
10797 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10798 return fold_build2 (NE_EXPR, type, arg0, arg1);
10799 default:
10800 break;
10802 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10803 == min_hi
10804 && TREE_INT_CST_LOW (arg1) == min_lo)
10805 switch (code)
10807 case LT_EXPR:
10808 return omit_one_operand (type, integer_zero_node, arg0);
10810 case LE_EXPR:
10811 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10813 case GE_EXPR:
10814 return omit_one_operand (type, integer_one_node, arg0);
10816 case GT_EXPR:
10817 return fold_build2 (NE_EXPR, type, op0, op1);
10819 default:
10820 break;
10822 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10823 == min_hi
10824 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
10825 switch (code)
10827 case GE_EXPR:
10828 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10829 return fold_build2 (NE_EXPR, type, arg0, arg1);
10830 case LT_EXPR:
10831 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10832 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10833 default:
10834 break;
10837 else if (!in_gimple_form
10838 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
10839 && TREE_INT_CST_LOW (arg1) == signed_max_lo
10840 && TYPE_UNSIGNED (TREE_TYPE (arg1))
10841 /* signed_type does not work on pointer types. */
10842 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
10844 /* The following case also applies to X < signed_max+1
10845 and X >= signed_max+1 because of previous transformations. */
10846 if (code == LE_EXPR || code == GT_EXPR)
10848 tree st0, st1;
10849 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
10850 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
10851 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10852 type, fold_convert (st0, arg0),
10853 build_int_cst (st1, 0));
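              /* For example, with 32-bit int, unsigned x <= 2147483647
                 (the signed maximum) becomes (int) x >= 0, and
                 unsigned x > 2147483647 becomes (int) x < 0.  */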
10859 /* If we are comparing an ABS_EXPR with a constant, we can
10860 convert all the cases into explicit comparisons, but they may
10861 well not be faster than doing the ABS and one comparison.
10862 But ABS (X) <= C is a range comparison, which becomes a subtraction
10863 and a comparison, and is probably faster. */
10864 if (code == LE_EXPR
10865 && TREE_CODE (arg1) == INTEGER_CST
10866 && TREE_CODE (arg0) == ABS_EXPR
10867 && ! TREE_SIDE_EFFECTS (arg0)
10868 && (0 != (tem = negate_expr (arg1)))
10869 && TREE_CODE (tem) == INTEGER_CST
10870 && ! TREE_CONSTANT_OVERFLOW (tem))
10871 return fold_build2 (TRUTH_ANDIF_EXPR, type,
10872 build2 (GE_EXPR, type,
10873 TREE_OPERAND (arg0, 0), tem),
10874 build2 (LE_EXPR, type,
10875 TREE_OPERAND (arg0, 0), arg1));
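      /* For example, abs(x) <= 5 folds to x >= -5 && x <= 5.  */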
10877 /* Convert ABS_EXPR<x> >= 0 to true. */
10878 if (code == GE_EXPR
10879 && tree_expr_nonnegative_p (arg0)
10880 && (integer_zerop (arg1)
10881 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10882 && real_zerop (arg1))))
10883 return omit_one_operand (type, integer_one_node, arg0);
10885 /* Convert ABS_EXPR<x> < 0 to false. */
10886 if (code == LT_EXPR
10887 && tree_expr_nonnegative_p (arg0)
10888 && (integer_zerop (arg1) || real_zerop (arg1)))
10889 return omit_one_operand (type, integer_zero_node, arg0);
10891 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
10892 and similarly for >= into !=. */
10893 if ((code == LT_EXPR || code == GE_EXPR)
10894 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10895 && TREE_CODE (arg1) == LSHIFT_EXPR
10896 && integer_onep (TREE_OPERAND (arg1, 0)))
10897 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10898 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10899 TREE_OPERAND (arg1, 1)),
10900 build_int_cst (TREE_TYPE (arg0), 0));
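      /* For example, for unsigned x, x < (1 << y) becomes (x >> y) == 0
         and x >= (1 << y) becomes (x >> y) != 0.  */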
10902 if ((code == LT_EXPR || code == GE_EXPR)
10903 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10904 && (TREE_CODE (arg1) == NOP_EXPR
10905 || TREE_CODE (arg1) == CONVERT_EXPR)
10906 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
10907 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
10908 return
10909 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10910 fold_convert (TREE_TYPE (arg0),
10911 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10912 TREE_OPERAND (TREE_OPERAND (arg1, 0),
10913 1))),
10914 build_int_cst (TREE_TYPE (arg0), 0));
10916 return NULL_TREE;
10918 case UNORDERED_EXPR:
10919 case ORDERED_EXPR:
10920 case UNLT_EXPR:
10921 case UNLE_EXPR:
10922 case UNGT_EXPR:
10923 case UNGE_EXPR:
10924 case UNEQ_EXPR:
10925 case LTGT_EXPR:
10926 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10928 t1 = fold_relational_const (code, type, arg0, arg1);
10929 if (t1 != NULL_TREE)
10930 return t1;
10933 /* If the first operand is NaN, the result is constant. */
10934 if (TREE_CODE (arg0) == REAL_CST
10935 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
10936 && (code != LTGT_EXPR || ! flag_trapping_math))
10938 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10939 ? integer_zero_node
10940 : integer_one_node;
10941 return omit_one_operand (type, t1, arg1);
10944 /* If the second operand is NaN, the result is constant. */
10945 if (TREE_CODE (arg1) == REAL_CST
10946 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
10947 && (code != LTGT_EXPR || ! flag_trapping_math))
10949 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10950 ? integer_zero_node
10951 : integer_one_node;
10952 return omit_one_operand (type, t1, arg0);
10955 /* Simplify unordered comparison of something with itself. */
10956 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
10957 && operand_equal_p (arg0, arg1, 0))
10958 return constant_boolean_node (1, type);
10960 if (code == LTGT_EXPR
10961 && !flag_trapping_math
10962 && operand_equal_p (arg0, arg1, 0))
10963 return constant_boolean_node (0, type);
10965 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
10967 tree targ0 = strip_float_extensions (arg0);
10968 tree targ1 = strip_float_extensions (arg1);
10969 tree newtype = TREE_TYPE (targ0);
10971 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
10972 newtype = TREE_TYPE (targ1);
10974 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
10975 return fold_build2 (code, type, fold_convert (newtype, targ0),
10976 fold_convert (newtype, targ1));
10979 return NULL_TREE;
10981 case COMPOUND_EXPR:
10982 /* When pedantic, a compound expression can be neither an lvalue
10983 nor an integer constant expression. */
10984 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
10985 return NULL_TREE;
10986 /* Don't let (0, 0) be a null pointer constant. */
10987 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
10988 : fold_convert (type, arg1);
10989 return pedantic_non_lvalue (tem);
10991 case COMPLEX_EXPR:
10992 if ((TREE_CODE (arg0) == REAL_CST
10993 && TREE_CODE (arg1) == REAL_CST)
10994 || (TREE_CODE (arg0) == INTEGER_CST
10995 && TREE_CODE (arg1) == INTEGER_CST))
10996 return build_complex (type, arg0, arg1);
10997 return NULL_TREE;
10999 case ASSERT_EXPR:
11000 /* An ASSERT_EXPR should never be passed to fold_binary. */
11001 gcc_unreachable ();
11003 default:
11004 return NULL_TREE;
11005 } /* switch (code) */
11008 /* Callback for walk_tree, looking for LABEL_EXPR.
11009 Returns *TP if it is a LABEL_EXPR, and NULL_TREE otherwise.
11010 Do not check the sub-tree of GOTO_EXPR. */
11012 static tree
11013 contains_label_1 (tree *tp,
11014 int *walk_subtrees,
11015 void *data ATTRIBUTE_UNUSED)
11017 switch (TREE_CODE (*tp))
11019 case LABEL_EXPR:
11020 return *tp;
11021 case GOTO_EXPR:
11022 *walk_subtrees = 0;
11023 /* no break */
11024 default:
11025 return NULL_TREE;
11029 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11030 accessible from outside the sub-tree. Returns false if no
11031 such label is found. */
11033 static bool
11034 contains_label_p (tree st)
11036 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11039 /* Fold a ternary expression of code CODE and type TYPE with operands
11040 OP0, OP1, and OP2. Return the folded expression if folding is
11041 successful. Otherwise, return NULL_TREE. */
11043 tree
11044 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11046 tree tem;
11047 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11048 enum tree_code_class kind = TREE_CODE_CLASS (code);
11050 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11051 && TREE_CODE_LENGTH (code) == 3);
11053 /* Strip any conversions that don't change the mode. This is safe
11054 for every expression, except for a comparison expression because
11055 its signedness is derived from its operands. So, in the latter
11056 case, only strip conversions that don't change the signedness.
11058 Note that this is done as an internal manipulation within the
11059 constant folder, in order to find the simplest representation of
11060 the arguments so that their form can be studied. In any case,
11061 the appropriate type conversions should be put back in the tree
11062 that will get out of the constant folder. */
11063 if (op0)
11065 arg0 = op0;
11066 STRIP_NOPS (arg0);
11069 if (op1)
11071 arg1 = op1;
11072 STRIP_NOPS (arg1);
11075 switch (code)
11077 case COMPONENT_REF:
11078 if (TREE_CODE (arg0) == CONSTRUCTOR
11079 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11081 unsigned HOST_WIDE_INT idx;
11082 tree field, value;
11083 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11084 if (field == arg1)
11085 return value;
11087 return NULL_TREE;
11089 case COND_EXPR:
11090 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11091 so all simple results must be passed through pedantic_non_lvalue. */
11092 if (TREE_CODE (arg0) == INTEGER_CST)
11094 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11095 tem = integer_zerop (arg0) ? op2 : op1;
11096 /* Only optimize constant conditions when the selected branch
11097 has the same type as the COND_EXPR. This avoids optimizing
11098 away "c ? x : throw", where the throw has a void type.
11099 Also avoid throwing away an operand that contains a label. */
11100 if ((!TREE_SIDE_EFFECTS (unused_op)
11101 || !contains_label_p (unused_op))
11102 && (! VOID_TYPE_P (TREE_TYPE (tem))
11103 || VOID_TYPE_P (type)))
11104 return pedantic_non_lvalue (tem);
11105 return NULL_TREE;
11107 if (operand_equal_p (arg1, op2, 0))
11108 return pedantic_omit_one_operand (type, arg1, arg0);
11110 /* If we have A op B ? A : C, we may be able to convert this to a
11111 simpler expression, depending on the operation and the values
11112 of B and C. Signed zeros prevent all of these transformations,
11113 for reasons given above each one.
11115 Also try swapping the arguments and inverting the conditional. */
11116 if (COMPARISON_CLASS_P (arg0)
11117 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11118 arg1, TREE_OPERAND (arg0, 1))
11119 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11121 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11122 if (tem)
11123 return tem;
11126 if (COMPARISON_CLASS_P (arg0)
11127 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11128 op2,
11129 TREE_OPERAND (arg0, 1))
11130 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11132 tem = fold_truth_not_expr (arg0);
11133 if (tem && COMPARISON_CLASS_P (tem))
11135 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11136 if (tem)
11137 return tem;
11141 /* If the second operand is simpler than the third, swap them
11142 since that produces better jump optimization results. */
11143 if (truth_value_p (TREE_CODE (arg0))
11144 && tree_swap_operands_p (op1, op2, false))
11146 /* See if this can be inverted. If it can't, possibly because
11147 it was a floating-point inequality comparison, don't do
11148 anything. */
11149 tem = fold_truth_not_expr (arg0);
11150 if (tem)
11151 return fold_build3 (code, type, tem, op2, op1);
11154 /* Convert A ? 1 : 0 to simply A. */
11155 if (integer_onep (op1)
11156 && integer_zerop (op2)
11157 /* If we try to convert OP0 to our type, the
11158 call to fold will try to move the conversion inside
11159 a COND, which will recurse. In that case, the COND_EXPR
11160 is probably the best choice, so leave it alone. */
11161 && type == TREE_TYPE (arg0))
11162 return pedantic_non_lvalue (arg0);
11164 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11165 over COND_EXPR in cases such as floating point comparisons. */
11166 if (integer_zerop (op1)
11167 && integer_onep (op2)
11168 && truth_value_p (TREE_CODE (arg0)))
11169 return pedantic_non_lvalue (fold_convert (type,
11170 invert_truthvalue (arg0)));
11172 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11173 if (TREE_CODE (arg0) == LT_EXPR
11174 && integer_zerop (TREE_OPERAND (arg0, 1))
11175 && integer_zerop (op2)
11176 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11177 return fold_convert (type,
11178 fold_build2 (BIT_AND_EXPR,
11179 TREE_TYPE (tem), tem,
11180 fold_convert (TREE_TYPE (tem), arg1)));
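      /* For example, with 32-bit int, x < 0 ? INT_MIN : 0 folds to
         x & INT_MIN, INT_MIN being the value with only the sign bit
         set.  */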
11182 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11183 already handled above. */
11184 if (TREE_CODE (arg0) == BIT_AND_EXPR
11185 && integer_onep (TREE_OPERAND (arg0, 1))
11186 && integer_zerop (op2)
11187 && integer_pow2p (arg1))
11189 tree tem = TREE_OPERAND (arg0, 0);
11190 STRIP_NOPS (tem);
11191 if (TREE_CODE (tem) == RSHIFT_EXPR
11192 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11193 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11194 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11195 return fold_build2 (BIT_AND_EXPR, type,
11196 TREE_OPERAND (tem, 0), arg1);
11199 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11200 is probably obsolete because the first operand should be a
11201 truth value (that's why we have the two cases above), but let's
11202 leave it in until we can confirm this for all front-ends. */
11203 if (integer_zerop (op2)
11204 && TREE_CODE (arg0) == NE_EXPR
11205 && integer_zerop (TREE_OPERAND (arg0, 1))
11206 && integer_pow2p (arg1)
11207 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11208 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11209 arg1, OEP_ONLY_CONST))
11210 return pedantic_non_lvalue (fold_convert (type,
11211 TREE_OPERAND (arg0, 0)));
11213 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11214 if (integer_zerop (op2)
11215 && truth_value_p (TREE_CODE (arg0))
11216 && truth_value_p (TREE_CODE (arg1)))
11217 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11218 fold_convert (type, arg0),
11219 arg1);
11221 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11222 if (integer_onep (op2)
11223 && truth_value_p (TREE_CODE (arg0))
11224 && truth_value_p (TREE_CODE (arg1)))
11226 /* Only perform transformation if ARG0 is easily inverted. */
11227 tem = fold_truth_not_expr (arg0);
11228 if (tem)
11229 return fold_build2 (TRUTH_ORIF_EXPR, type,
11230 fold_convert (type, tem),
11231 arg1);
11234 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11235 if (integer_zerop (arg1)
11236 && truth_value_p (TREE_CODE (arg0))
11237 && truth_value_p (TREE_CODE (op2)))
11239 /* Only perform transformation if ARG0 is easily inverted. */
11240 tem = fold_truth_not_expr (arg0);
11241 if (tem)
11242 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11243 fold_convert (type, tem),
11244 op2);
11247 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11248 if (integer_onep (arg1)
11249 && truth_value_p (TREE_CODE (arg0))
11250 && truth_value_p (TREE_CODE (op2)))
11251 return fold_build2 (TRUTH_ORIF_EXPR, type,
11252 fold_convert (type, arg0),
11253 op2);
11255 return NULL_TREE;
11257 case CALL_EXPR:
11258 /* Check for a built-in function. */
11259 if (TREE_CODE (op0) == ADDR_EXPR
11260 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11261 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11262 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11263 return NULL_TREE;
11265 case BIT_FIELD_REF:
11266 if (TREE_CODE (arg0) == VECTOR_CST
11267 && type == TREE_TYPE (TREE_TYPE (arg0))
11268 && host_integerp (arg1, 1)
11269 && host_integerp (op2, 1))
11271 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11272 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11274 if (width != 0
11275 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11276 && (idx % width) == 0
11277 && (idx = idx / width)
11278 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11280 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11281 while (idx-- > 0 && elements)
11282 elements = TREE_CHAIN (elements);
11283 if (elements)
11284 return TREE_VALUE (elements);
11285 else
11286 return fold_convert (type, integer_zero_node);
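            /* For example, assuming the constant's element chain is stored
               lowest index first, BIT_FIELD_REF <{1,2,3,4}, 32, 64> on a
               v4si constant with 32-bit elements selects idx == 64/32 == 2
               and yields 3.  */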
11289 return NULL_TREE;
11291 default:
11292 return NULL_TREE;
11293 } /* switch (code) */
11296 /* Perform constant folding and related simplification of EXPR.
11297 The related simplifications include x*1 => x, x*0 => 0, etc.,
11298 and application of the associative law.
11299 NOP_EXPR conversions may be removed freely (as long as we
11300 are careful not to change the type of the overall expression).
11301 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11302 but we can constant-fold them if they have constant operands. */
11304 #ifdef ENABLE_FOLD_CHECKING
11305 # define fold(x) fold_1 (x)
11306 static tree fold_1 (tree);
11307 static
11308 #endif
11309 tree
11310 fold (tree expr)
11312 const tree t = expr;
11313 enum tree_code code = TREE_CODE (t);
11314 enum tree_code_class kind = TREE_CODE_CLASS (code);
11315 tree tem;
11317 /* Return right away if a constant. */
11318 if (kind == tcc_constant)
11319 return t;
11321 if (IS_EXPR_CODE_CLASS (kind))
11323 tree type = TREE_TYPE (t);
11324 tree op0, op1, op2;
11326 switch (TREE_CODE_LENGTH (code))
11328 case 1:
11329 op0 = TREE_OPERAND (t, 0);
11330 tem = fold_unary (code, type, op0);
11331 return tem ? tem : expr;
11332 case 2:
11333 op0 = TREE_OPERAND (t, 0);
11334 op1 = TREE_OPERAND (t, 1);
11335 tem = fold_binary (code, type, op0, op1);
11336 return tem ? tem : expr;
11337 case 3:
11338 op0 = TREE_OPERAND (t, 0);
11339 op1 = TREE_OPERAND (t, 1);
11340 op2 = TREE_OPERAND (t, 2);
11341 tem = fold_ternary (code, type, op0, op1, op2);
11342 return tem ? tem : expr;
11343 default:
11344 break;
11348 switch (code)
11350 case CONST_DECL:
11351 return fold (DECL_INITIAL (t));
11353 default:
11354 return t;
11355 } /* switch (code) */
11358 #ifdef ENABLE_FOLD_CHECKING
11359 #undef fold
11361 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11362 static void fold_check_failed (tree, tree);
11363 void print_fold_checksum (tree);
11365 /* When --enable-checking=fold, compute a digest of expr before
11366 and after the actual fold call to verify that fold did not
11367 accidentally change the original expr. */
11369 tree
11370 fold (tree expr)
11372 tree ret;
11373 struct md5_ctx ctx;
11374 unsigned char checksum_before[16], checksum_after[16];
11375 htab_t ht;
11377 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11378 md5_init_ctx (&ctx);
11379 fold_checksum_tree (expr, &ctx, ht);
11380 md5_finish_ctx (&ctx, checksum_before);
11381 htab_empty (ht);
11383 ret = fold_1 (expr);
11385 md5_init_ctx (&ctx);
11386 fold_checksum_tree (expr, &ctx, ht);
11387 md5_finish_ctx (&ctx, checksum_after);
11388 htab_delete (ht);
11390 if (memcmp (checksum_before, checksum_after, 16))
11391 fold_check_failed (expr, ret);
11393 return ret;
11396 void
11397 print_fold_checksum (tree expr)
11399 struct md5_ctx ctx;
11400 unsigned char checksum[16], cnt;
11401 htab_t ht;
11403 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11404 md5_init_ctx (&ctx);
11405 fold_checksum_tree (expr, &ctx, ht);
11406 md5_finish_ctx (&ctx, checksum);
11407 htab_delete (ht);
11408 for (cnt = 0; cnt < 16; ++cnt)
11409 fprintf (stderr, "%02x", checksum[cnt]);
11410 putc ('\n', stderr);
11413 static void
11414 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11416 internal_error ("fold check: original tree changed by fold");
11419 static void
11420 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11422 void **slot;
11423 enum tree_code code;
11424 struct tree_function_decl buf;
11425 int i, len;
11427 recursive_label:
11429 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11430 <= sizeof (struct tree_function_decl))
11431 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11432 if (expr == NULL)
11433 return;
11434 slot = htab_find_slot (ht, expr, INSERT);
11435 if (*slot != NULL)
11436 return;
11437 *slot = expr;
11438 code = TREE_CODE (expr);
11439 if (TREE_CODE_CLASS (code) == tcc_declaration
11440 && DECL_ASSEMBLER_NAME_SET_P (expr))
11442 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11443 memcpy ((char *) &buf, expr, tree_size (expr));
11444 expr = (tree) &buf;
11445 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11447 else if (TREE_CODE_CLASS (code) == tcc_type
11448 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11449 || TYPE_CACHED_VALUES_P (expr)
11450 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11452 /* Allow these fields to be modified. */
11453 memcpy ((char *) &buf, expr, tree_size (expr));
11454 expr = (tree) &buf;
11455 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11456 TYPE_POINTER_TO (expr) = NULL;
11457 TYPE_REFERENCE_TO (expr) = NULL;
11458 if (TYPE_CACHED_VALUES_P (expr))
11460 TYPE_CACHED_VALUES_P (expr) = 0;
11461 TYPE_CACHED_VALUES (expr) = NULL;
11464 md5_process_bytes (expr, tree_size (expr), ctx);
11465 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11466 if (TREE_CODE_CLASS (code) != tcc_type
11467 && TREE_CODE_CLASS (code) != tcc_declaration
11468 && code != TREE_LIST)
11469 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11470 switch (TREE_CODE_CLASS (code))
11472 case tcc_constant:
11473 switch (code)
11475 case STRING_CST:
11476 md5_process_bytes (TREE_STRING_POINTER (expr),
11477 TREE_STRING_LENGTH (expr), ctx);
11478 break;
11479 case COMPLEX_CST:
11480 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11481 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11482 break;
11483 case VECTOR_CST:
11484 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11485 break;
11486 default:
11487 break;
11489 break;
11490 case tcc_exceptional:
11491 switch (code)
11493 case TREE_LIST:
11494 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11495 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11496 expr = TREE_CHAIN (expr);
11497 goto recursive_label;
11498 break;
11499 case TREE_VEC:
11500 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11501 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11502 break;
11503 default:
11504 break;
11506 break;
11507 case tcc_expression:
11508 case tcc_reference:
11509 case tcc_comparison:
11510 case tcc_unary:
11511 case tcc_binary:
11512 case tcc_statement:
11513 len = TREE_CODE_LENGTH (code);
11514 for (i = 0; i < len; ++i)
11515 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11516 break;
11517 case tcc_declaration:
11518 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11519 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11520 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11522 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11523 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11524 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11525 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11526 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11528 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11529 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11531 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11533 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11534 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11535 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
11537 break;
11538 case tcc_type:
11539 if (TREE_CODE (expr) == ENUMERAL_TYPE)
11540 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11541 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
11542 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
11543 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
11544 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
11545 if (INTEGRAL_TYPE_P (expr)
11546 || SCALAR_FLOAT_TYPE_P (expr))
11548 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
11549 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
11551 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
11552 if (TREE_CODE (expr) == RECORD_TYPE
11553 || TREE_CODE (expr) == UNION_TYPE
11554 || TREE_CODE (expr) == QUAL_UNION_TYPE)
11555 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
11556 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
11557 break;
11558 default:
11559 break;
11563 #endif
11565 /* Fold a unary tree expression with code CODE of type TYPE with an
11566 operand OP0. Return a folded expression if successful. Otherwise,
11567 return a tree expression with code CODE of type TYPE with an
11568 operand OP0. */
11570 tree
11571 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
11573 tree tem;
11574 #ifdef ENABLE_FOLD_CHECKING
11575 unsigned char checksum_before[16], checksum_after[16];
11576 struct md5_ctx ctx;
11577 htab_t ht;
11579 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11580 md5_init_ctx (&ctx);
11581 fold_checksum_tree (op0, &ctx, ht);
11582 md5_finish_ctx (&ctx, checksum_before);
11583 htab_empty (ht);
11584 #endif
11586 tem = fold_unary (code, type, op0);
11587 if (!tem)
11588 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
11590 #ifdef ENABLE_FOLD_CHECKING
11591 md5_init_ctx (&ctx);
11592 fold_checksum_tree (op0, &ctx, ht);
11593 md5_finish_ctx (&ctx, checksum_after);
11594 htab_delete (ht);
11596 if (memcmp (checksum_before, checksum_after, 16))
11597 fold_check_failed (op0, tem);
11598 #endif
11599 return tem;
11602 /* Fold a binary tree expression with code CODE of type TYPE with
11603 operands OP0 and OP1. Return a folded expression if successful.
11604 Otherwise, return a tree expression with code CODE of type TYPE
11605 with operands OP0 and OP1. */
11607 tree
11608 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
11609 MEM_STAT_DECL)
11611 tree tem;
11612 #ifdef ENABLE_FOLD_CHECKING
11613 unsigned char checksum_before_op0[16],
11614 checksum_before_op1[16],
11615 checksum_after_op0[16],
11616 checksum_after_op1[16];
11617 struct md5_ctx ctx;
11618 htab_t ht;
11620 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11621 md5_init_ctx (&ctx);
11622 fold_checksum_tree (op0, &ctx, ht);
11623 md5_finish_ctx (&ctx, checksum_before_op0);
11624 htab_empty (ht);
11626 md5_init_ctx (&ctx);
11627 fold_checksum_tree (op1, &ctx, ht);
11628 md5_finish_ctx (&ctx, checksum_before_op1);
11629 htab_empty (ht);
11630 #endif
11632 tem = fold_binary (code, type, op0, op1);
11633 if (!tem)
11634 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
11636 #ifdef ENABLE_FOLD_CHECKING
11637 md5_init_ctx (&ctx);
11638 fold_checksum_tree (op0, &ctx, ht);
11639 md5_finish_ctx (&ctx, checksum_after_op0);
11640 htab_empty (ht);
11642 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11643 fold_check_failed (op0, tem);
11645 md5_init_ctx (&ctx);
11646 fold_checksum_tree (op1, &ctx, ht);
11647 md5_finish_ctx (&ctx, checksum_after_op1);
11648 htab_delete (ht);
11650 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11651 fold_check_failed (op1, tem);
11652 #endif
11653 return tem;
11656 /* Fold a ternary tree expression with code CODE of type TYPE with
11657 operands OP0, OP1, and OP2. Return a folded expression if
11658 successful. Otherwise, return a tree expression with code CODE of
11659 type TYPE with operands OP0, OP1, and OP2. */
11661 tree
11662 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
11663 MEM_STAT_DECL)
11665 tree tem;
11666 #ifdef ENABLE_FOLD_CHECKING
11667 unsigned char checksum_before_op0[16],
11668 checksum_before_op1[16],
11669 checksum_before_op2[16],
11670 checksum_after_op0[16],
11671 checksum_after_op1[16],
11672 checksum_after_op2[16];
11673 struct md5_ctx ctx;
11674 htab_t ht;
11676 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11677 md5_init_ctx (&ctx);
11678 fold_checksum_tree (op0, &ctx, ht);
11679 md5_finish_ctx (&ctx, checksum_before_op0);
11680 htab_empty (ht);
11682 md5_init_ctx (&ctx);
11683 fold_checksum_tree (op1, &ctx, ht);
11684 md5_finish_ctx (&ctx, checksum_before_op1);
11685 htab_empty (ht);
11687 md5_init_ctx (&ctx);
11688 fold_checksum_tree (op2, &ctx, ht);
11689 md5_finish_ctx (&ctx, checksum_before_op2);
11690 htab_empty (ht);
11691 #endif
11693 tem = fold_ternary (code, type, op0, op1, op2);
11694 if (!tem)
11695 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
11697 #ifdef ENABLE_FOLD_CHECKING
11698 md5_init_ctx (&ctx);
11699 fold_checksum_tree (op0, &ctx, ht);
11700 md5_finish_ctx (&ctx, checksum_after_op0);
11701 htab_empty (ht);
11703 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11704 fold_check_failed (op0, tem);
11706 md5_init_ctx (&ctx);
11707 fold_checksum_tree (op1, &ctx, ht);
11708 md5_finish_ctx (&ctx, checksum_after_op1);
11709 htab_empty (ht);
11711 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11712 fold_check_failed (op1, tem);
11714 md5_init_ctx (&ctx);
11715 fold_checksum_tree (op2, &ctx, ht);
11716 md5_finish_ctx (&ctx, checksum_after_op2);
11717 htab_delete (ht);
11719 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
11720 fold_check_failed (op2, tem);
11721 #endif
11722 return tem;
11725 /* Perform constant folding and related simplification of initializer
11726 expression EXPR. These behave identically to "fold_buildN" but ignore
11727 potential run-time traps and exceptions that fold must preserve. */
11729 #define START_FOLD_INIT \
11730 int saved_signaling_nans = flag_signaling_nans;\
11731 int saved_trapping_math = flag_trapping_math;\
11732 int saved_rounding_math = flag_rounding_math;\
11733 int saved_trapv = flag_trapv;\
11734 int saved_folding_initializer = folding_initializer;\
11735 flag_signaling_nans = 0;\
11736 flag_trapping_math = 0;\
11737 flag_rounding_math = 0;\
11738 flag_trapv = 0;\
11739 folding_initializer = 1;
11741 #define END_FOLD_INIT \
11742 flag_signaling_nans = saved_signaling_nans;\
11743 flag_trapping_math = saved_trapping_math;\
11744 flag_rounding_math = saved_rounding_math;\
11745 flag_trapv = saved_trapv;\
11746 folding_initializer = saved_folding_initializer;
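/* For example, fold_build2_initializer can fold 1.0 / 3.0 to a constant
   even under -frounding-math, where plain fold_build2 would leave the
   inexact division alone, because a static initializer is necessarily
   evaluated at translation time anyway.  */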
11748 tree
11749 fold_build1_initializer (enum tree_code code, tree type, tree op)
11751 tree result;
11752 START_FOLD_INIT;
11754 result = fold_build1 (code, type, op);
11756 END_FOLD_INIT;
11757 return result;
11760 tree
11761 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
11763 tree result;
11764 START_FOLD_INIT;
11766 result = fold_build2 (code, type, op0, op1);
11768 END_FOLD_INIT;
11769 return result;
11772 tree
11773 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
11774 tree op2)
11776 tree result;
11777 START_FOLD_INIT;
11779 result = fold_build3 (code, type, op0, op1, op2);
11781 END_FOLD_INIT;
11782 return result;
11785 #undef START_FOLD_INIT
11786 #undef END_FOLD_INIT
11788 /* Determine if first argument is a multiple of second argument. Return 0 if
11789 it is not, or we cannot easily determine it to be.
11791 An example of the sort of thing we care about (at this point; this routine
11792 could surely be made more general, and expanded to do what the *_DIV_EXPR's
11793 fold cases do now) is discovering that
11795 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
11797 is a multiple of
11799 SAVE_EXPR (J * 8)
11801 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
11803 This code also handles discovering that
11805 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
11807 is a multiple of 8 so we don't have to worry about dealing with a
11808 possible remainder.
11810 Note that we *look* inside a SAVE_EXPR only to determine how it was
11811 calculated; it is not safe for fold to do much of anything else with the
11812 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
11813 at run time. For example, the latter example above *cannot* be implemented
11814 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
11815 evaluation time of the original SAVE_EXPR is not necessarily the same at
11816 the time the new expression is evaluated. The only optimization of this
11817 sort that would be valid is changing
11819 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
11821 divided by 8 to
11823 SAVE_EXPR (I) * SAVE_EXPR (J)
11825 (where the same SAVE_EXPR (J) is used in the original and the
11826 transformed version). */
11828 static int
11829 multiple_of_p (tree type, tree top, tree bottom)
11831 if (operand_equal_p (top, bottom, 0))
11832 return 1;
11834 if (TREE_CODE (type) != INTEGER_TYPE)
11835 return 0;
11837 switch (TREE_CODE (top))
11839 case BIT_AND_EXPR:
11840 /* Bitwise and provides a power of two multiple. If the mask is
11841 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
11842 if (!integer_pow2p (bottom))
11843 return 0;
11844 /* FALLTHRU */
11846 case MULT_EXPR:
11847 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
11848 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
11850 case PLUS_EXPR:
11851 case MINUS_EXPR:
11852 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
11853 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
11855 case LSHIFT_EXPR:
11856 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
11858 tree op1, t1;
11860 op1 = TREE_OPERAND (top, 1);
11861 /* const_binop may not detect overflow correctly,
11862 so check for it explicitly here. */
11863 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
11864 > TREE_INT_CST_LOW (op1)
11865 && TREE_INT_CST_HIGH (op1) == 0
11866 && 0 != (t1 = fold_convert (type,
11867 const_binop (LSHIFT_EXPR,
11868 size_one_node,
11869 op1, 0)))
11870 && ! TREE_OVERFLOW (t1))
11871 return multiple_of_p (type, t1, bottom);
11873 return 0;
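      /* For example, TOP = y << 3 is known to be a multiple of
         BOTTOM = 8, because 1 << 3 == 8 is.  */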
11875 case NOP_EXPR:
11876 /* Can't handle conversions from non-integral or wider integral type. */
11877 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
11878 || (TYPE_PRECISION (type)
11879 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
11880 return 0;
11882 /* ... fall through ... */
11884 case SAVE_EXPR:
11885 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
11887 case INTEGER_CST:
11888 if (TREE_CODE (bottom) != INTEGER_CST
11889 || (TYPE_UNSIGNED (type)
11890 && (tree_int_cst_sgn (top) < 0
11891 || tree_int_cst_sgn (bottom) < 0)))
11892 return 0;
11893 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
11894 top, bottom, 0));
11896 default:
11897 return 0;
11901 /* Return true if `t' is known to be non-negative. */
11903 int
11904 tree_expr_nonnegative_p (tree t)
11906 if (t == error_mark_node)
11907 return 0;
11909 if (TYPE_UNSIGNED (TREE_TYPE (t)))
11910 return 1;
11912 switch (TREE_CODE (t))
11914 case SSA_NAME:
11915 /* Query VRP to see if it has recorded any information about
11916 the range of this object. */
11917 return ssa_name_nonnegative_p (t);
11919 case ABS_EXPR:
11920 /* We can't return 1 if flag_wrapv is set because
11921 ABS_EXPR<INT_MIN> = INT_MIN. */
11922 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
11923 return 1;
11924 break;
11926 case INTEGER_CST:
11927 return tree_int_cst_sgn (t) >= 0;
11929 case REAL_CST:
11930 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
11932 case PLUS_EXPR:
11933 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11934 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11935 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11937 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
11938 both unsigned and at least 2 bits shorter than the result. */
11939 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11940 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11941 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11943 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11944 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11945 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11946 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11948 unsigned int prec = MAX (TYPE_PRECISION (inner1),
11949 TYPE_PRECISION (inner2)) + 1;
11950 return prec < TYPE_PRECISION (TREE_TYPE (t));
11953 break;
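      /* For example, with 32-bit int and 16-bit short,
         (int) (unsigned short) a + (int) (unsigned short) b needs at most
         17 bits, so the sum is non-negative in the 32-bit result.  */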
11955 case MULT_EXPR:
11956 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11958 /* x * x for floating point x is always non-negative. */
11959 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
11960 return 1;
11961 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11962 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11965 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
11966 both unsigned and their combined precision is less than that of the result. */
11967 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11968 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11969 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11971 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11972 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11973 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11974 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11975 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
11976 < TYPE_PRECISION (TREE_TYPE (t));
11978 return 0;
11980 case BIT_AND_EXPR:
11981 case MAX_EXPR:
11982 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11983 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11985 case BIT_IOR_EXPR:
11986 case BIT_XOR_EXPR:
11987 case MIN_EXPR:
11988 case RDIV_EXPR:
11989 case TRUNC_DIV_EXPR:
11990 case CEIL_DIV_EXPR:
11991 case FLOOR_DIV_EXPR:
11992 case ROUND_DIV_EXPR:
11993 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11994 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11996 case TRUNC_MOD_EXPR:
11997 case CEIL_MOD_EXPR:
11998 case FLOOR_MOD_EXPR:
11999 case ROUND_MOD_EXPR:
12000 case SAVE_EXPR:
12001 case NON_LVALUE_EXPR:
12002 case FLOAT_EXPR:
12003 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12005 case COMPOUND_EXPR:
12006 case MODIFY_EXPR:
12007 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12009 case BIND_EXPR:
12010 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
12012 case COND_EXPR:
12013 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
12014 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
12016 case NOP_EXPR:
12018 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12019 tree outer_type = TREE_TYPE (t);
12021 if (TREE_CODE (outer_type) == REAL_TYPE)
12023 if (TREE_CODE (inner_type) == REAL_TYPE)
12024 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12025 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12027 if (TYPE_UNSIGNED (inner_type))
12028 return 1;
12029 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12032 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12034 if (TREE_CODE (inner_type) == REAL_TYPE)
12035 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
12036 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12037 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12038 && TYPE_UNSIGNED (inner_type);
12041 break;
12043 case TARGET_EXPR:
12045 tree temp = TARGET_EXPR_SLOT (t);
12046 t = TARGET_EXPR_INITIAL (t);
12048 /* If the initializer is non-void, then it's a normal expression
12049 that will be assigned to the slot. */
12050 if (!VOID_TYPE_P (t))
12051 return tree_expr_nonnegative_p (t);
12053 /* Otherwise, the initializer sets the slot in some way. One common
12054 way is an assignment statement at the end of the initializer. */
12055 while (1)
12057 if (TREE_CODE (t) == BIND_EXPR)
12058 t = expr_last (BIND_EXPR_BODY (t));
12059 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12060 || TREE_CODE (t) == TRY_CATCH_EXPR)
12061 t = expr_last (TREE_OPERAND (t, 0));
12062 else if (TREE_CODE (t) == STATEMENT_LIST)
12063 t = expr_last (t);
12064 else
12065 break;
12067 if (TREE_CODE (t) == MODIFY_EXPR
12068 && TREE_OPERAND (t, 0) == temp)
12069 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12071 return 0;
12074 case CALL_EXPR:
12076 tree fndecl = get_callee_fndecl (t);
12077 tree arglist = TREE_OPERAND (t, 1);
12078 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12079 switch (DECL_FUNCTION_CODE (fndecl))
12081 CASE_FLT_FN (BUILT_IN_ACOS):
12082 CASE_FLT_FN (BUILT_IN_ACOSH):
12083 CASE_FLT_FN (BUILT_IN_CABS):
12084 CASE_FLT_FN (BUILT_IN_COSH):
12085 CASE_FLT_FN (BUILT_IN_ERFC):
12086 CASE_FLT_FN (BUILT_IN_EXP):
12087 CASE_FLT_FN (BUILT_IN_EXP10):
12088 CASE_FLT_FN (BUILT_IN_EXP2):
12089 CASE_FLT_FN (BUILT_IN_FABS):
12090 CASE_FLT_FN (BUILT_IN_FDIM):
12091 CASE_FLT_FN (BUILT_IN_HYPOT):
12092 CASE_FLT_FN (BUILT_IN_POW10):
12093 CASE_INT_FN (BUILT_IN_FFS):
12094 CASE_INT_FN (BUILT_IN_PARITY):
12095 CASE_INT_FN (BUILT_IN_POPCOUNT):
12096 /* Always true. */
12097 return 1;
12099 CASE_FLT_FN (BUILT_IN_SQRT):
12100 /* sqrt(-0.0) is -0.0. */
12101 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12102 return 1;
12103 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12105 CASE_FLT_FN (BUILT_IN_ASINH):
12106 CASE_FLT_FN (BUILT_IN_ATAN):
12107 CASE_FLT_FN (BUILT_IN_ATANH):
12108 CASE_FLT_FN (BUILT_IN_CBRT):
12109 CASE_FLT_FN (BUILT_IN_CEIL):
12110 CASE_FLT_FN (BUILT_IN_ERF):
12111 CASE_FLT_FN (BUILT_IN_EXPM1):
12112 CASE_FLT_FN (BUILT_IN_FLOOR):
12113 CASE_FLT_FN (BUILT_IN_FMOD):
12114 CASE_FLT_FN (BUILT_IN_FREXP):
12115 CASE_FLT_FN (BUILT_IN_LCEIL):
12116 CASE_FLT_FN (BUILT_IN_LDEXP):
12117 CASE_FLT_FN (BUILT_IN_LFLOOR):
12118 CASE_FLT_FN (BUILT_IN_LLCEIL):
12119 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12120 CASE_FLT_FN (BUILT_IN_LLRINT):
12121 CASE_FLT_FN (BUILT_IN_LLROUND):
12122 CASE_FLT_FN (BUILT_IN_LRINT):
12123 CASE_FLT_FN (BUILT_IN_LROUND):
12124 CASE_FLT_FN (BUILT_IN_MODF):
12125 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12126 CASE_FLT_FN (BUILT_IN_POW):
12127 CASE_FLT_FN (BUILT_IN_RINT):
12128 CASE_FLT_FN (BUILT_IN_ROUND):
12129 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12130 CASE_FLT_FN (BUILT_IN_SINH):
12131 CASE_FLT_FN (BUILT_IN_TANH):
12132 CASE_FLT_FN (BUILT_IN_TRUNC):
12133 /* True if the 1st argument is nonnegative. */
12134 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12136 CASE_FLT_FN (BUILT_IN_FMAX):
12137 /* True if the 1st OR 2nd arguments are nonnegative. */
12138 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12139 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12141 CASE_FLT_FN (BUILT_IN_FMIN):
12142 /* True if the 1st AND 2nd arguments are nonnegative. */
12143 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12144 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12146 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12147 /* True if the 2nd argument is nonnegative. */
12148 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12150 default:
12151 break;
12155 /* ... fall through ... */
12157 default:
12158 if (truth_value_p (TREE_CODE (t)))
12159 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12160 return 1;
12163 /* We don't know the sign of `t', so be conservative and return false. */
12164 return 0;
12167 /* Return true when T is an address and is known to be nonzero.
12168 For floating point we further ensure that T is not denormal.
12169 Similar logic is present in nonzero_address in rtlanal.h. */
12171 bool
12172 tree_expr_nonzero_p (tree t)
12174 tree type = TREE_TYPE (t);
12176 /* Doing something useful for floating point would need more work. */
12177 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12178 return false;
12180 switch (TREE_CODE (t))
12182 case SSA_NAME:
12183 /* Query VRP to see if it has recorded any information about
12184 the range of this object. */
12185 return ssa_name_nonzero_p (t);
12187 case ABS_EXPR:
12188 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12190 case INTEGER_CST:
12191 /* We used to test for !integer_zerop here. This does not work correctly
12192 if TREE_CONSTANT_OVERFLOW (t). */
12193 return (TREE_INT_CST_LOW (t) != 0
12194 || TREE_INT_CST_HIGH (t) != 0);
12196 case PLUS_EXPR:
12197 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12199 /* In the presence of negative values it is hard
12200 to say anything. */
12201 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12202 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12203 return false;
12204 /* One of the operands must be positive and the other non-negative. */
12205 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12206 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12208 break;
12210 case MULT_EXPR:
12211 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12213 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12214 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12216 break;
12218 case NOP_EXPR:
12220 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12221 tree outer_type = TREE_TYPE (t);
12223 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12224 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
12226 break;
12228 case ADDR_EXPR:
12230 tree base = get_base_address (TREE_OPERAND (t, 0));
12232 if (!base)
12233 return false;
12235 /* Weak declarations may link to NULL. */
12236 if (VAR_OR_FUNCTION_DECL_P (base))
12237 return !DECL_WEAK (base);
12239 /* Constants are never weak. */
12240 if (CONSTANT_CLASS_P (base))
12241 return true;
12243 return false;
12246 case COND_EXPR:
12247 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12248 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
12250 case MIN_EXPR:
12251 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12252 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12254 case MAX_EXPR:
12255 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12257 /* When both operands are nonzero, then MAX must be too. */
12258 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12259 return true;
12261 /* MAX where operand 0 is positive is positive. */
12262 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12264 /* MAX where operand 1 is positive is positive. */
12265 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12266 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12267 return true;
12268 break;
12270 case COMPOUND_EXPR:
12271 case MODIFY_EXPR:
12272 case BIND_EXPR:
12273 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
12275 case SAVE_EXPR:
12276 case NON_LVALUE_EXPR:
12277 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12279 case BIT_IOR_EXPR:
12280 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12281 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12283 case CALL_EXPR:
12284 return alloca_call_p (t);
12286 default:
12287 break;
12289 return false;
12292 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12293 attempt to fold the expression to a constant without modifying TYPE,
12294 OP0 or OP1.
12296 If the expression could be simplified to a constant, then return
12297 the constant. If the expression would not be simplified to a
12298 constant, then return NULL_TREE. */
12300 tree
12301 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12303 tree tem = fold_binary (code, type, op0, op1);
12304 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12307 /* Given the components of a unary expression CODE, TYPE and OP0,
12308 attempt to fold the expression to a constant without modifying
12309 TYPE or OP0.
12311 If the expression could be simplified to a constant, then return
12312 the constant. If the expression would not be simplified to a
12313 constant, then return NULL_TREE. */
12315 tree
12316 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12318 tree tem = fold_unary (code, type, op0);
12319 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12322 /* If EXP represents referencing an element in a constant string
12323 (either via pointer arithmetic or array indexing), return the
12324 tree representing the value accessed, otherwise return NULL. */
12326 tree
12327 fold_read_from_constant_string (tree exp)
12329 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
12331 tree exp1 = TREE_OPERAND (exp, 0);
12332 tree index;
12333 tree string;
12335 if (TREE_CODE (exp) == INDIRECT_REF)
12336 string = string_constant (exp1, &index);
12337 else
12339 tree low_bound = array_ref_low_bound (exp);
12340 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12342 /* Optimize the special case of a zero lower bound.
12344 We convert the low_bound to sizetype to avoid some problems
12345 with constant folding. (E.g. suppose the lower bound is 1,
12346 and its mode is QI. Without the conversion, (ARRAY
12347 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12348 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
12349 if (! integer_zerop (low_bound))
12350 index = size_diffop (index, fold_convert (sizetype, low_bound));
12352 string = exp1;
12355 if (string
12356 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12357 && TREE_CODE (string) == STRING_CST
12358 && TREE_CODE (index) == INTEGER_CST
12359 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12360 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12361 == MODE_INT)
12362 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12363 return fold_convert (TREE_TYPE (exp),
12364 build_int_cst (NULL_TREE,
12365 (TREE_STRING_POINTER (string)
12366 [TREE_INT_CST_LOW (index)])));
12368 return NULL;
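/* For example, "abc"[1] and *("abc" + 1) both fold to the character
   constant 'b', provided the string element type has a single-byte
   integer mode and the index is a constant within the string.  */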
12371 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12372 an integer constant or real constant.
12374 TYPE is the type of the result. */
12376 static tree
12377 fold_negate_const (tree arg0, tree type)
12379 tree t = NULL_TREE;
12381 switch (TREE_CODE (arg0))
12383 case INTEGER_CST:
12385 unsigned HOST_WIDE_INT low;
12386 HOST_WIDE_INT high;
12387 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12388 TREE_INT_CST_HIGH (arg0),
12389 &low, &high);
12390 t = build_int_cst_wide (type, low, high);
12391 t = force_fit_type (t, 1,
12392 (overflow | TREE_OVERFLOW (arg0))
12393 && !TYPE_UNSIGNED (type),
12394 TREE_CONSTANT_OVERFLOW (arg0));
12395 break;
12398 case REAL_CST:
12399 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12400 break;
12402 default:
12403 gcc_unreachable ();
12406 return t;
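/* Illustrative sketch (standalone program, not part of the original
   source): neg_double performs two's-complement negation of the
   (high, low) word pair; the underlying identity on a single word is
   -x == ~x + 1.  Negating the most negative value overflows, which is
   why the overflow flag is threaded into force_fit_type above.  */
#include <assert.h>
int main (void)
{
  unsigned int x = 12345u;
  assert (-x == ~x + 1u);   /* two's-complement negation */
  return 0;
}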
12409 /* Return the tree for abs (ARG0) when ARG0 is known to be either
12410 an integer constant or a real constant.
12412 TYPE is the type of the result. */
12414 tree
12415 fold_abs_const (tree arg0, tree type)
12417 tree t = NULL_TREE;
12419 switch (TREE_CODE (arg0))
12421 case INTEGER_CST:
12422 /* If the value is unsigned, then the absolute value is
12423 the same as the ordinary value. */
12424 if (TYPE_UNSIGNED (type))
12425 t = arg0;
12426 /* Similarly, if the value is non-negative. */
12427 else if (INT_CST_LT (integer_minus_one_node, arg0))
12428 t = arg0;
12429 /* If the value is negative, then the absolute value is
12430 its negation. */
12431 else
12433 unsigned HOST_WIDE_INT low;
12434 HOST_WIDE_INT high;
12435 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12436 TREE_INT_CST_HIGH (arg0),
12437 &low, &high);
12438 t = build_int_cst_wide (type, low, high);
12439 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
12440 TREE_CONSTANT_OVERFLOW (arg0));
12442 break;
12444 case REAL_CST:
12445 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
12446 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12447 else
12448 t = arg0;
12449 break;
12451 default:
12452 gcc_unreachable ();
12455 return t;
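/* Illustrative sketch (standalone program, not part of the original
   source): the INTEGER_CST arms above in miniature.  Note abs (INT_MIN)
   is not representable in two's complement, which is the overflow the
   negation arm guards against.  */
#include <assert.h>
int main (void)
{
  int neg = -7, pos = 5;
  assert ((neg < 0 ? -neg : neg) == 7);   /* negative: negate        */
  assert ((pos < 0 ? -pos : pos) == 5);   /* non-negative: unchanged */
  return 0;
}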
12458 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
12459 constant. TYPE is the type of the result. */
12461 static tree
12462 fold_not_const (tree arg0, tree type)
12464 tree t = NULL_TREE;
12466 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
12468 t = build_int_cst_wide (type,
12469 ~ TREE_INT_CST_LOW (arg0),
12470 ~ TREE_INT_CST_HIGH (arg0));
12471 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
12472 TREE_CONSTANT_OVERFLOW (arg0));
12474 return t;
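/* Illustrative sketch (standalone program, not part of the original
   source): complementing the low and high words separately complements
   the whole double-word value, shown here at 64/32 bits.  */
#include <assert.h>
#include <stdint.h>
int main (void)
{
  uint64_t v  = 0x00000001ffffffffULL;
  uint32_t lo = (uint32_t) v, hi = (uint32_t) (v >> 32);
  uint64_t nv = ((uint64_t) (uint32_t) ~hi << 32) | (uint32_t) ~lo;
  assert (nv == ~v);
  return 0;
}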
12477 /* Given CODE, a relational operator, the target type, TYPE and two
12478 constant operands OP0 and OP1, return the result of the
12479 relational operation. If the result is not a compile time
12480 constant, then return NULL_TREE. */
12482 static tree
12483 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
12485 int result, invert;
12487 /* From here on, the only cases we handle are when the result is
12488 known to be a constant. */
12490 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
12492 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
12493 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
12495 /* Handle the cases where either operand is a NaN. */
12496 if (real_isnan (c0) || real_isnan (c1))
12498 switch (code)
12500 case EQ_EXPR:
12501 case ORDERED_EXPR:
12502 result = 0;
12503 break;
12505 case NE_EXPR:
12506 case UNORDERED_EXPR:
12507 case UNLT_EXPR:
12508 case UNLE_EXPR:
12509 case UNGT_EXPR:
12510 case UNGE_EXPR:
12511 case UNEQ_EXPR:
12512 result = 1;
12513 break;
12515 case LT_EXPR:
12516 case LE_EXPR:
12517 case GT_EXPR:
12518 case GE_EXPR:
12519 case LTGT_EXPR:
12520 if (flag_trapping_math)
12521 return NULL_TREE;
12522 result = 0;
12523 break;
12525 default:
12526 gcc_unreachable ();
12529 return constant_boolean_node (result, type);
12532 return constant_boolean_node (real_compare (code, c0, c1), type);
12535 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
12537 To compute GT, swap the arguments and do LT.
12538 To compute GE, do LT and invert the result.
12539 To compute LE, swap the arguments, do LT and invert the result.
12540 To compute NE, do EQ and invert the result.
12542 Therefore, the code below must handle only EQ and LT. */
12544 if (code == LE_EXPR || code == GT_EXPR)
12546 tree tem = op0;
12547 op0 = op1;
12548 op1 = tem;
12549 code = swap_tree_comparison (code);
12552 /* Note that it is safe to invert for real values here because we
12553 have already handled the one case where it matters. */
12555 invert = 0;
12556 if (code == NE_EXPR || code == GE_EXPR)
12558 invert = 1;
12559 code = invert_tree_comparison (code, false);
12562 /* Compute a result for LT or EQ if args permit;
12563 otherwise return NULL_TREE. */
12564 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
12566 if (code == EQ_EXPR)
12567 result = tree_int_cst_equal (op0, op1);
12568 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
12569 result = INT_CST_LT_UNSIGNED (op0, op1);
12570 else
12571 result = INT_CST_LT (op0, op1);
12573 else
12574 return NULL_TREE;
12576 if (invert)
12577 result ^= 1;
12578 return constant_boolean_node (result, type);
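/* Illustrative sketch (standalone program, not part of the original
   source): the NaN table and the swap/invert canonicalization above,
   observable at the C level.  */
#include <assert.h>
#include <math.h>
int main (void)
{
  double n = nan ("");
  assert (!(n == n));               /* EQ_EXPR is false on a NaN     */
  assert (n != n);                  /* NE_EXPR is true on a NaN      */
  assert (isunordered (n, 1.0));    /* UNORDERED_EXPR is true        */
  assert ((2 > 1) == (1 < 2));      /* GT: swap the args, do LT      */
  assert ((2 >= 1) == !(2 < 1));    /* GE: do LT, invert the result  */
  assert ((2 != 1) == !(2 == 1));   /* NE: do EQ, invert the result  */
  return 0;
}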
12581 /* Build an expression for a cleanup point containing EXPR with type TYPE.
12582 Don't build a cleanup point expression for EXPR which doesn't have side
12583 effects. */
12585 tree
12586 fold_build_cleanup_point_expr (tree type, tree expr)
12588 /* If the expression does not have side effects then we don't have to wrap
12589 it with a cleanup point expression. */
12590 if (!TREE_SIDE_EFFECTS (expr))
12591 return expr;
12593 /* If the expression is a RETURN_EXPR, check whether the expression inside
12594 the return, or the right-hand side of the MODIFY_EXPR inside the return,
12595 has side effects. If neither does, we don't need to wrap the expression
12596 in a cleanup point expression. Note we don't check the left-hand side of
12597 the MODIFY_EXPR because it should always be the return decl. */
12598 if (TREE_CODE (expr) == RETURN_EXPR)
12600 tree op = TREE_OPERAND (expr, 0);
12601 if (!op || !TREE_SIDE_EFFECTS (op))
12602 return expr;
12603 op = TREE_OPERAND (op, 1);
12604 if (!TREE_SIDE_EFFECTS (op))
12605 return expr;
12608 return build1 (CLEANUP_POINT_EXPR, type, expr);
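/* Illustrative sketch (hypothetical inputs, not from the original
   source): the effect of the RETURN_EXPR test above at the source
   level:

     return x;       -- RHS has no side effects: returned unwrapped
     return f ();    -- RHS has side effects: wrapped in a
                        CLEANUP_POINT_EXPR  */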
12611 /* Build an expression for the address of T. Folds away INDIRECT_REF to
12612 avoid confusing the gimplify process. */
12614 tree
12615 build_fold_addr_expr_with_type (tree t, tree ptrtype)
12617 /* The size of the object is not relevant when talking about its address. */
12618 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12619 t = TREE_OPERAND (t, 0);
12621 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
12622 if (TREE_CODE (t) == INDIRECT_REF
12623 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
12625 t = TREE_OPERAND (t, 0);
12626 if (TREE_TYPE (t) != ptrtype)
12627 t = build1 (NOP_EXPR, ptrtype, t);
12629 else
12631 tree base = t;
12633 while (handled_component_p (base))
12634 base = TREE_OPERAND (base, 0);
12635 if (DECL_P (base))
12636 TREE_ADDRESSABLE (base) = 1;
12638 t = build1 (ADDR_EXPR, ptrtype, t);
12641 return t;
12644 tree
12645 build_fold_addr_expr (tree t)
12647 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
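/* Illustrative sketch (standalone program, not part of the original
   source): the INDIRECT_REF folding these builders perform, &*p => p.  */
#include <assert.h>
int main (void)
{
  int x = 42, *p = &x;
  assert (&*p == p);    /* taking the address undoes the indirection */
  return 0;
}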
12650 /* Given a pointer value OP0 and a type TYPE, return a simplified version
12651 of an indirection through OP0, or NULL_TREE if no simplification is
12652 possible. */
12654 tree
12655 fold_indirect_ref_1 (tree type, tree op0)
12657 tree sub = op0;
12658 tree subtype;
12660 STRIP_NOPS (sub);
12661 subtype = TREE_TYPE (sub);
12662 if (!POINTER_TYPE_P (subtype))
12663 return NULL_TREE;
12665 if (TREE_CODE (sub) == ADDR_EXPR)
12667 tree op = TREE_OPERAND (sub, 0);
12668 tree optype = TREE_TYPE (op);
12669 /* *&p => p; make sure to handle *&"str"[cst] here. */
12670 if (type == optype)
12672 tree fop = fold_read_from_constant_string (op);
12673 if (fop)
12674 return fop;
12675 else
12676 return op;
12678 /* *(foo *)&fooarray => fooarray[0] */
12679 else if (TREE_CODE (optype) == ARRAY_TYPE
12680 && type == TREE_TYPE (optype))
12682 tree type_domain = TYPE_DOMAIN (optype);
12683 tree min_val = size_zero_node;
12684 if (type_domain && TYPE_MIN_VALUE (type_domain))
12685 min_val = TYPE_MIN_VALUE (type_domain);
12686 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
12688 /* *(foo *)&complexfoo => __real__ complexfoo */
12689 else if (TREE_CODE (optype) == COMPLEX_TYPE
12690 && type == TREE_TYPE (optype))
12691 return fold_build1 (REALPART_EXPR, type, op);
12694 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
12695 if (TREE_CODE (sub) == PLUS_EXPR
12696 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
12698 tree op00 = TREE_OPERAND (sub, 0);
12699 tree op01 = TREE_OPERAND (sub, 1);
12700 tree op00type;
12702 STRIP_NOPS (op00);
12703 op00type = TREE_TYPE (op00);
12704 if (TREE_CODE (op00) == ADDR_EXPR
12705 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
12706 && type == TREE_TYPE (TREE_TYPE (op00type)))
12708 tree size = TYPE_SIZE_UNIT (type);
12709 if (tree_int_cst_equal (size, op01))
12710 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
12714 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
12715 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
12716 && type == TREE_TYPE (TREE_TYPE (subtype)))
12718 tree type_domain;
12719 tree min_val = size_zero_node;
12720 sub = build_fold_indirect_ref (sub);
12721 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
12722 if (type_domain && TYPE_MIN_VALUE (type_domain))
12723 min_val = TYPE_MIN_VALUE (type_domain);
12724 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
12727 return NULL_TREE;
12730 /* Builds an expression for an indirection through T, simplifying some
12731 cases. */
12733 tree
12734 build_fold_indirect_ref (tree t)
12736 tree type = TREE_TYPE (TREE_TYPE (t));
12737 tree sub = fold_indirect_ref_1 (type, t);
12739 if (sub)
12740 return sub;
12741 else
12742 return build1 (INDIRECT_REF, type, t);
12745 /* Given an INDIRECT_REF T, return either T or a simplified version. */
12747 tree
12748 fold_indirect_ref (tree t)
12750 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
12752 if (sub)
12753 return sub;
12754 else
12755 return t;
12758 /* Strip non-trapping, non-side-effecting tree nodes from an expression
12759 whose result is ignored. The type of the returned tree need not be
12760 the same as the original expression. */
12762 tree
12763 fold_ignored_result (tree t)
12765 if (!TREE_SIDE_EFFECTS (t))
12766 return integer_zero_node;
12768 for (;;)
12769 switch (TREE_CODE_CLASS (TREE_CODE (t)))
12771 case tcc_unary:
12772 t = TREE_OPERAND (t, 0);
12773 break;
12775 case tcc_binary:
12776 case tcc_comparison:
12777 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
12778 t = TREE_OPERAND (t, 0);
12779 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
12780 t = TREE_OPERAND (t, 1);
12781 else
12782 return t;
12783 break;
12785 case tcc_expression:
12786 switch (TREE_CODE (t))
12788 case COMPOUND_EXPR:
12789 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
12790 return t;
12791 t = TREE_OPERAND (t, 0);
12792 break;
12794 case COND_EXPR:
12795 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
12796 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
12797 return t;
12798 t = TREE_OPERAND (t, 0);
12799 break;
12801 default:
12802 return t;
12804 break;
12806 default:
12807 return t;
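/* Illustrative sketch (hypothetical inputs, not from the original
   source): with the result unused,

     -(f ())     the tcc_unary NEGATE_EXPR is stripped, keeping f ()
     f () + 3    only the side-effecting operand f () is kept
     (f (), 3)   the trailing constant of the COMPOUND_EXPR is dropped  */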
12811 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
12812 This can only be applied to objects of a sizetype. */
12814 tree
12815 round_up (tree value, int divisor)
12817 tree div = NULL_TREE;
12819 gcc_assert (divisor > 0);
12820 if (divisor == 1)
12821 return value;
12823 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
12824 have to do anything. Only do this check when VALUE is not a
12825 constant; for a constant, the check is more expensive than
12826 simply doing the rounding. */
12827 if (TREE_CODE (value) != INTEGER_CST)
12829 div = build_int_cst (TREE_TYPE (value), divisor);
12831 if (multiple_of_p (TREE_TYPE (value), value, div))
12832 return value;
12835 /* If divisor is a power of two, simplify this to bit manipulation. */
12836 if (divisor == (divisor & -divisor))
12838 tree t;
12840 t = build_int_cst (TREE_TYPE (value), divisor - 1);
12841 value = size_binop (PLUS_EXPR, value, t);
12842 t = build_int_cst (TREE_TYPE (value), -divisor);
12843 value = size_binop (BIT_AND_EXPR, value, t);
12845 else
12847 if (!div)
12848 div = build_int_cst (TREE_TYPE (value), divisor);
12849 value = size_binop (CEIL_DIV_EXPR, value, div);
12850 value = size_binop (MULT_EXPR, value, div);
12853 return value;
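/* Illustrative sketch (standalone program, not part of the original
   source): the power-of-two branch above computes
   (value + divisor-1) & -divisor.  */
#include <assert.h>
int main (void)
{
  unsigned value = 37, divisor = 8;                /* divisor is 2**3 */
  assert (((value + divisor - 1) & -divisor) == 40);
  assert (((40u + divisor - 1) & -divisor) == 40); /* already aligned */
  return 0;
}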
12856 /* Likewise, but round down. */
12858 tree
12859 round_down (tree value, int divisor)
12861 tree div = NULL_TREE;
12863 gcc_assert (divisor > 0);
12864 if (divisor == 1)
12865 return value;
12867 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
12868 have to do anything. Only do this check when VALUE is not a
12869 constant; for a constant, the check is more expensive than
12870 simply doing the rounding. */
12871 if (TREE_CODE (value) != INTEGER_CST)
12873 div = build_int_cst (TREE_TYPE (value), divisor);
12875 if (multiple_of_p (TREE_TYPE (value), value, div))
12876 return value;
12879 /* If divisor is a power of two, simplify this to bit manipulation. */
12880 if (divisor == (divisor & -divisor))
12882 tree t;
12884 t = build_int_cst (TREE_TYPE (value), -divisor);
12885 value = size_binop (BIT_AND_EXPR, value, t);
12887 else
12889 if (!div)
12890 div = build_int_cst (TREE_TYPE (value), divisor);
12891 value = size_binop (FLOOR_DIV_EXPR, value, div);
12892 value = size_binop (MULT_EXPR, value, div);
12895 return value;
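/* Illustrative sketch (standalone program, not part of the original
   source): rounding down to a power of two needs only the mask step,
   value & -divisor.  */
#include <assert.h>
int main (void)
{
  unsigned value = 37, divisor = 8;
  assert ((value & -divisor) == 32);
  return 0;
}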
12898 /* Returns the pointer to the base of the object addressed by EXP and
12899 extracts the information about the offset of the access, storing it
12900 in PBITPOS and POFFSET. */
12902 static tree
12903 split_address_to_core_and_offset (tree exp,
12904 HOST_WIDE_INT *pbitpos, tree *poffset)
12906 tree core;
12907 enum machine_mode mode;
12908 int unsignedp, volatilep;
12909 HOST_WIDE_INT bitsize;
12911 if (TREE_CODE (exp) == ADDR_EXPR)
12913 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
12914 poffset, &mode, &unsignedp, &volatilep,
12915 false);
12916 core = build_fold_addr_expr (core);
12918 else
12920 core = exp;
12921 *pbitpos = 0;
12922 *poffset = NULL_TREE;
12925 return core;
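/* Illustrative sketch (standalone program, not part of the original
   source): the decomposition this routine performs, seen at the C
   level; &obj.field splits into the core &obj plus a constant byte
   offset.  */
#include <assert.h>
#include <stddef.h>
struct s { char pad; int field; };
int main (void)
{
  struct s obj;
  assert ((char *) &obj.field
          == (char *) &obj + offsetof (struct s, field));
  return 0;
}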
12928 /* Returns true if addresses of E1 and E2 differ by a constant, false
12929 otherwise. If they do, E1 - E2 is stored in *DIFF. */
12931 bool
12932 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
12934 tree core1, core2;
12935 HOST_WIDE_INT bitpos1, bitpos2;
12936 tree toffset1, toffset2, tdiff, type;
12938 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
12939 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
12941 if (bitpos1 % BITS_PER_UNIT != 0
12942 || bitpos2 % BITS_PER_UNIT != 0
12943 || !operand_equal_p (core1, core2, 0))
12944 return false;
12946 if (toffset1 && toffset2)
12948 type = TREE_TYPE (toffset1);
12949 if (type != TREE_TYPE (toffset2))
12950 toffset2 = fold_convert (type, toffset2);
12952 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
12953 if (!cst_and_fits_in_hwi (tdiff))
12954 return false;
12956 *diff = int_cst_value (tdiff);
12958 else if (toffset1 || toffset2)
12960 /* If only one of the offsets is non-constant, the difference cannot
12961 be a constant. */
12962 return false;
12964 else
12965 *diff = 0;
12967 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
12968 return true;
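/* Illustrative sketch (standalone program, not part of the original
   source): two addresses with the same core whose difference is the
   compile-time constant this routine extracts.  */
#include <assert.h>
int main (void)
{
  int a[10];
  assert ((char *) &a[7] - (char *) &a[2] == 5 * (long) sizeof (int));
  return 0;
}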
12971 /* Simplify the floating point expression EXP when the sign of the
12972 result is not significant. Return NULL_TREE if no simplification
12973 is possible. */
12975 tree
12976 fold_strip_sign_ops (tree exp)
12978 tree arg0, arg1;
12980 switch (TREE_CODE (exp))
12982 case ABS_EXPR:
12983 case NEGATE_EXPR:
12984 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
12985 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
12987 case MULT_EXPR:
12988 case RDIV_EXPR:
12989 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
12990 return NULL_TREE;
12991 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
12992 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
12993 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
12994 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
12995 arg0 ? arg0 : TREE_OPERAND (exp, 0),
12996 arg1 ? arg1 : TREE_OPERAND (exp, 1));
12997 break;
12999 default:
13000 break;
13002 return NULL_TREE;