* tree-ssa-propagate.c (set_rhs): Restructure validity tests as a
[official-gcc.git] / gcc / fold-const.c
blobc74ffa37bdece36f524477e601cdb8bfbbf686bd
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
46 #include "config.h"
47 #include "system.h"
48 #include "coretypes.h"
49 #include "tm.h"
50 #include "flags.h"
51 #include "tree.h"
52 #include "real.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "toplev.h"
57 #include "ggc.h"
58 #include "hashtab.h"
59 #include "langhooks.h"
60 #include "md5.h"
62 /* Non-zero if we are folding constants inside an initializer; zero
63 otherwise. */
64 int folding_initializer = 0;
66 /* The following constants represent a bit based encoding of GCC's
67 comparison operators. This encoding simplifies transformations
68 on relational comparison operators, such as AND and OR. */
69 enum comparison_code {
70 COMPCODE_FALSE = 0,
71 COMPCODE_LT = 1,
72 COMPCODE_EQ = 2,
73 COMPCODE_LE = 3,
74 COMPCODE_GT = 4,
75 COMPCODE_LTGT = 5,
76 COMPCODE_GE = 6,
77 COMPCODE_ORD = 7,
78 COMPCODE_UNORD = 8,
79 COMPCODE_UNLT = 9,
80 COMPCODE_UNEQ = 10,
81 COMPCODE_UNLE = 11,
82 COMPCODE_UNGT = 12,
83 COMPCODE_NE = 13,
84 COMPCODE_UNGE = 14,
85 COMPCODE_TRUE = 15
88 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
89 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
90 static bool negate_mathfn_p (enum built_in_function);
91 static bool negate_expr_p (tree);
92 static tree negate_expr (tree);
93 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
94 static tree associate_trees (tree, tree, enum tree_code, tree);
95 static tree const_binop (enum tree_code, tree, tree, int);
96 static enum comparison_code comparison_to_compcode (enum tree_code);
97 static enum tree_code compcode_to_comparison (enum comparison_code);
98 static tree combine_comparisons (enum tree_code, enum tree_code,
99 enum tree_code, tree, tree, tree);
100 static int truth_value_p (enum tree_code);
101 static int operand_equal_for_comparison_p (tree, tree, tree);
102 static int twoval_comparison_p (tree, tree *, tree *, int *);
103 static tree eval_subst (tree, tree, tree, tree, tree);
104 static tree pedantic_omit_one_operand (tree, tree, tree);
105 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
106 static tree make_bit_field_ref (tree, tree, int, int, int);
107 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
108 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
109 enum machine_mode *, int *, int *,
110 tree *, tree *);
111 static int all_ones_mask_p (tree, int);
112 static tree sign_bit_p (tree, tree);
113 static int simple_operand_p (tree);
114 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
115 static tree range_predecessor (tree);
116 static tree range_successor (tree);
117 static tree make_range (tree, int *, tree *, tree *);
118 static tree build_range_check (tree, tree, int, tree, tree);
119 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
120 tree);
121 static tree fold_range_test (enum tree_code, tree, tree, tree);
122 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
123 static tree unextend (tree, int, int, tree);
124 static tree fold_truthop (enum tree_code, tree, tree, tree);
125 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
126 static tree extract_muldiv (tree, tree, enum tree_code, tree);
127 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
128 static int multiple_of_p (tree, tree, tree);
129 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
130 tree, tree,
131 tree, tree, int);
132 static bool fold_real_zero_addition_p (tree, tree, int);
133 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
134 tree, tree, tree);
135 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
136 static tree fold_div_compare (enum tree_code, tree, tree, tree);
137 static bool reorder_operands_p (tree, tree);
138 static tree fold_negate_const (tree, tree);
139 static tree fold_not_const (tree, tree);
140 static tree fold_relational_const (enum tree_code, tree, tree, tree);
141 static int native_encode_expr (tree, unsigned char *, int);
142 static tree native_interpret_expr (tree, unsigned char *, int);
145 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
146 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
147 and SUM1. Then this yields nonzero if overflow occurred during the
148 addition.
150 Overflow occurs if A and B have the same sign, but A and SUM differ in
151 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
152 sign. */
153 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
155 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
156 We do that by representing the two-word integer in 4 words, with only
157 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
158 number. The value of the word is LOWPART + HIGHPART * BASE. */
160 #define LOWPART(x) \
161 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
162 #define HIGHPART(x) \
163 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
164 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
166 /* Unpack a two-word integer into 4 words.
167 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
168 WORDS points to the array of HOST_WIDE_INTs. */
170 static void
171 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
173 words[0] = LOWPART (low);
174 words[1] = HIGHPART (low);
175 words[2] = LOWPART (hi);
176 words[3] = HIGHPART (hi);
179 /* Pack an array of 4 words into a two-word integer.
180 WORDS points to the array of words.
181 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
183 static void
184 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
185 HOST_WIDE_INT *hi)
187 *low = words[0] + words[1] * BASE;
188 *hi = words[2] + words[3] * BASE;
191 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
192 in overflow of the value, when >0 we are only interested in signed
193 overflow, for <0 we are interested in any overflow. OVERFLOWED
194 indicates whether overflow has already occurred. CONST_OVERFLOWED
195 indicates whether constant overflow has already occurred. We force
196 T's value to be within range of T's type (by setting to 0 or 1 all
197 the bits outside the type's range). We set TREE_OVERFLOWED if,
198 OVERFLOWED is nonzero,
199 or OVERFLOWABLE is >0 and signed overflow occurs
200 or OVERFLOWABLE is <0 and any overflow occurs
201 We set TREE_CONSTANT_OVERFLOWED if,
202 CONST_OVERFLOWED is nonzero
203 or we set TREE_OVERFLOWED.
204 We return either the original T, or a copy. */
206 tree
207 force_fit_type (tree t, int overflowable,
208 bool overflowed, bool overflowed_const)
210 unsigned HOST_WIDE_INT low;
211 HOST_WIDE_INT high;
212 unsigned int prec;
213 int sign_extended_type;
215 gcc_assert (TREE_CODE (t) == INTEGER_CST);
217 low = TREE_INT_CST_LOW (t);
218 high = TREE_INT_CST_HIGH (t);
220 if (POINTER_TYPE_P (TREE_TYPE (t))
221 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
222 prec = POINTER_SIZE;
223 else
224 prec = TYPE_PRECISION (TREE_TYPE (t));
225 /* Size types *are* sign extended. */
226 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
227 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
228 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
230 /* First clear all bits that are beyond the type's precision. */
232 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
234 else if (prec > HOST_BITS_PER_WIDE_INT)
235 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
236 else
238 high = 0;
239 if (prec < HOST_BITS_PER_WIDE_INT)
240 low &= ~((HOST_WIDE_INT) (-1) << prec);
243 if (!sign_extended_type)
244 /* No sign extension */;
245 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
246 /* Correct width already. */;
247 else if (prec > HOST_BITS_PER_WIDE_INT)
249 /* Sign extend top half? */
250 if (high & ((unsigned HOST_WIDE_INT)1
251 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
252 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
254 else if (prec == HOST_BITS_PER_WIDE_INT)
256 if ((HOST_WIDE_INT)low < 0)
257 high = -1;
259 else
261 /* Sign extend bottom half? */
262 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
264 high = -1;
265 low |= (HOST_WIDE_INT)(-1) << prec;
269 /* If the value changed, return a new node. */
270 if (overflowed || overflowed_const
271 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
273 t = build_int_cst_wide (TREE_TYPE (t), low, high);
275 if (overflowed
276 || overflowable < 0
277 || (overflowable > 0 && sign_extended_type))
279 t = copy_node (t);
280 TREE_OVERFLOW (t) = 1;
281 TREE_CONSTANT_OVERFLOW (t) = 1;
283 else if (overflowed_const)
285 t = copy_node (t);
286 TREE_CONSTANT_OVERFLOW (t) = 1;
290 return t;
293 /* Add two doubleword integers with doubleword result.
294 Return nonzero if the operation overflows according to UNSIGNED_P.
295 Each argument is given as two `HOST_WIDE_INT' pieces.
296 One argument is L1 and H1; the other, L2 and H2.
297 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
300 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
301 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
302 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
303 bool unsigned_p)
305 unsigned HOST_WIDE_INT l;
306 HOST_WIDE_INT h;
308 l = l1 + l2;
309 h = h1 + h2 + (l < l1);
311 *lv = l;
312 *hv = h;
314 if (unsigned_p)
315 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
316 else
317 return OVERFLOW_SUM_SIGN (h1, h2, h);
320 /* Negate a doubleword integer with doubleword result.
321 Return nonzero if the operation overflows, assuming it's signed.
322 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
323 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
326 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
327 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
329 if (l1 == 0)
331 *lv = 0;
332 *hv = - h1;
333 return (*hv & h1) < 0;
335 else
337 *lv = -l1;
338 *hv = ~h1;
339 return 0;
343 /* Multiply two doubleword integers with doubleword result.
344 Return nonzero if the operation overflows according to UNSIGNED_P.
345 Each argument is given as two `HOST_WIDE_INT' pieces.
346 One argument is L1 and H1; the other, L2 and H2.
347 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
350 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
351 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
352 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
353 bool unsigned_p)
355 HOST_WIDE_INT arg1[4];
356 HOST_WIDE_INT arg2[4];
357 HOST_WIDE_INT prod[4 * 2];
358 unsigned HOST_WIDE_INT carry;
359 int i, j, k;
360 unsigned HOST_WIDE_INT toplow, neglow;
361 HOST_WIDE_INT tophigh, neghigh;
363 encode (arg1, l1, h1);
364 encode (arg2, l2, h2);
366 memset (prod, 0, sizeof prod);
368 for (i = 0; i < 4; i++)
370 carry = 0;
371 for (j = 0; j < 4; j++)
373 k = i + j;
374 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
375 carry += arg1[i] * arg2[j];
376 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
377 carry += prod[k];
378 prod[k] = LOWPART (carry);
379 carry = HIGHPART (carry);
381 prod[i + 4] = carry;
384 decode (prod, lv, hv);
385 decode (prod + 4, &toplow, &tophigh);
387 /* Unsigned overflow is immediate. */
388 if (unsigned_p)
389 return (toplow | tophigh) != 0;
391 /* Check for signed overflow by calculating the signed representation of the
392 top half of the result; it should agree with the low half's sign bit. */
393 if (h1 < 0)
395 neg_double (l2, h2, &neglow, &neghigh);
396 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
398 if (h2 < 0)
400 neg_double (l1, h1, &neglow, &neghigh);
401 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
403 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
406 /* Shift the doubleword integer in L1, H1 left by COUNT places
407 keeping only PREC bits of result.
408 Shift right if COUNT is negative.
409 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
410 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
412 void
413 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
414 HOST_WIDE_INT count, unsigned int prec,
415 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
417 unsigned HOST_WIDE_INT signmask;
419 if (count < 0)
421 rshift_double (l1, h1, -count, prec, lv, hv, arith);
422 return;
425 if (SHIFT_COUNT_TRUNCATED)
426 count %= prec;
428 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
430 /* Shifting by the host word size is undefined according to the
431 ANSI standard, so we must handle this as a special case. */
432 *hv = 0;
433 *lv = 0;
435 else if (count >= HOST_BITS_PER_WIDE_INT)
437 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
438 *lv = 0;
440 else
442 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
443 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
444 *lv = l1 << count;
447 /* Sign extend all bits that are beyond the precision. */
449 signmask = -((prec > HOST_BITS_PER_WIDE_INT
450 ? ((unsigned HOST_WIDE_INT) *hv
451 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
452 : (*lv >> (prec - 1))) & 1);
454 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
456 else if (prec >= HOST_BITS_PER_WIDE_INT)
458 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
459 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
461 else
463 *hv = signmask;
464 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
465 *lv |= signmask << prec;
469 /* Shift the doubleword integer in L1, H1 right by COUNT places
470 keeping only PREC bits of result. COUNT must be positive.
471 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
472 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
474 void
475 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
476 HOST_WIDE_INT count, unsigned int prec,
477 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
478 int arith)
480 unsigned HOST_WIDE_INT signmask;
482 signmask = (arith
483 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
484 : 0);
486 if (SHIFT_COUNT_TRUNCATED)
487 count %= prec;
489 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
491 /* Shifting by the host word size is undefined according to the
492 ANSI standard, so we must handle this as a special case. */
493 *hv = 0;
494 *lv = 0;
496 else if (count >= HOST_BITS_PER_WIDE_INT)
498 *hv = 0;
499 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
501 else
503 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
504 *lv = ((l1 >> count)
505 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
508 /* Zero / sign extend all bits that are beyond the precision. */
510 if (count >= (HOST_WIDE_INT)prec)
512 *hv = signmask;
513 *lv = signmask;
515 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
517 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
519 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
520 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
522 else
524 *hv = signmask;
525 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
526 *lv |= signmask << (prec - count);
530 /* Rotate the doubleword integer in L1, H1 left by COUNT places
531 keeping only PREC bits of result.
532 Rotate right if COUNT is negative.
533 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
535 void
536 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
537 HOST_WIDE_INT count, unsigned int prec,
538 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
540 unsigned HOST_WIDE_INT s1l, s2l;
541 HOST_WIDE_INT s1h, s2h;
543 count %= prec;
544 if (count < 0)
545 count += prec;
547 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
548 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
549 *lv = s1l | s2l;
550 *hv = s1h | s2h;
553 /* Rotate the doubleword integer in L1, H1 left by COUNT places
554 keeping only PREC bits of result. COUNT must be positive.
555 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
557 void
558 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
559 HOST_WIDE_INT count, unsigned int prec,
560 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
562 unsigned HOST_WIDE_INT s1l, s2l;
563 HOST_WIDE_INT s1h, s2h;
565 count %= prec;
566 if (count < 0)
567 count += prec;
569 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
570 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
571 *lv = s1l | s2l;
572 *hv = s1h | s2h;
575 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
576 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
577 CODE is a tree code for a kind of division, one of
578 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
579 or EXACT_DIV_EXPR
580 It controls how the quotient is rounded to an integer.
581 Return nonzero if the operation overflows.
582 UNS nonzero says do unsigned division. */
585 div_and_round_double (enum tree_code code, int uns,
586 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
587 HOST_WIDE_INT hnum_orig,
588 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
589 HOST_WIDE_INT hden_orig,
590 unsigned HOST_WIDE_INT *lquo,
591 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
592 HOST_WIDE_INT *hrem)
594 int quo_neg = 0;
595 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
596 HOST_WIDE_INT den[4], quo[4];
597 int i, j;
598 unsigned HOST_WIDE_INT work;
599 unsigned HOST_WIDE_INT carry = 0;
600 unsigned HOST_WIDE_INT lnum = lnum_orig;
601 HOST_WIDE_INT hnum = hnum_orig;
602 unsigned HOST_WIDE_INT lden = lden_orig;
603 HOST_WIDE_INT hden = hden_orig;
604 int overflow = 0;
606 if (hden == 0 && lden == 0)
607 overflow = 1, lden = 1;
609 /* Calculate quotient sign and convert operands to unsigned. */
610 if (!uns)
612 if (hnum < 0)
614 quo_neg = ~ quo_neg;
615 /* (minimum integer) / (-1) is the only overflow case. */
616 if (neg_double (lnum, hnum, &lnum, &hnum)
617 && ((HOST_WIDE_INT) lden & hden) == -1)
618 overflow = 1;
620 if (hden < 0)
622 quo_neg = ~ quo_neg;
623 neg_double (lden, hden, &lden, &hden);
627 if (hnum == 0 && hden == 0)
628 { /* single precision */
629 *hquo = *hrem = 0;
630 /* This unsigned division rounds toward zero. */
631 *lquo = lnum / lden;
632 goto finish_up;
635 if (hnum == 0)
636 { /* trivial case: dividend < divisor */
637 /* hden != 0 already checked. */
638 *hquo = *lquo = 0;
639 *hrem = hnum;
640 *lrem = lnum;
641 goto finish_up;
644 memset (quo, 0, sizeof quo);
646 memset (num, 0, sizeof num); /* to zero 9th element */
647 memset (den, 0, sizeof den);
649 encode (num, lnum, hnum);
650 encode (den, lden, hden);
652 /* Special code for when the divisor < BASE. */
653 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
655 /* hnum != 0 already checked. */
656 for (i = 4 - 1; i >= 0; i--)
658 work = num[i] + carry * BASE;
659 quo[i] = work / lden;
660 carry = work % lden;
663 else
665 /* Full double precision division,
666 with thanks to Don Knuth's "Seminumerical Algorithms". */
667 int num_hi_sig, den_hi_sig;
668 unsigned HOST_WIDE_INT quo_est, scale;
670 /* Find the highest nonzero divisor digit. */
671 for (i = 4 - 1;; i--)
672 if (den[i] != 0)
674 den_hi_sig = i;
675 break;
678 /* Insure that the first digit of the divisor is at least BASE/2.
679 This is required by the quotient digit estimation algorithm. */
681 scale = BASE / (den[den_hi_sig] + 1);
682 if (scale > 1)
683 { /* scale divisor and dividend */
684 carry = 0;
685 for (i = 0; i <= 4 - 1; i++)
687 work = (num[i] * scale) + carry;
688 num[i] = LOWPART (work);
689 carry = HIGHPART (work);
692 num[4] = carry;
693 carry = 0;
694 for (i = 0; i <= 4 - 1; i++)
696 work = (den[i] * scale) + carry;
697 den[i] = LOWPART (work);
698 carry = HIGHPART (work);
699 if (den[i] != 0) den_hi_sig = i;
703 num_hi_sig = 4;
705 /* Main loop */
706 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
708 /* Guess the next quotient digit, quo_est, by dividing the first
709 two remaining dividend digits by the high order quotient digit.
710 quo_est is never low and is at most 2 high. */
711 unsigned HOST_WIDE_INT tmp;
713 num_hi_sig = i + den_hi_sig + 1;
714 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
715 if (num[num_hi_sig] != den[den_hi_sig])
716 quo_est = work / den[den_hi_sig];
717 else
718 quo_est = BASE - 1;
720 /* Refine quo_est so it's usually correct, and at most one high. */
721 tmp = work - quo_est * den[den_hi_sig];
722 if (tmp < BASE
723 && (den[den_hi_sig - 1] * quo_est
724 > (tmp * BASE + num[num_hi_sig - 2])))
725 quo_est--;
727 /* Try QUO_EST as the quotient digit, by multiplying the
728 divisor by QUO_EST and subtracting from the remaining dividend.
729 Keep in mind that QUO_EST is the I - 1st digit. */
731 carry = 0;
732 for (j = 0; j <= den_hi_sig; j++)
734 work = quo_est * den[j] + carry;
735 carry = HIGHPART (work);
736 work = num[i + j] - LOWPART (work);
737 num[i + j] = LOWPART (work);
738 carry += HIGHPART (work) != 0;
741 /* If quo_est was high by one, then num[i] went negative and
742 we need to correct things. */
743 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
745 quo_est--;
746 carry = 0; /* add divisor back in */
747 for (j = 0; j <= den_hi_sig; j++)
749 work = num[i + j] + den[j] + carry;
750 carry = HIGHPART (work);
751 num[i + j] = LOWPART (work);
754 num [num_hi_sig] += carry;
757 /* Store the quotient digit. */
758 quo[i] = quo_est;
762 decode (quo, lquo, hquo);
764 finish_up:
765 /* If result is negative, make it so. */
766 if (quo_neg)
767 neg_double (*lquo, *hquo, lquo, hquo);
769 /* Compute trial remainder: rem = num - (quo * den) */
770 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
771 neg_double (*lrem, *hrem, lrem, hrem);
772 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
774 switch (code)
776 case TRUNC_DIV_EXPR:
777 case TRUNC_MOD_EXPR: /* round toward zero */
778 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
779 return overflow;
781 case FLOOR_DIV_EXPR:
782 case FLOOR_MOD_EXPR: /* round toward negative infinity */
783 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
785 /* quo = quo - 1; */
786 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
787 lquo, hquo);
789 else
790 return overflow;
791 break;
793 case CEIL_DIV_EXPR:
794 case CEIL_MOD_EXPR: /* round toward positive infinity */
795 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
797 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
798 lquo, hquo);
800 else
801 return overflow;
802 break;
804 case ROUND_DIV_EXPR:
805 case ROUND_MOD_EXPR: /* round to closest integer */
807 unsigned HOST_WIDE_INT labs_rem = *lrem;
808 HOST_WIDE_INT habs_rem = *hrem;
809 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
810 HOST_WIDE_INT habs_den = hden, htwice;
812 /* Get absolute values. */
813 if (*hrem < 0)
814 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
815 if (hden < 0)
816 neg_double (lden, hden, &labs_den, &habs_den);
818 /* If (2 * abs (lrem) >= abs (lden)) */
819 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
820 labs_rem, habs_rem, &ltwice, &htwice);
822 if (((unsigned HOST_WIDE_INT) habs_den
823 < (unsigned HOST_WIDE_INT) htwice)
824 || (((unsigned HOST_WIDE_INT) habs_den
825 == (unsigned HOST_WIDE_INT) htwice)
826 && (labs_den < ltwice)))
828 if (*hquo < 0)
829 /* quo = quo - 1; */
830 add_double (*lquo, *hquo,
831 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
832 else
833 /* quo = quo + 1; */
834 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
835 lquo, hquo);
837 else
838 return overflow;
840 break;
842 default:
843 gcc_unreachable ();
846 /* Compute true remainder: rem = num - (quo * den) */
847 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
848 neg_double (*lrem, *hrem, lrem, hrem);
849 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
850 return overflow;
853 /* If ARG2 divides ARG1 with zero remainder, carries out the division
854 of type CODE and returns the quotient.
855 Otherwise returns NULL_TREE. */
857 static tree
858 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
860 unsigned HOST_WIDE_INT int1l, int2l;
861 HOST_WIDE_INT int1h, int2h;
862 unsigned HOST_WIDE_INT quol, reml;
863 HOST_WIDE_INT quoh, remh;
864 tree type = TREE_TYPE (arg1);
865 int uns = TYPE_UNSIGNED (type);
867 int1l = TREE_INT_CST_LOW (arg1);
868 int1h = TREE_INT_CST_HIGH (arg1);
869 int2l = TREE_INT_CST_LOW (arg2);
870 int2h = TREE_INT_CST_HIGH (arg2);
872 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
873 &quol, &quoh, &reml, &remh);
874 if (remh != 0 || reml != 0)
875 return NULL_TREE;
877 return build_int_cst_wide (type, quol, quoh);
880 /* Return true if the built-in mathematical function specified by CODE
881 is odd, i.e. -f(x) == f(-x). */
883 static bool
884 negate_mathfn_p (enum built_in_function code)
886 switch (code)
888 CASE_FLT_FN (BUILT_IN_ASIN):
889 CASE_FLT_FN (BUILT_IN_ASINH):
890 CASE_FLT_FN (BUILT_IN_ATAN):
891 CASE_FLT_FN (BUILT_IN_ATANH):
892 CASE_FLT_FN (BUILT_IN_CBRT):
893 CASE_FLT_FN (BUILT_IN_SIN):
894 CASE_FLT_FN (BUILT_IN_SINH):
895 CASE_FLT_FN (BUILT_IN_TAN):
896 CASE_FLT_FN (BUILT_IN_TANH):
897 return true;
899 default:
900 break;
902 return false;
905 /* Check whether we may negate an integer constant T without causing
906 overflow. */
908 bool
909 may_negate_without_overflow_p (tree t)
911 unsigned HOST_WIDE_INT val;
912 unsigned int prec;
913 tree type;
915 gcc_assert (TREE_CODE (t) == INTEGER_CST);
917 type = TREE_TYPE (t);
918 if (TYPE_UNSIGNED (type))
919 return false;
921 prec = TYPE_PRECISION (type);
922 if (prec > HOST_BITS_PER_WIDE_INT)
924 if (TREE_INT_CST_LOW (t) != 0)
925 return true;
926 prec -= HOST_BITS_PER_WIDE_INT;
927 val = TREE_INT_CST_HIGH (t);
929 else
930 val = TREE_INT_CST_LOW (t);
931 if (prec < HOST_BITS_PER_WIDE_INT)
932 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
933 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
936 /* Determine whether an expression T can be cheaply negated using
937 the function negate_expr without introducing undefined overflow. */
939 static bool
940 negate_expr_p (tree t)
942 tree type;
944 if (t == 0)
945 return false;
947 type = TREE_TYPE (t);
949 STRIP_SIGN_NOPS (t);
950 switch (TREE_CODE (t))
952 case INTEGER_CST:
953 if (TYPE_UNSIGNED (type)
954 || (flag_wrapv && ! flag_trapv))
955 return true;
957 /* Check that -CST will not overflow type. */
958 return may_negate_without_overflow_p (t);
959 case BIT_NOT_EXPR:
960 return INTEGRAL_TYPE_P (type)
961 && (TYPE_UNSIGNED (type)
962 || (flag_wrapv && !flag_trapv));
964 case REAL_CST:
965 case NEGATE_EXPR:
966 return true;
968 case COMPLEX_CST:
969 return negate_expr_p (TREE_REALPART (t))
970 && negate_expr_p (TREE_IMAGPART (t));
972 case PLUS_EXPR:
973 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
974 return false;
975 /* -(A + B) -> (-B) - A. */
976 if (negate_expr_p (TREE_OPERAND (t, 1))
977 && reorder_operands_p (TREE_OPERAND (t, 0),
978 TREE_OPERAND (t, 1)))
979 return true;
980 /* -(A + B) -> (-A) - B. */
981 return negate_expr_p (TREE_OPERAND (t, 0));
983 case MINUS_EXPR:
984 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
985 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
986 && reorder_operands_p (TREE_OPERAND (t, 0),
987 TREE_OPERAND (t, 1));
989 case MULT_EXPR:
990 if (TYPE_UNSIGNED (TREE_TYPE (t)))
991 break;
993 /* Fall through. */
995 case RDIV_EXPR:
996 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
997 return negate_expr_p (TREE_OPERAND (t, 1))
998 || negate_expr_p (TREE_OPERAND (t, 0));
999 break;
1001 case TRUNC_DIV_EXPR:
1002 case ROUND_DIV_EXPR:
1003 case FLOOR_DIV_EXPR:
1004 case CEIL_DIV_EXPR:
1005 case EXACT_DIV_EXPR:
1006 if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
1007 break;
1008 return negate_expr_p (TREE_OPERAND (t, 1))
1009 || negate_expr_p (TREE_OPERAND (t, 0));
1011 case NOP_EXPR:
1012 /* Negate -((double)float) as (double)(-float). */
1013 if (TREE_CODE (type) == REAL_TYPE)
1015 tree tem = strip_float_extensions (t);
1016 if (tem != t)
1017 return negate_expr_p (tem);
1019 break;
1021 case CALL_EXPR:
1022 /* Negate -f(x) as f(-x). */
1023 if (negate_mathfn_p (builtin_mathfn_code (t)))
1024 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1025 break;
1027 case RSHIFT_EXPR:
1028 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1029 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1031 tree op1 = TREE_OPERAND (t, 1);
1032 if (TREE_INT_CST_HIGH (op1) == 0
1033 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1034 == TREE_INT_CST_LOW (op1))
1035 return true;
1037 break;
1039 default:
1040 break;
1042 return false;
1045 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1046 simplification is possible.
1047 If negate_expr_p would return true for T, NULL_TREE will never be
1048 returned. */
1050 static tree
1051 fold_negate_expr (tree t)
1053 tree type = TREE_TYPE (t);
1054 tree tem;
1056 switch (TREE_CODE (t))
1058 /* Convert - (~A) to A + 1. */
1059 case BIT_NOT_EXPR:
1060 if (INTEGRAL_TYPE_P (type))
1061 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1062 build_int_cst (type, 1));
1063 break;
1065 case INTEGER_CST:
1066 tem = fold_negate_const (t, type);
1067 if (! TREE_OVERFLOW (tem)
1068 || TYPE_UNSIGNED (type)
1069 || ! flag_trapv)
1070 return tem;
1071 break;
1073 case REAL_CST:
1074 tem = fold_negate_const (t, type);
1075 /* Two's complement FP formats, such as c4x, may overflow. */
1076 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1077 return tem;
1078 break;
1080 case COMPLEX_CST:
1082 tree rpart = negate_expr (TREE_REALPART (t));
1083 tree ipart = negate_expr (TREE_IMAGPART (t));
1085 if ((TREE_CODE (rpart) == REAL_CST
1086 && TREE_CODE (ipart) == REAL_CST)
1087 || (TREE_CODE (rpart) == INTEGER_CST
1088 && TREE_CODE (ipart) == INTEGER_CST))
1089 return build_complex (type, rpart, ipart);
1091 break;
1093 case NEGATE_EXPR:
1094 return TREE_OPERAND (t, 0);
1096 case PLUS_EXPR:
1097 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1099 /* -(A + B) -> (-B) - A. */
1100 if (negate_expr_p (TREE_OPERAND (t, 1))
1101 && reorder_operands_p (TREE_OPERAND (t, 0),
1102 TREE_OPERAND (t, 1)))
1104 tem = negate_expr (TREE_OPERAND (t, 1));
1105 return fold_build2 (MINUS_EXPR, type,
1106 tem, TREE_OPERAND (t, 0));
1109 /* -(A + B) -> (-A) - B. */
1110 if (negate_expr_p (TREE_OPERAND (t, 0)))
1112 tem = negate_expr (TREE_OPERAND (t, 0));
1113 return fold_build2 (MINUS_EXPR, type,
1114 tem, TREE_OPERAND (t, 1));
1117 break;
1119 case MINUS_EXPR:
1120 /* - (A - B) -> B - A */
1121 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1122 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1123 return fold_build2 (MINUS_EXPR, type,
1124 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1125 break;
1127 case MULT_EXPR:
1128 if (TYPE_UNSIGNED (type))
1129 break;
1131 /* Fall through. */
1133 case RDIV_EXPR:
1134 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1136 tem = TREE_OPERAND (t, 1);
1137 if (negate_expr_p (tem))
1138 return fold_build2 (TREE_CODE (t), type,
1139 TREE_OPERAND (t, 0), negate_expr (tem));
1140 tem = TREE_OPERAND (t, 0);
1141 if (negate_expr_p (tem))
1142 return fold_build2 (TREE_CODE (t), type,
1143 negate_expr (tem), TREE_OPERAND (t, 1));
1145 break;
1147 case TRUNC_DIV_EXPR:
1148 case ROUND_DIV_EXPR:
1149 case FLOOR_DIV_EXPR:
1150 case CEIL_DIV_EXPR:
1151 case EXACT_DIV_EXPR:
1152 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
1154 tem = TREE_OPERAND (t, 1);
1155 if (negate_expr_p (tem))
1156 return fold_build2 (TREE_CODE (t), type,
1157 TREE_OPERAND (t, 0), negate_expr (tem));
1158 tem = TREE_OPERAND (t, 0);
1159 if (negate_expr_p (tem))
1160 return fold_build2 (TREE_CODE (t), type,
1161 negate_expr (tem), TREE_OPERAND (t, 1));
1163 break;
1165 case NOP_EXPR:
1166 /* Convert -((double)float) into (double)(-float). */
1167 if (TREE_CODE (type) == REAL_TYPE)
1169 tem = strip_float_extensions (t);
1170 if (tem != t && negate_expr_p (tem))
1171 return negate_expr (tem);
1173 break;
1175 case CALL_EXPR:
1176 /* Negate -f(x) as f(-x). */
1177 if (negate_mathfn_p (builtin_mathfn_code (t))
1178 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1180 tree fndecl, arg, arglist;
1182 fndecl = get_callee_fndecl (t);
1183 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1184 arglist = build_tree_list (NULL_TREE, arg);
1185 return build_function_call_expr (fndecl, arglist);
1187 break;
1189 case RSHIFT_EXPR:
1190 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1191 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1193 tree op1 = TREE_OPERAND (t, 1);
1194 if (TREE_INT_CST_HIGH (op1) == 0
1195 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1196 == TREE_INT_CST_LOW (op1))
1198 tree ntype = TYPE_UNSIGNED (type)
1199 ? lang_hooks.types.signed_type (type)
1200 : lang_hooks.types.unsigned_type (type);
1201 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1202 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1203 return fold_convert (type, temp);
1206 break;
1208 default:
1209 break;
1212 return NULL_TREE;
1215 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1216 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1217 return NULL_TREE. */
1219 static tree
1220 negate_expr (tree t)
1222 tree type, tem;
1224 if (t == NULL_TREE)
1225 return NULL_TREE;
1227 type = TREE_TYPE (t);
1228 STRIP_SIGN_NOPS (t);
1230 tem = fold_negate_expr (t);
1231 if (!tem)
1232 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1233 return fold_convert (type, tem);
1236 /* Split a tree IN into a constant, literal and variable parts that could be
1237 combined with CODE to make IN. "constant" means an expression with
1238 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1239 commutative arithmetic operation. Store the constant part into *CONP,
1240 the literal in *LITP and return the variable part. If a part isn't
1241 present, set it to null. If the tree does not decompose in this way,
1242 return the entire tree as the variable part and the other parts as null.
1244 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1245 case, we negate an operand that was subtracted. Except if it is a
1246 literal for which we use *MINUS_LITP instead.
1248 If NEGATE_P is true, we are negating all of IN, again except a literal
1249 for which we use *MINUS_LITP instead.
1251 If IN is itself a literal or constant, return it as appropriate.
1253 Note that we do not guarantee that any of the three values will be the
1254 same type as IN, but they will have the same signedness and mode. */
1256 static tree
1257 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1258 tree *minus_litp, int negate_p)
1260 tree var = 0;
1262 *conp = 0;
1263 *litp = 0;
1264 *minus_litp = 0;
1266 /* Strip any conversions that don't change the machine mode or signedness. */
1267 STRIP_SIGN_NOPS (in);
1269 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1270 *litp = in;
1271 else if (TREE_CODE (in) == code
1272 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1273 /* We can associate addition and subtraction together (even
1274 though the C standard doesn't say so) for integers because
1275 the value is not affected. For reals, the value might be
1276 affected, so we can't. */
1277 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1278 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1280 tree op0 = TREE_OPERAND (in, 0);
1281 tree op1 = TREE_OPERAND (in, 1);
1282 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1283 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1285 /* First see if either of the operands is a literal, then a constant. */
1286 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1287 *litp = op0, op0 = 0;
1288 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1289 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1291 if (op0 != 0 && TREE_CONSTANT (op0))
1292 *conp = op0, op0 = 0;
1293 else if (op1 != 0 && TREE_CONSTANT (op1))
1294 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1296 /* If we haven't dealt with either operand, this is not a case we can
1297 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1298 if (op0 != 0 && op1 != 0)
1299 var = in;
1300 else if (op0 != 0)
1301 var = op0;
1302 else
1303 var = op1, neg_var_p = neg1_p;
1305 /* Now do any needed negations. */
1306 if (neg_litp_p)
1307 *minus_litp = *litp, *litp = 0;
1308 if (neg_conp_p)
1309 *conp = negate_expr (*conp);
1310 if (neg_var_p)
1311 var = negate_expr (var);
1313 else if (TREE_CONSTANT (in))
1314 *conp = in;
1315 else
1316 var = in;
1318 if (negate_p)
1320 if (*litp)
1321 *minus_litp = *litp, *litp = 0;
1322 else if (*minus_litp)
1323 *litp = *minus_litp, *minus_litp = 0;
1324 *conp = negate_expr (*conp);
1325 var = negate_expr (var);
1328 return var;
1331 /* Re-associate trees split by the above function. T1 and T2 are either
1332 expressions to associate or null. Return the new expression, if any. If
1333 we build an operation, do it in TYPE and with CODE. */
1335 static tree
1336 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1338 if (t1 == 0)
1339 return t2;
1340 else if (t2 == 0)
1341 return t1;
1343 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1344 try to fold this since we will have infinite recursion. But do
1345 deal with any NEGATE_EXPRs. */
1346 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1347 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1349 if (code == PLUS_EXPR)
1351 if (TREE_CODE (t1) == NEGATE_EXPR)
1352 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1353 fold_convert (type, TREE_OPERAND (t1, 0)));
1354 else if (TREE_CODE (t2) == NEGATE_EXPR)
1355 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1356 fold_convert (type, TREE_OPERAND (t2, 0)));
1357 else if (integer_zerop (t2))
1358 return fold_convert (type, t1);
1360 else if (code == MINUS_EXPR)
1362 if (integer_zerop (t2))
1363 return fold_convert (type, t1);
1366 return build2 (code, type, fold_convert (type, t1),
1367 fold_convert (type, t2));
1370 return fold_build2 (code, type, fold_convert (type, t1),
1371 fold_convert (type, t2));
1374 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1375 to produce a new constant. Return NULL_TREE if we don't know how
1376 to evaluate CODE at compile-time.
1378 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1380 tree
1381 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1383 unsigned HOST_WIDE_INT int1l, int2l;
1384 HOST_WIDE_INT int1h, int2h;
1385 unsigned HOST_WIDE_INT low;
1386 HOST_WIDE_INT hi;
1387 unsigned HOST_WIDE_INT garbagel;
1388 HOST_WIDE_INT garbageh;
1389 tree t;
1390 tree type = TREE_TYPE (arg1);
1391 int uns = TYPE_UNSIGNED (type);
1392 int is_sizetype
1393 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1394 int overflow = 0;
1396 int1l = TREE_INT_CST_LOW (arg1);
1397 int1h = TREE_INT_CST_HIGH (arg1);
1398 int2l = TREE_INT_CST_LOW (arg2);
1399 int2h = TREE_INT_CST_HIGH (arg2);
1401 switch (code)
1403 case BIT_IOR_EXPR:
1404 low = int1l | int2l, hi = int1h | int2h;
1405 break;
1407 case BIT_XOR_EXPR:
1408 low = int1l ^ int2l, hi = int1h ^ int2h;
1409 break;
1411 case BIT_AND_EXPR:
1412 low = int1l & int2l, hi = int1h & int2h;
1413 break;
1415 case RSHIFT_EXPR:
1416 int2l = -int2l;
1417 case LSHIFT_EXPR:
1418 /* It's unclear from the C standard whether shifts can overflow.
1419 The following code ignores overflow; perhaps a C standard
1420 interpretation ruling is needed. */
1421 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1422 &low, &hi, !uns);
1423 break;
1425 case RROTATE_EXPR:
1426 int2l = - int2l;
1427 case LROTATE_EXPR:
1428 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1429 &low, &hi);
1430 break;
1432 case PLUS_EXPR:
1433 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1434 break;
1436 case MINUS_EXPR:
1437 neg_double (int2l, int2h, &low, &hi);
1438 add_double (int1l, int1h, low, hi, &low, &hi);
1439 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1440 break;
1442 case MULT_EXPR:
1443 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1444 break;
1446 case TRUNC_DIV_EXPR:
1447 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1448 case EXACT_DIV_EXPR:
1449 /* This is a shortcut for a common special case. */
1450 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1451 && ! TREE_CONSTANT_OVERFLOW (arg1)
1452 && ! TREE_CONSTANT_OVERFLOW (arg2)
1453 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1455 if (code == CEIL_DIV_EXPR)
1456 int1l += int2l - 1;
1458 low = int1l / int2l, hi = 0;
1459 break;
1462 /* ... fall through ... */
1464 case ROUND_DIV_EXPR:
1465 if (int2h == 0 && int2l == 0)
1466 return NULL_TREE;
1467 if (int2h == 0 && int2l == 1)
1469 low = int1l, hi = int1h;
1470 break;
1472 if (int1l == int2l && int1h == int2h
1473 && ! (int1l == 0 && int1h == 0))
1475 low = 1, hi = 0;
1476 break;
1478 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1479 &low, &hi, &garbagel, &garbageh);
1480 break;
1482 case TRUNC_MOD_EXPR:
1483 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1484 /* This is a shortcut for a common special case. */
1485 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1486 && ! TREE_CONSTANT_OVERFLOW (arg1)
1487 && ! TREE_CONSTANT_OVERFLOW (arg2)
1488 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1490 if (code == CEIL_MOD_EXPR)
1491 int1l += int2l - 1;
1492 low = int1l % int2l, hi = 0;
1493 break;
1496 /* ... fall through ... */
1498 case ROUND_MOD_EXPR:
1499 if (int2h == 0 && int2l == 0)
1500 return NULL_TREE;
1501 overflow = div_and_round_double (code, uns,
1502 int1l, int1h, int2l, int2h,
1503 &garbagel, &garbageh, &low, &hi);
1504 break;
1506 case MIN_EXPR:
1507 case MAX_EXPR:
1508 if (uns)
1509 low = (((unsigned HOST_WIDE_INT) int1h
1510 < (unsigned HOST_WIDE_INT) int2h)
1511 || (((unsigned HOST_WIDE_INT) int1h
1512 == (unsigned HOST_WIDE_INT) int2h)
1513 && int1l < int2l));
1514 else
1515 low = (int1h < int2h
1516 || (int1h == int2h && int1l < int2l));
1518 if (low == (code == MIN_EXPR))
1519 low = int1l, hi = int1h;
1520 else
1521 low = int2l, hi = int2h;
1522 break;
1524 default:
1525 return NULL_TREE;
1528 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1530 if (notrunc)
1532 /* Propagate overflow flags ourselves. */
1533 if (((!uns || is_sizetype) && overflow)
1534 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1536 t = copy_node (t);
1537 TREE_OVERFLOW (t) = 1;
1538 TREE_CONSTANT_OVERFLOW (t) = 1;
1540 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1542 t = copy_node (t);
1543 TREE_CONSTANT_OVERFLOW (t) = 1;
1546 else
1547 t = force_fit_type (t, 1,
1548 ((!uns || is_sizetype) && overflow)
1549 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1550 TREE_CONSTANT_OVERFLOW (arg1)
1551 | TREE_CONSTANT_OVERFLOW (arg2));
1553 return t;
1556 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1557 constant. We assume ARG1 and ARG2 have the same data type, or at least
1558 are the same kind of constant and the same machine mode. Return zero if
1559 combining the constants is not allowed in the current operating mode.
1561 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1563 static tree
1564 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1566 /* Sanity check for the recursive cases. */
1567 if (!arg1 || !arg2)
1568 return NULL_TREE;
1570 STRIP_NOPS (arg1);
1571 STRIP_NOPS (arg2);
1573 if (TREE_CODE (arg1) == INTEGER_CST)
1574 return int_const_binop (code, arg1, arg2, notrunc);
1576 if (TREE_CODE (arg1) == REAL_CST)
1578 enum machine_mode mode;
1579 REAL_VALUE_TYPE d1;
1580 REAL_VALUE_TYPE d2;
1581 REAL_VALUE_TYPE value;
1582 REAL_VALUE_TYPE result;
1583 bool inexact;
1584 tree t, type;
1586 /* The following codes are handled by real_arithmetic. */
1587 switch (code)
1589 case PLUS_EXPR:
1590 case MINUS_EXPR:
1591 case MULT_EXPR:
1592 case RDIV_EXPR:
1593 case MIN_EXPR:
1594 case MAX_EXPR:
1595 break;
1597 default:
1598 return NULL_TREE;
1601 d1 = TREE_REAL_CST (arg1);
1602 d2 = TREE_REAL_CST (arg2);
1604 type = TREE_TYPE (arg1);
1605 mode = TYPE_MODE (type);
1607 /* Don't perform operation if we honor signaling NaNs and
1608 either operand is a NaN. */
1609 if (HONOR_SNANS (mode)
1610 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1611 return NULL_TREE;
1613 /* Don't perform operation if it would raise a division
1614 by zero exception. */
1615 if (code == RDIV_EXPR
1616 && REAL_VALUES_EQUAL (d2, dconst0)
1617 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1618 return NULL_TREE;
1620 /* If either operand is a NaN, just return it. Otherwise, set up
1621 for floating-point trap; we return an overflow. */
1622 if (REAL_VALUE_ISNAN (d1))
1623 return arg1;
1624 else if (REAL_VALUE_ISNAN (d2))
1625 return arg2;
1627 inexact = real_arithmetic (&value, code, &d1, &d2);
1628 real_convert (&result, mode, &value);
1630 /* Don't constant fold this floating point operation if
1631 the result has overflowed and flag_trapping_math. */
1632 if (flag_trapping_math
1633 && MODE_HAS_INFINITIES (mode)
1634 && REAL_VALUE_ISINF (result)
1635 && !REAL_VALUE_ISINF (d1)
1636 && !REAL_VALUE_ISINF (d2))
1637 return NULL_TREE;
1639 /* Don't constant fold this floating point operation if the
1640 result may dependent upon the run-time rounding mode and
1641 flag_rounding_math is set, or if GCC's software emulation
1642 is unable to accurately represent the result. */
1643 if ((flag_rounding_math
1644 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1645 && !flag_unsafe_math_optimizations))
1646 && (inexact || !real_identical (&result, &value)))
1647 return NULL_TREE;
1649 t = build_real (type, result);
1651 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1652 TREE_CONSTANT_OVERFLOW (t)
1653 = TREE_OVERFLOW (t)
1654 | TREE_CONSTANT_OVERFLOW (arg1)
1655 | TREE_CONSTANT_OVERFLOW (arg2);
1656 return t;
1659 if (TREE_CODE (arg1) == COMPLEX_CST)
1661 tree type = TREE_TYPE (arg1);
1662 tree r1 = TREE_REALPART (arg1);
1663 tree i1 = TREE_IMAGPART (arg1);
1664 tree r2 = TREE_REALPART (arg2);
1665 tree i2 = TREE_IMAGPART (arg2);
1666 tree real, imag;
1668 switch (code)
1670 case PLUS_EXPR:
1671 case MINUS_EXPR:
1672 real = const_binop (code, r1, r2, notrunc);
1673 imag = const_binop (code, i1, i2, notrunc);
1674 break;
1676 case MULT_EXPR:
1677 real = const_binop (MINUS_EXPR,
1678 const_binop (MULT_EXPR, r1, r2, notrunc),
1679 const_binop (MULT_EXPR, i1, i2, notrunc),
1680 notrunc);
1681 imag = const_binop (PLUS_EXPR,
1682 const_binop (MULT_EXPR, r1, i2, notrunc),
1683 const_binop (MULT_EXPR, i1, r2, notrunc),
1684 notrunc);
1685 break;
1687 case RDIV_EXPR:
1689 tree magsquared
1690 = const_binop (PLUS_EXPR,
1691 const_binop (MULT_EXPR, r2, r2, notrunc),
1692 const_binop (MULT_EXPR, i2, i2, notrunc),
1693 notrunc);
1694 tree t1
1695 = const_binop (PLUS_EXPR,
1696 const_binop (MULT_EXPR, r1, r2, notrunc),
1697 const_binop (MULT_EXPR, i1, i2, notrunc),
1698 notrunc);
1699 tree t2
1700 = const_binop (MINUS_EXPR,
1701 const_binop (MULT_EXPR, i1, r2, notrunc),
1702 const_binop (MULT_EXPR, r1, i2, notrunc),
1703 notrunc);
1705 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1706 code = TRUNC_DIV_EXPR;
1708 real = const_binop (code, t1, magsquared, notrunc);
1709 imag = const_binop (code, t2, magsquared, notrunc);
1711 break;
1713 default:
1714 return NULL_TREE;
1717 if (real && imag)
1718 return build_complex (type, real, imag);
1721 return NULL_TREE;
1724 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1725 indicates which particular sizetype to create. */
1727 tree
1728 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1730 return build_int_cst (sizetype_tab[(int) kind], number);
1733 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1734 is a tree code. The type of the result is taken from the operands.
1735 Both must be the same type integer type and it must be a size type.
1736 If the operands are constant, so is the result. */
1738 tree
1739 size_binop (enum tree_code code, tree arg0, tree arg1)
1741 tree type = TREE_TYPE (arg0);
1743 if (arg0 == error_mark_node || arg1 == error_mark_node)
1744 return error_mark_node;
1746 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1747 && type == TREE_TYPE (arg1));
1749 /* Handle the special case of two integer constants faster. */
1750 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1752 /* And some specific cases even faster than that. */
1753 if (code == PLUS_EXPR && integer_zerop (arg0))
1754 return arg1;
1755 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1756 && integer_zerop (arg1))
1757 return arg0;
1758 else if (code == MULT_EXPR && integer_onep (arg0))
1759 return arg1;
1761 /* Handle general case of two integer constants. */
1762 return int_const_binop (code, arg0, arg1, 0);
1765 return fold_build2 (code, type, arg0, arg1);
1768 /* Given two values, either both of sizetype or both of bitsizetype,
1769 compute the difference between the two values. Return the value
1770 in signed type corresponding to the type of the operands. */
1772 tree
1773 size_diffop (tree arg0, tree arg1)
1775 tree type = TREE_TYPE (arg0);
1776 tree ctype;
1778 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1779 && type == TREE_TYPE (arg1));
1781 /* If the type is already signed, just do the simple thing. */
1782 if (!TYPE_UNSIGNED (type))
1783 return size_binop (MINUS_EXPR, arg0, arg1);
1785 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1787 /* If either operand is not a constant, do the conversions to the signed
1788 type and subtract. The hardware will do the right thing with any
1789 overflow in the subtraction. */
1790 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1791 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1792 fold_convert (ctype, arg1));
1794 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1795 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1796 overflow) and negate (which can't either). Special-case a result
1797 of zero while we're here. */
1798 if (tree_int_cst_equal (arg0, arg1))
1799 return build_int_cst (ctype, 0);
1800 else if (tree_int_cst_lt (arg1, arg0))
1801 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1802 else
1803 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1804 fold_convert (ctype, size_binop (MINUS_EXPR,
1805 arg1, arg0)));
1808 /* A subroutine of fold_convert_const handling conversions of an
1809 INTEGER_CST to another integer type. */
1811 static tree
1812 fold_convert_const_int_from_int (tree type, tree arg1)
1814 tree t;
1816 /* Given an integer constant, make new constant with new type,
1817 appropriately sign-extended or truncated. */
1818 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1819 TREE_INT_CST_HIGH (arg1));
1821 t = force_fit_type (t,
1822 /* Don't set the overflow when
1823 converting a pointer */
1824 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1825 (TREE_INT_CST_HIGH (arg1) < 0
1826 && (TYPE_UNSIGNED (type)
1827 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1828 | TREE_OVERFLOW (arg1),
1829 TREE_CONSTANT_OVERFLOW (arg1));
1831 return t;
1834 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1835 to an integer type. */
1837 static tree
1838 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1840 int overflow = 0;
1841 tree t;
1843 /* The following code implements the floating point to integer
1844 conversion rules required by the Java Language Specification,
1845 that IEEE NaNs are mapped to zero and values that overflow
1846 the target precision saturate, i.e. values greater than
1847 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1848 are mapped to INT_MIN. These semantics are allowed by the
1849 C and C++ standards that simply state that the behavior of
1850 FP-to-integer conversion is unspecified upon overflow. */
1852 HOST_WIDE_INT high, low;
1853 REAL_VALUE_TYPE r;
1854 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1856 switch (code)
1858 case FIX_TRUNC_EXPR:
1859 real_trunc (&r, VOIDmode, &x);
1860 break;
1862 case FIX_CEIL_EXPR:
1863 real_ceil (&r, VOIDmode, &x);
1864 break;
1866 case FIX_FLOOR_EXPR:
1867 real_floor (&r, VOIDmode, &x);
1868 break;
1870 case FIX_ROUND_EXPR:
1871 real_round (&r, VOIDmode, &x);
1872 break;
1874 default:
1875 gcc_unreachable ();
1878 /* If R is NaN, return zero and show we have an overflow. */
1879 if (REAL_VALUE_ISNAN (r))
1881 overflow = 1;
1882 high = 0;
1883 low = 0;
1886 /* See if R is less than the lower bound or greater than the
1887 upper bound. */
1889 if (! overflow)
1891 tree lt = TYPE_MIN_VALUE (type);
1892 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1893 if (REAL_VALUES_LESS (r, l))
1895 overflow = 1;
1896 high = TREE_INT_CST_HIGH (lt);
1897 low = TREE_INT_CST_LOW (lt);
1901 if (! overflow)
1903 tree ut = TYPE_MAX_VALUE (type);
1904 if (ut)
1906 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1907 if (REAL_VALUES_LESS (u, r))
1909 overflow = 1;
1910 high = TREE_INT_CST_HIGH (ut);
1911 low = TREE_INT_CST_LOW (ut);
1916 if (! overflow)
1917 REAL_VALUE_TO_INT (&low, &high, r);
1919 t = build_int_cst_wide (type, low, high);
1921 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1922 TREE_CONSTANT_OVERFLOW (arg1));
1923 return t;
1926 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1927 to another floating point type. */
1929 static tree
1930 fold_convert_const_real_from_real (tree type, tree arg1)
1932 REAL_VALUE_TYPE value;
1933 tree t;
1935 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1936 t = build_real (type, value);
1938 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1939 TREE_CONSTANT_OVERFLOW (t)
1940 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1941 return t;
1944 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1945 type TYPE. If no simplification can be done return NULL_TREE. */
1947 static tree
1948 fold_convert_const (enum tree_code code, tree type, tree arg1)
1950 if (TREE_TYPE (arg1) == type)
1951 return arg1;
1953 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1955 if (TREE_CODE (arg1) == INTEGER_CST)
1956 return fold_convert_const_int_from_int (type, arg1);
1957 else if (TREE_CODE (arg1) == REAL_CST)
1958 return fold_convert_const_int_from_real (code, type, arg1);
1960 else if (TREE_CODE (type) == REAL_TYPE)
1962 if (TREE_CODE (arg1) == INTEGER_CST)
1963 return build_real_from_int_cst (type, arg1);
1964 if (TREE_CODE (arg1) == REAL_CST)
1965 return fold_convert_const_real_from_real (type, arg1);
1967 return NULL_TREE;
1970 /* Construct a vector of zero elements of vector type TYPE. */
1972 static tree
1973 build_zero_vector (tree type)
1975 tree elem, list;
1976 int i, units;
1978 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1979 units = TYPE_VECTOR_SUBPARTS (type);
1981 list = NULL_TREE;
1982 for (i = 0; i < units; i++)
1983 list = tree_cons (NULL_TREE, elem, list);
1984 return build_vector (type, list);
1987 /* Convert expression ARG to type TYPE. Used by the middle-end for
1988 simple conversions in preference to calling the front-end's convert. */
1990 tree
1991 fold_convert (tree type, tree arg)
1993 tree orig = TREE_TYPE (arg);
1994 tree tem;
1996 if (type == orig)
1997 return arg;
1999 if (TREE_CODE (arg) == ERROR_MARK
2000 || TREE_CODE (type) == ERROR_MARK
2001 || TREE_CODE (orig) == ERROR_MARK)
2002 return error_mark_node;
2004 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2005 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2006 TYPE_MAIN_VARIANT (orig)))
2007 return fold_build1 (NOP_EXPR, type, arg);
2009 switch (TREE_CODE (type))
2011 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2012 case POINTER_TYPE: case REFERENCE_TYPE:
2013 case OFFSET_TYPE:
2014 if (TREE_CODE (arg) == INTEGER_CST)
2016 tem = fold_convert_const (NOP_EXPR, type, arg);
2017 if (tem != NULL_TREE)
2018 return tem;
2020 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2021 || TREE_CODE (orig) == OFFSET_TYPE)
2022 return fold_build1 (NOP_EXPR, type, arg);
2023 if (TREE_CODE (orig) == COMPLEX_TYPE)
2025 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2026 return fold_convert (type, tem);
2028 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2029 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2030 return fold_build1 (NOP_EXPR, type, arg);
2032 case REAL_TYPE:
2033 if (TREE_CODE (arg) == INTEGER_CST)
2035 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2036 if (tem != NULL_TREE)
2037 return tem;
2039 else if (TREE_CODE (arg) == REAL_CST)
2041 tem = fold_convert_const (NOP_EXPR, type, arg);
2042 if (tem != NULL_TREE)
2043 return tem;
2046 switch (TREE_CODE (orig))
2048 case INTEGER_TYPE:
2049 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2050 case POINTER_TYPE: case REFERENCE_TYPE:
2051 return fold_build1 (FLOAT_EXPR, type, arg);
2053 case REAL_TYPE:
2054 return fold_build1 (NOP_EXPR, type, arg);
2056 case COMPLEX_TYPE:
2057 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2058 return fold_convert (type, tem);
2060 default:
2061 gcc_unreachable ();
2064 case COMPLEX_TYPE:
2065 switch (TREE_CODE (orig))
2067 case INTEGER_TYPE:
2068 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2069 case POINTER_TYPE: case REFERENCE_TYPE:
2070 case REAL_TYPE:
2071 return build2 (COMPLEX_EXPR, type,
2072 fold_convert (TREE_TYPE (type), arg),
2073 fold_convert (TREE_TYPE (type), integer_zero_node));
2074 case COMPLEX_TYPE:
2076 tree rpart, ipart;
2078 if (TREE_CODE (arg) == COMPLEX_EXPR)
2080 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2081 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2082 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2085 arg = save_expr (arg);
2086 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2087 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2088 rpart = fold_convert (TREE_TYPE (type), rpart);
2089 ipart = fold_convert (TREE_TYPE (type), ipart);
2090 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2093 default:
2094 gcc_unreachable ();
2097 case VECTOR_TYPE:
2098 if (integer_zerop (arg))
2099 return build_zero_vector (type);
2100 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2101 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2102 || TREE_CODE (orig) == VECTOR_TYPE);
2103 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2105 case VOID_TYPE:
2106 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2108 default:
2109 gcc_unreachable ();
2113 /* Return false if expr can be assumed not to be an lvalue, true
2114 otherwise. */
2116 static bool
2117 maybe_lvalue_p (tree x)
2119 /* We only need to wrap lvalue tree codes. */
2120 switch (TREE_CODE (x))
2122 case VAR_DECL:
2123 case PARM_DECL:
2124 case RESULT_DECL:
2125 case LABEL_DECL:
2126 case FUNCTION_DECL:
2127 case SSA_NAME:
2129 case COMPONENT_REF:
2130 case INDIRECT_REF:
2131 case ALIGN_INDIRECT_REF:
2132 case MISALIGNED_INDIRECT_REF:
2133 case ARRAY_REF:
2134 case ARRAY_RANGE_REF:
2135 case BIT_FIELD_REF:
2136 case OBJ_TYPE_REF:
2138 case REALPART_EXPR:
2139 case IMAGPART_EXPR:
2140 case PREINCREMENT_EXPR:
2141 case PREDECREMENT_EXPR:
2142 case SAVE_EXPR:
2143 case TRY_CATCH_EXPR:
2144 case WITH_CLEANUP_EXPR:
2145 case COMPOUND_EXPR:
2146 case MODIFY_EXPR:
2147 case TARGET_EXPR:
2148 case COND_EXPR:
2149 case BIND_EXPR:
2150 case MIN_EXPR:
2151 case MAX_EXPR:
2152 break;
2154 default:
2155 /* Assume the worst for front-end tree codes. */
2156 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2157 break;
2158 return false;
2161 return true;
2164 /* Return an expr equal to X but certainly not valid as an lvalue. */
2166 tree
2167 non_lvalue (tree x)
2169 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2170 us. */
2171 if (in_gimple_form)
2172 return x;
2174 if (! maybe_lvalue_p (x))
2175 return x;
2176 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2179 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2180 Zero means allow extended lvalues. */
2182 int pedantic_lvalues;
2184 /* When pedantic, return an expr equal to X but certainly not valid as a
2185 pedantic lvalue. Otherwise, return X. */
2187 static tree
2188 pedantic_non_lvalue (tree x)
2190 if (pedantic_lvalues)
2191 return non_lvalue (x);
2192 else
2193 return x;
2196 /* Given a tree comparison code, return the code that is the logical inverse
2197 of the given code. It is not safe to do this for floating-point
2198 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2199 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2201 enum tree_code
2202 invert_tree_comparison (enum tree_code code, bool honor_nans)
2204 if (honor_nans && flag_trapping_math)
2205 return ERROR_MARK;
2207 switch (code)
2209 case EQ_EXPR:
2210 return NE_EXPR;
2211 case NE_EXPR:
2212 return EQ_EXPR;
2213 case GT_EXPR:
2214 return honor_nans ? UNLE_EXPR : LE_EXPR;
2215 case GE_EXPR:
2216 return honor_nans ? UNLT_EXPR : LT_EXPR;
2217 case LT_EXPR:
2218 return honor_nans ? UNGE_EXPR : GE_EXPR;
2219 case LE_EXPR:
2220 return honor_nans ? UNGT_EXPR : GT_EXPR;
2221 case LTGT_EXPR:
2222 return UNEQ_EXPR;
2223 case UNEQ_EXPR:
2224 return LTGT_EXPR;
2225 case UNGT_EXPR:
2226 return LE_EXPR;
2227 case UNGE_EXPR:
2228 return LT_EXPR;
2229 case UNLT_EXPR:
2230 return GE_EXPR;
2231 case UNLE_EXPR:
2232 return GT_EXPR;
2233 case ORDERED_EXPR:
2234 return UNORDERED_EXPR;
2235 case UNORDERED_EXPR:
2236 return ORDERED_EXPR;
2237 default:
2238 gcc_unreachable ();
2242 /* Similar, but return the comparison that results if the operands are
2243 swapped. This is safe for floating-point. */
2245 enum tree_code
2246 swap_tree_comparison (enum tree_code code)
2248 switch (code)
2250 case EQ_EXPR:
2251 case NE_EXPR:
2252 case ORDERED_EXPR:
2253 case UNORDERED_EXPR:
2254 case LTGT_EXPR:
2255 case UNEQ_EXPR:
2256 return code;
2257 case GT_EXPR:
2258 return LT_EXPR;
2259 case GE_EXPR:
2260 return LE_EXPR;
2261 case LT_EXPR:
2262 return GT_EXPR;
2263 case LE_EXPR:
2264 return GE_EXPR;
2265 case UNGT_EXPR:
2266 return UNLT_EXPR;
2267 case UNGE_EXPR:
2268 return UNLE_EXPR;
2269 case UNLT_EXPR:
2270 return UNGT_EXPR;
2271 case UNLE_EXPR:
2272 return UNGE_EXPR;
2273 default:
2274 gcc_unreachable ();
2279 /* Convert a comparison tree code from an enum tree_code representation
2280 into a compcode bit-based encoding. This function is the inverse of
2281 compcode_to_comparison. */
2283 static enum comparison_code
2284 comparison_to_compcode (enum tree_code code)
2286 switch (code)
2288 case LT_EXPR:
2289 return COMPCODE_LT;
2290 case EQ_EXPR:
2291 return COMPCODE_EQ;
2292 case LE_EXPR:
2293 return COMPCODE_LE;
2294 case GT_EXPR:
2295 return COMPCODE_GT;
2296 case NE_EXPR:
2297 return COMPCODE_NE;
2298 case GE_EXPR:
2299 return COMPCODE_GE;
2300 case ORDERED_EXPR:
2301 return COMPCODE_ORD;
2302 case UNORDERED_EXPR:
2303 return COMPCODE_UNORD;
2304 case UNLT_EXPR:
2305 return COMPCODE_UNLT;
2306 case UNEQ_EXPR:
2307 return COMPCODE_UNEQ;
2308 case UNLE_EXPR:
2309 return COMPCODE_UNLE;
2310 case UNGT_EXPR:
2311 return COMPCODE_UNGT;
2312 case LTGT_EXPR:
2313 return COMPCODE_LTGT;
2314 case UNGE_EXPR:
2315 return COMPCODE_UNGE;
2316 default:
2317 gcc_unreachable ();
2321 /* Convert a compcode bit-based encoding of a comparison operator back
2322 to GCC's enum tree_code representation. This function is the
2323 inverse of comparison_to_compcode. */
2325 static enum tree_code
2326 compcode_to_comparison (enum comparison_code code)
2328 switch (code)
2330 case COMPCODE_LT:
2331 return LT_EXPR;
2332 case COMPCODE_EQ:
2333 return EQ_EXPR;
2334 case COMPCODE_LE:
2335 return LE_EXPR;
2336 case COMPCODE_GT:
2337 return GT_EXPR;
2338 case COMPCODE_NE:
2339 return NE_EXPR;
2340 case COMPCODE_GE:
2341 return GE_EXPR;
2342 case COMPCODE_ORD:
2343 return ORDERED_EXPR;
2344 case COMPCODE_UNORD:
2345 return UNORDERED_EXPR;
2346 case COMPCODE_UNLT:
2347 return UNLT_EXPR;
2348 case COMPCODE_UNEQ:
2349 return UNEQ_EXPR;
2350 case COMPCODE_UNLE:
2351 return UNLE_EXPR;
2352 case COMPCODE_UNGT:
2353 return UNGT_EXPR;
2354 case COMPCODE_LTGT:
2355 return LTGT_EXPR;
2356 case COMPCODE_UNGE:
2357 return UNGE_EXPR;
2358 default:
2359 gcc_unreachable ();
2363 /* Return a tree for the comparison which is the combination of
2364 doing the AND or OR (depending on CODE) of the two operations LCODE
2365 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2366 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2367 if this makes the transformation invalid. */
2369 tree
2370 combine_comparisons (enum tree_code code, enum tree_code lcode,
2371 enum tree_code rcode, tree truth_type,
2372 tree ll_arg, tree lr_arg)
2374 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2375 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2376 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2377 enum comparison_code compcode;
2379 switch (code)
2381 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2382 compcode = lcompcode & rcompcode;
2383 break;
2385 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2386 compcode = lcompcode | rcompcode;
2387 break;
2389 default:
2390 return NULL_TREE;
2393 if (!honor_nans)
2395 /* Eliminate unordered comparisons, as well as LTGT and ORD
2396 which are not used unless the mode has NaNs. */
2397 compcode &= ~COMPCODE_UNORD;
2398 if (compcode == COMPCODE_LTGT)
2399 compcode = COMPCODE_NE;
2400 else if (compcode == COMPCODE_ORD)
2401 compcode = COMPCODE_TRUE;
2403 else if (flag_trapping_math)
2405 /* Check that the original operation and the optimized ones will trap
2406 under the same condition. */
2407 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2408 && (lcompcode != COMPCODE_EQ)
2409 && (lcompcode != COMPCODE_ORD);
2410 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2411 && (rcompcode != COMPCODE_EQ)
2412 && (rcompcode != COMPCODE_ORD);
2413 bool trap = (compcode & COMPCODE_UNORD) == 0
2414 && (compcode != COMPCODE_EQ)
2415 && (compcode != COMPCODE_ORD);
2417 /* In a short-circuited boolean expression the LHS might be
2418 such that the RHS, if evaluated, will never trap. For
2419 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2420 if neither x nor y is NaN. (This is a mixed blessing: for
2421 example, the expression above will never trap, hence
2422 optimizing it to x < y would be invalid). */
2423 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2424 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2425 rtrap = false;
2427 /* If the comparison was short-circuited, and only the RHS
2428 trapped, we may now generate a spurious trap. */
2429 if (rtrap && !ltrap
2430 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2431 return NULL_TREE;
2433 /* If we changed the conditions that cause a trap, we lose. */
2434 if ((ltrap || rtrap) != trap)
2435 return NULL_TREE;
2438 if (compcode == COMPCODE_TRUE)
2439 return constant_boolean_node (true, truth_type);
2440 else if (compcode == COMPCODE_FALSE)
2441 return constant_boolean_node (false, truth_type);
2442 else
2443 return fold_build2 (compcode_to_comparison (compcode),
2444 truth_type, ll_arg, lr_arg);
2447 /* Return nonzero if CODE is a tree code that represents a truth value. */
2449 static int
2450 truth_value_p (enum tree_code code)
2452 return (TREE_CODE_CLASS (code) == tcc_comparison
2453 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2454 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2455 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2458 /* Return nonzero if two operands (typically of the same tree node)
2459 are necessarily equal. If either argument has side-effects this
2460 function returns zero. FLAGS modifies behavior as follows:
2462 If OEP_ONLY_CONST is set, only return nonzero for constants.
2463 This function tests whether the operands are indistinguishable;
2464 it does not test whether they are equal using C's == operation.
2465 The distinction is important for IEEE floating point, because
2466 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2467 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2469 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2470 even though it may hold multiple values during a function.
2471 This is because a GCC tree node guarantees that nothing else is
2472 executed between the evaluation of its "operands" (which may often
2473 be evaluated in arbitrary order). Hence if the operands themselves
2474 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2475 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2476 unset means assuming isochronic (or instantaneous) tree equivalence.
2477 Unless comparing arbitrary expression trees, such as from different
2478 statements, this flag can usually be left unset.
2480 If OEP_PURE_SAME is set, then pure functions with identical arguments
2481 are considered the same. It is used when the caller has other ways
2482 to ensure that global memory is unchanged in between. */
2485 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2487 /* If either is ERROR_MARK, they aren't equal. */
2488 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2489 return 0;
2491 /* If both types don't have the same signedness, then we can't consider
2492 them equal. We must check this before the STRIP_NOPS calls
2493 because they may change the signedness of the arguments. */
2494 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2495 return 0;
2497 /* If both types don't have the same precision, then it is not safe
2498 to strip NOPs. */
2499 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2500 return 0;
2502 STRIP_NOPS (arg0);
2503 STRIP_NOPS (arg1);
2505 /* In case both args are comparisons but with different comparison
2506 code, try to swap the comparison operands of one arg to produce
2507 a match and compare that variant. */
2508 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2509 && COMPARISON_CLASS_P (arg0)
2510 && COMPARISON_CLASS_P (arg1))
2512 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2514 if (TREE_CODE (arg0) == swap_code)
2515 return operand_equal_p (TREE_OPERAND (arg0, 0),
2516 TREE_OPERAND (arg1, 1), flags)
2517 && operand_equal_p (TREE_OPERAND (arg0, 1),
2518 TREE_OPERAND (arg1, 0), flags);
2521 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2522 /* This is needed for conversions and for COMPONENT_REF.
2523 Might as well play it safe and always test this. */
2524 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2525 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2526 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2527 return 0;
2529 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2530 We don't care about side effects in that case because the SAVE_EXPR
2531 takes care of that for us. In all other cases, two expressions are
2532 equal if they have no side effects. If we have two identical
2533 expressions with side effects that should be treated the same due
2534 to the only side effects being identical SAVE_EXPR's, that will
2535 be detected in the recursive calls below. */
2536 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2537 && (TREE_CODE (arg0) == SAVE_EXPR
2538 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2539 return 1;
2541 /* Next handle constant cases, those for which we can return 1 even
2542 if ONLY_CONST is set. */
2543 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2544 switch (TREE_CODE (arg0))
2546 case INTEGER_CST:
2547 return (! TREE_CONSTANT_OVERFLOW (arg0)
2548 && ! TREE_CONSTANT_OVERFLOW (arg1)
2549 && tree_int_cst_equal (arg0, arg1));
2551 case REAL_CST:
2552 return (! TREE_CONSTANT_OVERFLOW (arg0)
2553 && ! TREE_CONSTANT_OVERFLOW (arg1)
2554 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2555 TREE_REAL_CST (arg1)));
2557 case VECTOR_CST:
2559 tree v1, v2;
2561 if (TREE_CONSTANT_OVERFLOW (arg0)
2562 || TREE_CONSTANT_OVERFLOW (arg1))
2563 return 0;
2565 v1 = TREE_VECTOR_CST_ELTS (arg0);
2566 v2 = TREE_VECTOR_CST_ELTS (arg1);
2567 while (v1 && v2)
2569 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2570 flags))
2571 return 0;
2572 v1 = TREE_CHAIN (v1);
2573 v2 = TREE_CHAIN (v2);
2576 return v1 == v2;
2579 case COMPLEX_CST:
2580 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2581 flags)
2582 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2583 flags));
2585 case STRING_CST:
2586 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2587 && ! memcmp (TREE_STRING_POINTER (arg0),
2588 TREE_STRING_POINTER (arg1),
2589 TREE_STRING_LENGTH (arg0)));
2591 case ADDR_EXPR:
2592 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2594 default:
2595 break;
2598 if (flags & OEP_ONLY_CONST)
2599 return 0;
2601 /* Define macros to test an operand from arg0 and arg1 for equality and a
2602 variant that allows null and views null as being different from any
2603 non-null value. In the latter case, if either is null, the both
2604 must be; otherwise, do the normal comparison. */
2605 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2606 TREE_OPERAND (arg1, N), flags)
2608 #define OP_SAME_WITH_NULL(N) \
2609 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2610 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2612 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2614 case tcc_unary:
2615 /* Two conversions are equal only if signedness and modes match. */
2616 switch (TREE_CODE (arg0))
2618 case NOP_EXPR:
2619 case CONVERT_EXPR:
2620 case FIX_CEIL_EXPR:
2621 case FIX_TRUNC_EXPR:
2622 case FIX_FLOOR_EXPR:
2623 case FIX_ROUND_EXPR:
2624 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2625 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2626 return 0;
2627 break;
2628 default:
2629 break;
2632 return OP_SAME (0);
2635 case tcc_comparison:
2636 case tcc_binary:
2637 if (OP_SAME (0) && OP_SAME (1))
2638 return 1;
2640 /* For commutative ops, allow the other order. */
2641 return (commutative_tree_code (TREE_CODE (arg0))
2642 && operand_equal_p (TREE_OPERAND (arg0, 0),
2643 TREE_OPERAND (arg1, 1), flags)
2644 && operand_equal_p (TREE_OPERAND (arg0, 1),
2645 TREE_OPERAND (arg1, 0), flags));
2647 case tcc_reference:
2648 /* If either of the pointer (or reference) expressions we are
2649 dereferencing contain a side effect, these cannot be equal. */
2650 if (TREE_SIDE_EFFECTS (arg0)
2651 || TREE_SIDE_EFFECTS (arg1))
2652 return 0;
2654 switch (TREE_CODE (arg0))
2656 case INDIRECT_REF:
2657 case ALIGN_INDIRECT_REF:
2658 case MISALIGNED_INDIRECT_REF:
2659 case REALPART_EXPR:
2660 case IMAGPART_EXPR:
2661 return OP_SAME (0);
2663 case ARRAY_REF:
2664 case ARRAY_RANGE_REF:
2665 /* Operands 2 and 3 may be null. */
2666 return (OP_SAME (0)
2667 && OP_SAME (1)
2668 && OP_SAME_WITH_NULL (2)
2669 && OP_SAME_WITH_NULL (3));
2671 case COMPONENT_REF:
2672 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2673 may be NULL when we're called to compare MEM_EXPRs. */
2674 return OP_SAME_WITH_NULL (0)
2675 && OP_SAME (1)
2676 && OP_SAME_WITH_NULL (2);
2678 case BIT_FIELD_REF:
2679 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2681 default:
2682 return 0;
2685 case tcc_expression:
2686 switch (TREE_CODE (arg0))
2688 case ADDR_EXPR:
2689 case TRUTH_NOT_EXPR:
2690 return OP_SAME (0);
2692 case TRUTH_ANDIF_EXPR:
2693 case TRUTH_ORIF_EXPR:
2694 return OP_SAME (0) && OP_SAME (1);
2696 case TRUTH_AND_EXPR:
2697 case TRUTH_OR_EXPR:
2698 case TRUTH_XOR_EXPR:
2699 if (OP_SAME (0) && OP_SAME (1))
2700 return 1;
2702 /* Otherwise take into account this is a commutative operation. */
2703 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2704 TREE_OPERAND (arg1, 1), flags)
2705 && operand_equal_p (TREE_OPERAND (arg0, 1),
2706 TREE_OPERAND (arg1, 0), flags));
2708 case CALL_EXPR:
2709 /* If the CALL_EXPRs call different functions, then they
2710 clearly can not be equal. */
2711 if (!OP_SAME (0))
2712 return 0;
2715 unsigned int cef = call_expr_flags (arg0);
2716 if (flags & OEP_PURE_SAME)
2717 cef &= ECF_CONST | ECF_PURE;
2718 else
2719 cef &= ECF_CONST;
2720 if (!cef)
2721 return 0;
2724 /* Now see if all the arguments are the same. operand_equal_p
2725 does not handle TREE_LIST, so we walk the operands here
2726 feeding them to operand_equal_p. */
2727 arg0 = TREE_OPERAND (arg0, 1);
2728 arg1 = TREE_OPERAND (arg1, 1);
2729 while (arg0 && arg1)
2731 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2732 flags))
2733 return 0;
2735 arg0 = TREE_CHAIN (arg0);
2736 arg1 = TREE_CHAIN (arg1);
2739 /* If we get here and both argument lists are exhausted
2740 then the CALL_EXPRs are equal. */
2741 return ! (arg0 || arg1);
2743 default:
2744 return 0;
2747 case tcc_declaration:
2748 /* Consider __builtin_sqrt equal to sqrt. */
2749 return (TREE_CODE (arg0) == FUNCTION_DECL
2750 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2751 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2752 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2754 default:
2755 return 0;
2758 #undef OP_SAME
2759 #undef OP_SAME_WITH_NULL
2762 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2763 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2765 When in doubt, return 0. */
2767 static int
2768 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2770 int unsignedp1, unsignedpo;
2771 tree primarg0, primarg1, primother;
2772 unsigned int correct_width;
2774 if (operand_equal_p (arg0, arg1, 0))
2775 return 1;
2777 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2778 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2779 return 0;
2781 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2782 and see if the inner values are the same. This removes any
2783 signedness comparison, which doesn't matter here. */
2784 primarg0 = arg0, primarg1 = arg1;
2785 STRIP_NOPS (primarg0);
2786 STRIP_NOPS (primarg1);
2787 if (operand_equal_p (primarg0, primarg1, 0))
2788 return 1;
2790 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2791 actual comparison operand, ARG0.
2793 First throw away any conversions to wider types
2794 already present in the operands. */
2796 primarg1 = get_narrower (arg1, &unsignedp1);
2797 primother = get_narrower (other, &unsignedpo);
2799 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2800 if (unsignedp1 == unsignedpo
2801 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2802 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2804 tree type = TREE_TYPE (arg0);
2806 /* Make sure shorter operand is extended the right way
2807 to match the longer operand. */
2808 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2809 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2811 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2812 return 1;
2815 return 0;
2818 /* See if ARG is an expression that is either a comparison or is performing
2819 arithmetic on comparisons. The comparisons must only be comparing
2820 two different values, which will be stored in *CVAL1 and *CVAL2; if
2821 they are nonzero it means that some operands have already been found.
2822 No variables may be used anywhere else in the expression except in the
2823 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2824 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2826 If this is true, return 1. Otherwise, return zero. */
2828 static int
2829 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2831 enum tree_code code = TREE_CODE (arg);
2832 enum tree_code_class class = TREE_CODE_CLASS (code);
2834 /* We can handle some of the tcc_expression cases here. */
2835 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2836 class = tcc_unary;
2837 else if (class == tcc_expression
2838 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2839 || code == COMPOUND_EXPR))
2840 class = tcc_binary;
2842 else if (class == tcc_expression && code == SAVE_EXPR
2843 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2845 /* If we've already found a CVAL1 or CVAL2, this expression is
2846 two complex to handle. */
2847 if (*cval1 || *cval2)
2848 return 0;
2850 class = tcc_unary;
2851 *save_p = 1;
2854 switch (class)
2856 case tcc_unary:
2857 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2859 case tcc_binary:
2860 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2861 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2862 cval1, cval2, save_p));
2864 case tcc_constant:
2865 return 1;
2867 case tcc_expression:
2868 if (code == COND_EXPR)
2869 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2870 cval1, cval2, save_p)
2871 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2872 cval1, cval2, save_p)
2873 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2874 cval1, cval2, save_p));
2875 return 0;
2877 case tcc_comparison:
2878 /* First see if we can handle the first operand, then the second. For
2879 the second operand, we know *CVAL1 can't be zero. It must be that
2880 one side of the comparison is each of the values; test for the
2881 case where this isn't true by failing if the two operands
2882 are the same. */
2884 if (operand_equal_p (TREE_OPERAND (arg, 0),
2885 TREE_OPERAND (arg, 1), 0))
2886 return 0;
2888 if (*cval1 == 0)
2889 *cval1 = TREE_OPERAND (arg, 0);
2890 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2892 else if (*cval2 == 0)
2893 *cval2 = TREE_OPERAND (arg, 0);
2894 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2896 else
2897 return 0;
2899 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2901 else if (*cval2 == 0)
2902 *cval2 = TREE_OPERAND (arg, 1);
2903 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2905 else
2906 return 0;
2908 return 1;
2910 default:
2911 return 0;
2915 /* ARG is a tree that is known to contain just arithmetic operations and
2916 comparisons. Evaluate the operations in the tree substituting NEW0 for
2917 any occurrence of OLD0 as an operand of a comparison and likewise for
2918 NEW1 and OLD1. */
2920 static tree
2921 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2923 tree type = TREE_TYPE (arg);
2924 enum tree_code code = TREE_CODE (arg);
2925 enum tree_code_class class = TREE_CODE_CLASS (code);
2927 /* We can handle some of the tcc_expression cases here. */
2928 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2929 class = tcc_unary;
2930 else if (class == tcc_expression
2931 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2932 class = tcc_binary;
2934 switch (class)
2936 case tcc_unary:
2937 return fold_build1 (code, type,
2938 eval_subst (TREE_OPERAND (arg, 0),
2939 old0, new0, old1, new1));
2941 case tcc_binary:
2942 return fold_build2 (code, type,
2943 eval_subst (TREE_OPERAND (arg, 0),
2944 old0, new0, old1, new1),
2945 eval_subst (TREE_OPERAND (arg, 1),
2946 old0, new0, old1, new1));
2948 case tcc_expression:
2949 switch (code)
2951 case SAVE_EXPR:
2952 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2954 case COMPOUND_EXPR:
2955 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2957 case COND_EXPR:
2958 return fold_build3 (code, type,
2959 eval_subst (TREE_OPERAND (arg, 0),
2960 old0, new0, old1, new1),
2961 eval_subst (TREE_OPERAND (arg, 1),
2962 old0, new0, old1, new1),
2963 eval_subst (TREE_OPERAND (arg, 2),
2964 old0, new0, old1, new1));
2965 default:
2966 break;
2968 /* Fall through - ??? */
2970 case tcc_comparison:
2972 tree arg0 = TREE_OPERAND (arg, 0);
2973 tree arg1 = TREE_OPERAND (arg, 1);
2975 /* We need to check both for exact equality and tree equality. The
2976 former will be true if the operand has a side-effect. In that
2977 case, we know the operand occurred exactly once. */
2979 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2980 arg0 = new0;
2981 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2982 arg0 = new1;
2984 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2985 arg1 = new0;
2986 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2987 arg1 = new1;
2989 return fold_build2 (code, type, arg0, arg1);
2992 default:
2993 return arg;
2997 /* Return a tree for the case when the result of an expression is RESULT
2998 converted to TYPE and OMITTED was previously an operand of the expression
2999 but is now not needed (e.g., we folded OMITTED * 0).
3001 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3002 the conversion of RESULT to TYPE. */
3004 tree
3005 omit_one_operand (tree type, tree result, tree omitted)
3007 tree t = fold_convert (type, result);
3009 if (TREE_SIDE_EFFECTS (omitted))
3010 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3012 return non_lvalue (t);
3015 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3017 static tree
3018 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3020 tree t = fold_convert (type, result);
3022 if (TREE_SIDE_EFFECTS (omitted))
3023 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3025 return pedantic_non_lvalue (t);
3028 /* Return a tree for the case when the result of an expression is RESULT
3029 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3030 of the expression but are now not needed.
3032 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3033 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3034 evaluated before OMITTED2. Otherwise, if neither has side effects,
3035 just do the conversion of RESULT to TYPE. */
3037 tree
3038 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3040 tree t = fold_convert (type, result);
3042 if (TREE_SIDE_EFFECTS (omitted2))
3043 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3044 if (TREE_SIDE_EFFECTS (omitted1))
3045 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3047 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3051 /* Return a simplified tree node for the truth-negation of ARG. This
3052 never alters ARG itself. We assume that ARG is an operation that
3053 returns a truth value (0 or 1).
3055 FIXME: one would think we would fold the result, but it causes
3056 problems with the dominator optimizer. */
3058 tree
3059 fold_truth_not_expr (tree arg)
3061 tree type = TREE_TYPE (arg);
3062 enum tree_code code = TREE_CODE (arg);
3064 /* If this is a comparison, we can simply invert it, except for
3065 floating-point non-equality comparisons, in which case we just
3066 enclose a TRUTH_NOT_EXPR around what we have. */
3068 if (TREE_CODE_CLASS (code) == tcc_comparison)
3070 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3071 if (FLOAT_TYPE_P (op_type)
3072 && flag_trapping_math
3073 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3074 && code != NE_EXPR && code != EQ_EXPR)
3075 return NULL_TREE;
3076 else
3078 code = invert_tree_comparison (code,
3079 HONOR_NANS (TYPE_MODE (op_type)));
3080 if (code == ERROR_MARK)
3081 return NULL_TREE;
3082 else
3083 return build2 (code, type,
3084 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3088 switch (code)
3090 case INTEGER_CST:
3091 return constant_boolean_node (integer_zerop (arg), type);
3093 case TRUTH_AND_EXPR:
3094 return build2 (TRUTH_OR_EXPR, type,
3095 invert_truthvalue (TREE_OPERAND (arg, 0)),
3096 invert_truthvalue (TREE_OPERAND (arg, 1)));
3098 case TRUTH_OR_EXPR:
3099 return build2 (TRUTH_AND_EXPR, type,
3100 invert_truthvalue (TREE_OPERAND (arg, 0)),
3101 invert_truthvalue (TREE_OPERAND (arg, 1)));
3103 case TRUTH_XOR_EXPR:
3104 /* Here we can invert either operand. We invert the first operand
3105 unless the second operand is a TRUTH_NOT_EXPR in which case our
3106 result is the XOR of the first operand with the inside of the
3107 negation of the second operand. */
3109 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3110 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3111 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3112 else
3113 return build2 (TRUTH_XOR_EXPR, type,
3114 invert_truthvalue (TREE_OPERAND (arg, 0)),
3115 TREE_OPERAND (arg, 1));
3117 case TRUTH_ANDIF_EXPR:
3118 return build2 (TRUTH_ORIF_EXPR, type,
3119 invert_truthvalue (TREE_OPERAND (arg, 0)),
3120 invert_truthvalue (TREE_OPERAND (arg, 1)));
3122 case TRUTH_ORIF_EXPR:
3123 return build2 (TRUTH_ANDIF_EXPR, type,
3124 invert_truthvalue (TREE_OPERAND (arg, 0)),
3125 invert_truthvalue (TREE_OPERAND (arg, 1)));
3127 case TRUTH_NOT_EXPR:
3128 return TREE_OPERAND (arg, 0);
3130 case COND_EXPR:
3132 tree arg1 = TREE_OPERAND (arg, 1);
3133 tree arg2 = TREE_OPERAND (arg, 2);
3134 /* A COND_EXPR may have a throw as one operand, which
3135 then has void type. Just leave void operands
3136 as they are. */
3137 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3138 VOID_TYPE_P (TREE_TYPE (arg1))
3139 ? arg1 : invert_truthvalue (arg1),
3140 VOID_TYPE_P (TREE_TYPE (arg2))
3141 ? arg2 : invert_truthvalue (arg2));
3144 case COMPOUND_EXPR:
3145 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3146 invert_truthvalue (TREE_OPERAND (arg, 1)));
3148 case NON_LVALUE_EXPR:
3149 return invert_truthvalue (TREE_OPERAND (arg, 0));
3151 case NOP_EXPR:
3152 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3153 return build1 (TRUTH_NOT_EXPR, type, arg);
3155 case CONVERT_EXPR:
3156 case FLOAT_EXPR:
3157 return build1 (TREE_CODE (arg), type,
3158 invert_truthvalue (TREE_OPERAND (arg, 0)));
3160 case BIT_AND_EXPR:
3161 if (!integer_onep (TREE_OPERAND (arg, 1)))
3162 break;
3163 return build2 (EQ_EXPR, type, arg,
3164 build_int_cst (type, 0));
3166 case SAVE_EXPR:
3167 return build1 (TRUTH_NOT_EXPR, type, arg);
3169 case CLEANUP_POINT_EXPR:
3170 return build1 (CLEANUP_POINT_EXPR, type,
3171 invert_truthvalue (TREE_OPERAND (arg, 0)));
3173 default:
3174 break;
3177 return NULL_TREE;
3180 /* Return a simplified tree node for the truth-negation of ARG. This
3181 never alters ARG itself. We assume that ARG is an operation that
3182 returns a truth value (0 or 1).
3184 FIXME: one would think we would fold the result, but it causes
3185 problems with the dominator optimizer. */
3187 tree
3188 invert_truthvalue (tree arg)
3190 tree tem;
3192 if (TREE_CODE (arg) == ERROR_MARK)
3193 return arg;
3195 tem = fold_truth_not_expr (arg);
3196 if (!tem)
3197 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3199 return tem;
3202 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3203 operands are another bit-wise operation with a common input. If so,
3204 distribute the bit operations to save an operation and possibly two if
3205 constants are involved. For example, convert
3206 (A | B) & (A | C) into A | (B & C)
3207 Further simplification will occur if B and C are constants.
3209 If this optimization cannot be done, 0 will be returned. */
3211 static tree
3212 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3214 tree common;
3215 tree left, right;
3217 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3218 || TREE_CODE (arg0) == code
3219 || (TREE_CODE (arg0) != BIT_AND_EXPR
3220 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3221 return 0;
3223 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3225 common = TREE_OPERAND (arg0, 0);
3226 left = TREE_OPERAND (arg0, 1);
3227 right = TREE_OPERAND (arg1, 1);
3229 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3231 common = TREE_OPERAND (arg0, 0);
3232 left = TREE_OPERAND (arg0, 1);
3233 right = TREE_OPERAND (arg1, 0);
3235 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3237 common = TREE_OPERAND (arg0, 1);
3238 left = TREE_OPERAND (arg0, 0);
3239 right = TREE_OPERAND (arg1, 1);
3241 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3243 common = TREE_OPERAND (arg0, 1);
3244 left = TREE_OPERAND (arg0, 0);
3245 right = TREE_OPERAND (arg1, 0);
3247 else
3248 return 0;
3250 return fold_build2 (TREE_CODE (arg0), type, common,
3251 fold_build2 (code, type, left, right));
3254 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3255 with code CODE. This optimization is unsafe. */
3256 static tree
3257 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3259 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3260 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3262 /* (A / C) +- (B / C) -> (A +- B) / C. */
3263 if (mul0 == mul1
3264 && operand_equal_p (TREE_OPERAND (arg0, 1),
3265 TREE_OPERAND (arg1, 1), 0))
3266 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3267 fold_build2 (code, type,
3268 TREE_OPERAND (arg0, 0),
3269 TREE_OPERAND (arg1, 0)),
3270 TREE_OPERAND (arg0, 1));
3272 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3273 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3274 TREE_OPERAND (arg1, 0), 0)
3275 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3276 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3278 REAL_VALUE_TYPE r0, r1;
3279 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3280 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3281 if (!mul0)
3282 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3283 if (!mul1)
3284 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3285 real_arithmetic (&r0, code, &r0, &r1);
3286 return fold_build2 (MULT_EXPR, type,
3287 TREE_OPERAND (arg0, 0),
3288 build_real (type, r0));
3291 return NULL_TREE;
3294 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3295 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3297 static tree
3298 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3299 int unsignedp)
3301 tree result;
3303 if (bitpos == 0)
3305 tree size = TYPE_SIZE (TREE_TYPE (inner));
3306 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3307 || POINTER_TYPE_P (TREE_TYPE (inner)))
3308 && host_integerp (size, 0)
3309 && tree_low_cst (size, 0) == bitsize)
3310 return fold_convert (type, inner);
3313 result = build3 (BIT_FIELD_REF, type, inner,
3314 size_int (bitsize), bitsize_int (bitpos));
3316 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3318 return result;
3321 /* Optimize a bit-field compare.
3323 There are two cases: First is a compare against a constant and the
3324 second is a comparison of two items where the fields are at the same
3325 bit position relative to the start of a chunk (byte, halfword, word)
3326 large enough to contain it. In these cases we can avoid the shift
3327 implicit in bitfield extractions.
3329 For constants, we emit a compare of the shifted constant with the
3330 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3331 compared. For two fields at the same position, we do the ANDs with the
3332 similar mask and compare the result of the ANDs.
3334 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3335 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3336 are the left and right operands of the comparison, respectively.
3338 If the optimization described above can be done, we return the resulting
3339 tree. Otherwise we return zero. */
3341 static tree
3342 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3343 tree lhs, tree rhs)
3345 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3346 tree type = TREE_TYPE (lhs);
3347 tree signed_type, unsigned_type;
3348 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3349 enum machine_mode lmode, rmode, nmode;
3350 int lunsignedp, runsignedp;
3351 int lvolatilep = 0, rvolatilep = 0;
3352 tree linner, rinner = NULL_TREE;
3353 tree mask;
3354 tree offset;
3356 /* Get all the information about the extractions being done. If the bit size
3357 if the same as the size of the underlying object, we aren't doing an
3358 extraction at all and so can do nothing. We also don't want to
3359 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3360 then will no longer be able to replace it. */
3361 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3362 &lunsignedp, &lvolatilep, false);
3363 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3364 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3365 return 0;
3367 if (!const_p)
3369 /* If this is not a constant, we can only do something if bit positions,
3370 sizes, and signedness are the same. */
3371 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3372 &runsignedp, &rvolatilep, false);
3374 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3375 || lunsignedp != runsignedp || offset != 0
3376 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3377 return 0;
3380 /* See if we can find a mode to refer to this field. We should be able to,
3381 but fail if we can't. */
3382 nmode = get_best_mode (lbitsize, lbitpos,
3383 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3384 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3385 TYPE_ALIGN (TREE_TYPE (rinner))),
3386 word_mode, lvolatilep || rvolatilep);
3387 if (nmode == VOIDmode)
3388 return 0;
3390 /* Set signed and unsigned types of the precision of this mode for the
3391 shifts below. */
3392 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3393 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3395 /* Compute the bit position and size for the new reference and our offset
3396 within it. If the new reference is the same size as the original, we
3397 won't optimize anything, so return zero. */
3398 nbitsize = GET_MODE_BITSIZE (nmode);
3399 nbitpos = lbitpos & ~ (nbitsize - 1);
3400 lbitpos -= nbitpos;
3401 if (nbitsize == lbitsize)
3402 return 0;
3404 if (BYTES_BIG_ENDIAN)
3405 lbitpos = nbitsize - lbitsize - lbitpos;
3407 /* Make the mask to be used against the extracted field. */
3408 mask = build_int_cst (unsigned_type, -1);
3409 mask = force_fit_type (mask, 0, false, false);
3410 mask = fold_convert (unsigned_type, mask);
3411 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3412 mask = const_binop (RSHIFT_EXPR, mask,
3413 size_int (nbitsize - lbitsize - lbitpos), 0);
3415 if (! const_p)
3416 /* If not comparing with constant, just rework the comparison
3417 and return. */
3418 return build2 (code, compare_type,
3419 build2 (BIT_AND_EXPR, unsigned_type,
3420 make_bit_field_ref (linner, unsigned_type,
3421 nbitsize, nbitpos, 1),
3422 mask),
3423 build2 (BIT_AND_EXPR, unsigned_type,
3424 make_bit_field_ref (rinner, unsigned_type,
3425 nbitsize, nbitpos, 1),
3426 mask));
3428 /* Otherwise, we are handling the constant case. See if the constant is too
3429 big for the field. Warn and return a tree of for 0 (false) if so. We do
3430 this not only for its own sake, but to avoid having to test for this
3431 error case below. If we didn't, we might generate wrong code.
3433 For unsigned fields, the constant shifted right by the field length should
3434 be all zero. For signed fields, the high-order bits should agree with
3435 the sign bit. */
3437 if (lunsignedp)
3439 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3440 fold_convert (unsigned_type, rhs),
3441 size_int (lbitsize), 0)))
3443 warning (0, "comparison is always %d due to width of bit-field",
3444 code == NE_EXPR);
3445 return constant_boolean_node (code == NE_EXPR, compare_type);
3448 else
3450 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3451 size_int (lbitsize - 1), 0);
3452 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3454 warning (0, "comparison is always %d due to width of bit-field",
3455 code == NE_EXPR);
3456 return constant_boolean_node (code == NE_EXPR, compare_type);
3460 /* Single-bit compares should always be against zero. */
3461 if (lbitsize == 1 && ! integer_zerop (rhs))
3463 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3464 rhs = build_int_cst (type, 0);
3467 /* Make a new bitfield reference, shift the constant over the
3468 appropriate number of bits and mask it with the computed mask
3469 (in case this was a signed field). If we changed it, make a new one. */
3470 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3471 if (lvolatilep)
3473 TREE_SIDE_EFFECTS (lhs) = 1;
3474 TREE_THIS_VOLATILE (lhs) = 1;
3477 rhs = const_binop (BIT_AND_EXPR,
3478 const_binop (LSHIFT_EXPR,
3479 fold_convert (unsigned_type, rhs),
3480 size_int (lbitpos), 0),
3481 mask, 0);
3483 return build2 (code, compare_type,
3484 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3485 rhs);
3488 /* Subroutine for fold_truthop: decode a field reference.
3490 If EXP is a comparison reference, we return the innermost reference.
3492 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3493 set to the starting bit number.
3495 If the innermost field can be completely contained in a mode-sized
3496 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3498 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3499 otherwise it is not changed.
3501 *PUNSIGNEDP is set to the signedness of the field.
3503 *PMASK is set to the mask used. This is either contained in a
3504 BIT_AND_EXPR or derived from the width of the field.
3506 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3508 Return 0 if this is not a component reference or is one that we can't
3509 do anything with. */
3511 static tree
3512 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3513 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3514 int *punsignedp, int *pvolatilep,
3515 tree *pmask, tree *pand_mask)
3517 tree outer_type = 0;
3518 tree and_mask = 0;
3519 tree mask, inner, offset;
3520 tree unsigned_type;
3521 unsigned int precision;
3523 /* All the optimizations using this function assume integer fields.
3524 There are problems with FP fields since the type_for_size call
3525 below can fail for, e.g., XFmode. */
3526 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3527 return 0;
3529 /* We are interested in the bare arrangement of bits, so strip everything
3530 that doesn't affect the machine mode. However, record the type of the
3531 outermost expression if it may matter below. */
3532 if (TREE_CODE (exp) == NOP_EXPR
3533 || TREE_CODE (exp) == CONVERT_EXPR
3534 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3535 outer_type = TREE_TYPE (exp);
3536 STRIP_NOPS (exp);
3538 if (TREE_CODE (exp) == BIT_AND_EXPR)
3540 and_mask = TREE_OPERAND (exp, 1);
3541 exp = TREE_OPERAND (exp, 0);
3542 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3543 if (TREE_CODE (and_mask) != INTEGER_CST)
3544 return 0;
3547 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3548 punsignedp, pvolatilep, false);
3549 if ((inner == exp && and_mask == 0)
3550 || *pbitsize < 0 || offset != 0
3551 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3552 return 0;
3554 /* If the number of bits in the reference is the same as the bitsize of
3555 the outer type, then the outer type gives the signedness. Otherwise
3556 (in case of a small bitfield) the signedness is unchanged. */
3557 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3558 *punsignedp = TYPE_UNSIGNED (outer_type);
3560 /* Compute the mask to access the bitfield. */
3561 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3562 precision = TYPE_PRECISION (unsigned_type);
3564 mask = build_int_cst (unsigned_type, -1);
3565 mask = force_fit_type (mask, 0, false, false);
3567 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3568 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3570 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3571 if (and_mask != 0)
3572 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3573 fold_convert (unsigned_type, and_mask), mask);
3575 *pmask = mask;
3576 *pand_mask = and_mask;
3577 return inner;
3580 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3581 bit positions. */
3583 static int
3584 all_ones_mask_p (tree mask, int size)
3586 tree type = TREE_TYPE (mask);
3587 unsigned int precision = TYPE_PRECISION (type);
3588 tree tmask;
3590 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3591 tmask = force_fit_type (tmask, 0, false, false);
3593 return
3594 tree_int_cst_equal (mask,
3595 const_binop (RSHIFT_EXPR,
3596 const_binop (LSHIFT_EXPR, tmask,
3597 size_int (precision - size),
3599 size_int (precision - size), 0));
3602 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3603 represents the sign bit of EXP's type. If EXP represents a sign
3604 or zero extension, also test VAL against the unextended type.
3605 The return value is the (sub)expression whose sign bit is VAL,
3606 or NULL_TREE otherwise. */
3608 static tree
3609 sign_bit_p (tree exp, tree val)
3611 unsigned HOST_WIDE_INT mask_lo, lo;
3612 HOST_WIDE_INT mask_hi, hi;
3613 int width;
3614 tree t;
3616 /* Tree EXP must have an integral type. */
3617 t = TREE_TYPE (exp);
3618 if (! INTEGRAL_TYPE_P (t))
3619 return NULL_TREE;
3621 /* Tree VAL must be an integer constant. */
3622 if (TREE_CODE (val) != INTEGER_CST
3623 || TREE_CONSTANT_OVERFLOW (val))
3624 return NULL_TREE;
3626 width = TYPE_PRECISION (t);
3627 if (width > HOST_BITS_PER_WIDE_INT)
3629 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3630 lo = 0;
3632 mask_hi = ((unsigned HOST_WIDE_INT) -1
3633 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3634 mask_lo = -1;
3636 else
3638 hi = 0;
3639 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3641 mask_hi = 0;
3642 mask_lo = ((unsigned HOST_WIDE_INT) -1
3643 >> (HOST_BITS_PER_WIDE_INT - width));
3646 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3647 treat VAL as if it were unsigned. */
3648 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3649 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3650 return exp;
3652 /* Handle extension from a narrower type. */
3653 if (TREE_CODE (exp) == NOP_EXPR
3654 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3655 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3657 return NULL_TREE;
3660 /* Subroutine for fold_truthop: determine if an operand is simple enough
3661 to be evaluated unconditionally. */
3663 static int
3664 simple_operand_p (tree exp)
3666 /* Strip any conversions that don't change the machine mode. */
3667 STRIP_NOPS (exp);
3669 return (CONSTANT_CLASS_P (exp)
3670 || TREE_CODE (exp) == SSA_NAME
3671 || (DECL_P (exp)
3672 && ! TREE_ADDRESSABLE (exp)
3673 && ! TREE_THIS_VOLATILE (exp)
3674 && ! DECL_NONLOCAL (exp)
3675 /* Don't regard global variables as simple. They may be
3676 allocated in ways unknown to the compiler (shared memory,
3677 #pragma weak, etc). */
3678 && ! TREE_PUBLIC (exp)
3679 && ! DECL_EXTERNAL (exp)
3680 /* Loading a static variable is unduly expensive, but global
3681 registers aren't expensive. */
3682 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3685 /* The following functions are subroutines to fold_range_test and allow it to
3686 try to change a logical combination of comparisons into a range test.
3688 For example, both
3689 X == 2 || X == 3 || X == 4 || X == 5
3691 X >= 2 && X <= 5
3692 are converted to
3693 (unsigned) (X - 2) <= 3
3695 We describe each set of comparisons as being either inside or outside
3696 a range, using a variable named like IN_P, and then describe the
3697 range with a lower and upper bound. If one of the bounds is omitted,
3698 it represents either the highest or lowest value of the type.
3700 In the comments below, we represent a range by two numbers in brackets
3701 preceded by a "+" to designate being inside that range, or a "-" to
3702 designate being outside that range, so the condition can be inverted by
3703 flipping the prefix. An omitted bound is represented by a "-". For
3704 example, "- [-, 10]" means being outside the range starting at the lowest
3705 possible value and ending at 10, in other words, being greater than 10.
3706 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3707 always false.
3709 We set up things so that the missing bounds are handled in a consistent
3710 manner so neither a missing bound nor "true" and "false" need to be
3711 handled using a special case. */
3713 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3714 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3715 and UPPER1_P are nonzero if the respective argument is an upper bound
3716 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3717 must be specified for a comparison. ARG1 will be converted to ARG0's
3718 type if both are specified. */
3720 static tree
3721 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3722 tree arg1, int upper1_p)
3724 tree tem;
3725 int result;
3726 int sgn0, sgn1;
3728 /* If neither arg represents infinity, do the normal operation.
3729 Else, if not a comparison, return infinity. Else handle the special
3730 comparison rules. Note that most of the cases below won't occur, but
3731 are handled for consistency. */
3733 if (arg0 != 0 && arg1 != 0)
3735 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3736 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3737 STRIP_NOPS (tem);
3738 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3741 if (TREE_CODE_CLASS (code) != tcc_comparison)
3742 return 0;
3744 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3745 for neither. In real maths, we cannot assume open ended ranges are
3746 the same. But, this is computer arithmetic, where numbers are finite.
3747 We can therefore make the transformation of any unbounded range with
3748 the value Z, Z being greater than any representable number. This permits
3749 us to treat unbounded ranges as equal. */
3750 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3751 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3752 switch (code)
3754 case EQ_EXPR:
3755 result = sgn0 == sgn1;
3756 break;
3757 case NE_EXPR:
3758 result = sgn0 != sgn1;
3759 break;
3760 case LT_EXPR:
3761 result = sgn0 < sgn1;
3762 break;
3763 case LE_EXPR:
3764 result = sgn0 <= sgn1;
3765 break;
3766 case GT_EXPR:
3767 result = sgn0 > sgn1;
3768 break;
3769 case GE_EXPR:
3770 result = sgn0 >= sgn1;
3771 break;
3772 default:
3773 gcc_unreachable ();
3776 return constant_boolean_node (result, type);
3779 /* Given EXP, a logical expression, set the range it is testing into
3780 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3781 actually being tested. *PLOW and *PHIGH will be made of the same type
3782 as the returned expression. If EXP is not a comparison, we will most
3783 likely not be returning a useful value and range. */
3785 static tree
3786 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3788 enum tree_code code;
3789 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3790 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3791 int in_p, n_in_p;
3792 tree low, high, n_low, n_high;
3794 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3795 and see if we can refine the range. Some of the cases below may not
3796 happen, but it doesn't seem worth worrying about this. We "continue"
3797 the outer loop when we've changed something; otherwise we "break"
3798 the switch, which will "break" the while. */
3800 in_p = 0;
3801 low = high = build_int_cst (TREE_TYPE (exp), 0);
3803 while (1)
3805 code = TREE_CODE (exp);
3806 exp_type = TREE_TYPE (exp);
3808 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3810 if (TREE_CODE_LENGTH (code) > 0)
3811 arg0 = TREE_OPERAND (exp, 0);
3812 if (TREE_CODE_CLASS (code) == tcc_comparison
3813 || TREE_CODE_CLASS (code) == tcc_unary
3814 || TREE_CODE_CLASS (code) == tcc_binary)
3815 arg0_type = TREE_TYPE (arg0);
3816 if (TREE_CODE_CLASS (code) == tcc_binary
3817 || TREE_CODE_CLASS (code) == tcc_comparison
3818 || (TREE_CODE_CLASS (code) == tcc_expression
3819 && TREE_CODE_LENGTH (code) > 1))
3820 arg1 = TREE_OPERAND (exp, 1);
3823 switch (code)
3825 case TRUTH_NOT_EXPR:
3826 in_p = ! in_p, exp = arg0;
3827 continue;
3829 case EQ_EXPR: case NE_EXPR:
3830 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3831 /* We can only do something if the range is testing for zero
3832 and if the second operand is an integer constant. Note that
3833 saying something is "in" the range we make is done by
3834 complementing IN_P since it will set in the initial case of
3835 being not equal to zero; "out" is leaving it alone. */
3836 if (low == 0 || high == 0
3837 || ! integer_zerop (low) || ! integer_zerop (high)
3838 || TREE_CODE (arg1) != INTEGER_CST)
3839 break;
3841 switch (code)
3843 case NE_EXPR: /* - [c, c] */
3844 low = high = arg1;
3845 break;
3846 case EQ_EXPR: /* + [c, c] */
3847 in_p = ! in_p, low = high = arg1;
3848 break;
3849 case GT_EXPR: /* - [-, c] */
3850 low = 0, high = arg1;
3851 break;
3852 case GE_EXPR: /* + [c, -] */
3853 in_p = ! in_p, low = arg1, high = 0;
3854 break;
3855 case LT_EXPR: /* - [c, -] */
3856 low = arg1, high = 0;
3857 break;
3858 case LE_EXPR: /* + [-, c] */
3859 in_p = ! in_p, low = 0, high = arg1;
3860 break;
3861 default:
3862 gcc_unreachable ();
3865 /* If this is an unsigned comparison, we also know that EXP is
3866 greater than or equal to zero. We base the range tests we make
3867 on that fact, so we record it here so we can parse existing
3868 range tests. We test arg0_type since often the return type
3869 of, e.g. EQ_EXPR, is boolean. */
3870 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3872 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3873 in_p, low, high, 1,
3874 build_int_cst (arg0_type, 0),
3875 NULL_TREE))
3876 break;
3878 in_p = n_in_p, low = n_low, high = n_high;
3880 /* If the high bound is missing, but we have a nonzero low
3881 bound, reverse the range so it goes from zero to the low bound
3882 minus 1. */
3883 if (high == 0 && low && ! integer_zerop (low))
3885 in_p = ! in_p;
3886 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3887 integer_one_node, 0);
3888 low = build_int_cst (arg0_type, 0);
3892 exp = arg0;
3893 continue;
3895 case NEGATE_EXPR:
3896 /* (-x) IN [a,b] -> x in [-b, -a] */
3897 n_low = range_binop (MINUS_EXPR, exp_type,
3898 build_int_cst (exp_type, 0),
3899 0, high, 1);
3900 n_high = range_binop (MINUS_EXPR, exp_type,
3901 build_int_cst (exp_type, 0),
3902 0, low, 0);
3903 low = n_low, high = n_high;
3904 exp = arg0;
3905 continue;
3907 case BIT_NOT_EXPR:
3908 /* ~ X -> -X - 1 */
3909 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3910 build_int_cst (exp_type, 1));
3911 continue;
3913 case PLUS_EXPR: case MINUS_EXPR:
3914 if (TREE_CODE (arg1) != INTEGER_CST)
3915 break;
3917 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3918 move a constant to the other side. */
3919 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3920 break;
3922 /* If EXP is signed, any overflow in the computation is undefined,
3923 so we don't worry about it so long as our computations on
3924 the bounds don't overflow. For unsigned, overflow is defined
3925 and this is exactly the right thing. */
3926 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3927 arg0_type, low, 0, arg1, 0);
3928 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3929 arg0_type, high, 1, arg1, 0);
3930 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3931 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3932 break;
3934 /* Check for an unsigned range which has wrapped around the maximum
3935 value thus making n_high < n_low, and normalize it. */
3936 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3938 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3939 integer_one_node, 0);
3940 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3941 integer_one_node, 0);
3943 /* If the range is of the form +/- [ x+1, x ], we won't
3944 be able to normalize it. But then, it represents the
3945 whole range or the empty set, so make it
3946 +/- [ -, - ]. */
3947 if (tree_int_cst_equal (n_low, low)
3948 && tree_int_cst_equal (n_high, high))
3949 low = high = 0;
3950 else
3951 in_p = ! in_p;
3953 else
3954 low = n_low, high = n_high;
3956 exp = arg0;
3957 continue;
3959 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3960 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3961 break;
3963 if (! INTEGRAL_TYPE_P (arg0_type)
3964 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3965 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3966 break;
3968 n_low = low, n_high = high;
3970 if (n_low != 0)
3971 n_low = fold_convert (arg0_type, n_low);
3973 if (n_high != 0)
3974 n_high = fold_convert (arg0_type, n_high);
3977 /* If we're converting arg0 from an unsigned type, to exp,
3978 a signed type, we will be doing the comparison as unsigned.
3979 The tests above have already verified that LOW and HIGH
3980 are both positive.
3982 So we have to ensure that we will handle large unsigned
3983 values the same way that the current signed bounds treat
3984 negative values. */
3986 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3988 tree high_positive;
3989 tree equiv_type = lang_hooks.types.type_for_mode
3990 (TYPE_MODE (arg0_type), 1);
3992 /* A range without an upper bound is, naturally, unbounded.
3993 Since convert would have cropped a very large value, use
3994 the max value for the destination type. */
3995 high_positive
3996 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3997 : TYPE_MAX_VALUE (arg0_type);
3999 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4000 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4001 fold_convert (arg0_type,
4002 high_positive),
4003 fold_convert (arg0_type,
4004 integer_one_node));
4006 /* If the low bound is specified, "and" the range with the
4007 range for which the original unsigned value will be
4008 positive. */
4009 if (low != 0)
4011 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4012 1, n_low, n_high, 1,
4013 fold_convert (arg0_type,
4014 integer_zero_node),
4015 high_positive))
4016 break;
4018 in_p = (n_in_p == in_p);
4020 else
4022 /* Otherwise, "or" the range with the range of the input
4023 that will be interpreted as negative. */
4024 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4025 0, n_low, n_high, 1,
4026 fold_convert (arg0_type,
4027 integer_zero_node),
4028 high_positive))
4029 break;
4031 in_p = (in_p != n_in_p);
4035 exp = arg0;
4036 low = n_low, high = n_high;
4037 continue;
4039 default:
4040 break;
4043 break;
4046 /* If EXP is a constant, we can evaluate whether this is true or false. */
4047 if (TREE_CODE (exp) == INTEGER_CST)
4049 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4050 exp, 0, low, 0))
4051 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4052 exp, 1, high, 1)));
4053 low = high = 0;
4054 exp = 0;
4057 *pin_p = in_p, *plow = low, *phigh = high;
4058 return exp;
4061 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4062 type, TYPE, return an expression to test if EXP is in (or out of, depending
4063 on IN_P) the range. Return 0 if the test couldn't be created. */
4065 static tree
4066 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4068 tree etype = TREE_TYPE (exp);
4069 tree value;
4071 #ifdef HAVE_canonicalize_funcptr_for_compare
4072 /* Disable this optimization for function pointer expressions
4073 on targets that require function pointer canonicalization. */
4074 if (HAVE_canonicalize_funcptr_for_compare
4075 && TREE_CODE (etype) == POINTER_TYPE
4076 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4077 return NULL_TREE;
4078 #endif
4080 if (! in_p)
4082 value = build_range_check (type, exp, 1, low, high);
4083 if (value != 0)
4084 return invert_truthvalue (value);
4086 return 0;
4089 if (low == 0 && high == 0)
4090 return build_int_cst (type, 1);
4092 if (low == 0)
4093 return fold_build2 (LE_EXPR, type, exp,
4094 fold_convert (etype, high));
4096 if (high == 0)
4097 return fold_build2 (GE_EXPR, type, exp,
4098 fold_convert (etype, low));
4100 if (operand_equal_p (low, high, 0))
4101 return fold_build2 (EQ_EXPR, type, exp,
4102 fold_convert (etype, low));
4104 if (integer_zerop (low))
4106 if (! TYPE_UNSIGNED (etype))
4108 etype = lang_hooks.types.unsigned_type (etype);
4109 high = fold_convert (etype, high);
4110 exp = fold_convert (etype, exp);
4112 return build_range_check (type, exp, 1, 0, high);
4115 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4116 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4118 unsigned HOST_WIDE_INT lo;
4119 HOST_WIDE_INT hi;
4120 int prec;
4122 prec = TYPE_PRECISION (etype);
4123 if (prec <= HOST_BITS_PER_WIDE_INT)
4125 hi = 0;
4126 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4128 else
4130 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4131 lo = (unsigned HOST_WIDE_INT) -1;
4134 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4136 if (TYPE_UNSIGNED (etype))
4138 etype = lang_hooks.types.signed_type (etype);
4139 exp = fold_convert (etype, exp);
4141 return fold_build2 (GT_EXPR, type, exp,
4142 build_int_cst (etype, 0));
4146 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4147 This requires wrap-around arithmetics for the type of the expression. */
4148 switch (TREE_CODE (etype))
4150 case INTEGER_TYPE:
4151 /* There is no requirement that LOW be within the range of ETYPE
4152 if the latter is a subtype. It must, however, be within the base
4153 type of ETYPE. So be sure we do the subtraction in that type. */
4154 if (TREE_TYPE (etype))
4155 etype = TREE_TYPE (etype);
4156 break;
4158 case ENUMERAL_TYPE:
4159 case BOOLEAN_TYPE:
4160 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4161 TYPE_UNSIGNED (etype));
4162 break;
4164 default:
4165 break;
4168 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4169 if (TREE_CODE (etype) == INTEGER_TYPE
4170 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4172 tree utype, minv, maxv;
4174 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4175 for the type in question, as we rely on this here. */
4176 utype = lang_hooks.types.unsigned_type (etype);
4177 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4178 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4179 integer_one_node, 1);
4180 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4182 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4183 minv, 1, maxv, 1)))
4184 etype = utype;
4185 else
4186 return 0;
4189 high = fold_convert (etype, high);
4190 low = fold_convert (etype, low);
4191 exp = fold_convert (etype, exp);
4193 value = const_binop (MINUS_EXPR, high, low, 0);
4195 if (value != 0 && !TREE_OVERFLOW (value))
4196 return build_range_check (type,
4197 fold_build2 (MINUS_EXPR, etype, exp, low),
4198 1, build_int_cst (etype, 0), value);
4200 return 0;
4203 /* Return the predecessor of VAL in its type, handling the infinite case. */
4205 static tree
4206 range_predecessor (tree val)
4208 tree type = TREE_TYPE (val);
4210 if (INTEGRAL_TYPE_P (type)
4211 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4212 return 0;
4213 else
4214 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4217 /* Return the successor of VAL in its type, handling the infinite case. */
4219 static tree
4220 range_successor (tree val)
4222 tree type = TREE_TYPE (val);
4224 if (INTEGRAL_TYPE_P (type)
4225 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4226 return 0;
4227 else
4228 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4231 /* Given two ranges, see if we can merge them into one. Return 1 if we
4232 can, 0 if we can't. Set the output range into the specified parameters. */
4234 static int
4235 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4236 tree high0, int in1_p, tree low1, tree high1)
4238 int no_overlap;
4239 int subset;
4240 int temp;
4241 tree tem;
4242 int in_p;
4243 tree low, high;
4244 int lowequal = ((low0 == 0 && low1 == 0)
4245 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4246 low0, 0, low1, 0)));
4247 int highequal = ((high0 == 0 && high1 == 0)
4248 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4249 high0, 1, high1, 1)));
4251 /* Make range 0 be the range that starts first, or ends last if they
4252 start at the same value. Swap them if it isn't. */
4253 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4254 low0, 0, low1, 0))
4255 || (lowequal
4256 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4257 high1, 1, high0, 1))))
4259 temp = in0_p, in0_p = in1_p, in1_p = temp;
4260 tem = low0, low0 = low1, low1 = tem;
4261 tem = high0, high0 = high1, high1 = tem;
4264 /* Now flag two cases, whether the ranges are disjoint or whether the
4265 second range is totally subsumed in the first. Note that the tests
4266 below are simplified by the ones above. */
4267 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4268 high0, 1, low1, 0));
4269 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4270 high1, 1, high0, 1));
4272 /* We now have four cases, depending on whether we are including or
4273 excluding the two ranges. */
4274 if (in0_p && in1_p)
4276 /* If they don't overlap, the result is false. If the second range
4277 is a subset it is the result. Otherwise, the range is from the start
4278 of the second to the end of the first. */
4279 if (no_overlap)
4280 in_p = 0, low = high = 0;
4281 else if (subset)
4282 in_p = 1, low = low1, high = high1;
4283 else
4284 in_p = 1, low = low1, high = high0;
4287 else if (in0_p && ! in1_p)
4289 /* If they don't overlap, the result is the first range. If they are
4290 equal, the result is false. If the second range is a subset of the
4291 first, and the ranges begin at the same place, we go from just after
4292 the end of the second range to the end of the first. If the second
4293 range is not a subset of the first, or if it is a subset and both
4294 ranges end at the same place, the range starts at the start of the
4295 first range and ends just before the second range.
4296 Otherwise, we can't describe this as a single range. */
4297 if (no_overlap)
4298 in_p = 1, low = low0, high = high0;
4299 else if (lowequal && highequal)
4300 in_p = 0, low = high = 0;
4301 else if (subset && lowequal)
4303 low = range_successor (high1);
4304 high = high0;
4305 in_p = (low != 0);
4307 else if (! subset || highequal)
4309 low = low0;
4310 high = range_predecessor (low1);
4311 in_p = (high != 0);
4313 else
4314 return 0;
4317 else if (! in0_p && in1_p)
4319 /* If they don't overlap, the result is the second range. If the second
4320 is a subset of the first, the result is false. Otherwise,
4321 the range starts just after the first range and ends at the
4322 end of the second. */
4323 if (no_overlap)
4324 in_p = 1, low = low1, high = high1;
4325 else if (subset || highequal)
4326 in_p = 0, low = high = 0;
4327 else
4329 low = range_successor (high0);
4330 high = high1;
4331 in_p = (low != 0);
4335 else
4337 /* The case where we are excluding both ranges. Here the complex case
4338 is if they don't overlap. In that case, the only time we have a
4339 range is if they are adjacent. If the second is a subset of the
4340 first, the result is the first. Otherwise, the range to exclude
4341 starts at the beginning of the first range and ends at the end of the
4342 second. */
4343 if (no_overlap)
4345 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4346 range_successor (high0),
4347 1, low1, 0)))
4348 in_p = 0, low = low0, high = high1;
4349 else
4351 /* Canonicalize - [min, x] into - [-, x]. */
4352 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4353 switch (TREE_CODE (TREE_TYPE (low0)))
4355 case ENUMERAL_TYPE:
4356 if (TYPE_PRECISION (TREE_TYPE (low0))
4357 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4358 break;
4359 /* FALLTHROUGH */
4360 case INTEGER_TYPE:
4361 if (tree_int_cst_equal (low0,
4362 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4363 low0 = 0;
4364 break;
4365 case POINTER_TYPE:
4366 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4367 && integer_zerop (low0))
4368 low0 = 0;
4369 break;
4370 default:
4371 break;
4374 /* Canonicalize - [x, max] into - [x, -]. */
4375 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4376 switch (TREE_CODE (TREE_TYPE (high1)))
4378 case ENUMERAL_TYPE:
4379 if (TYPE_PRECISION (TREE_TYPE (high1))
4380 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4381 break;
4382 /* FALLTHROUGH */
4383 case INTEGER_TYPE:
4384 if (tree_int_cst_equal (high1,
4385 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4386 high1 = 0;
4387 break;
4388 case POINTER_TYPE:
4389 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4390 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4391 high1, 1,
4392 integer_one_node, 1)))
4393 high1 = 0;
4394 break;
4395 default:
4396 break;
4399 /* The ranges might be also adjacent between the maximum and
4400 minimum values of the given type. For
4401 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4402 return + [x + 1, y - 1]. */
4403 if (low0 == 0 && high1 == 0)
4405 low = range_successor (high0);
4406 high = range_predecessor (low1);
4407 if (low == 0 || high == 0)
4408 return 0;
4410 in_p = 1;
4412 else
4413 return 0;
4416 else if (subset)
4417 in_p = 0, low = low0, high = high0;
4418 else
4419 in_p = 0, low = low0, high = high1;
4422 *pin_p = in_p, *plow = low, *phigh = high;
4423 return 1;
4427 /* Subroutine of fold, looking inside expressions of the form
4428 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4429 of the COND_EXPR. This function is being used also to optimize
4430 A op B ? C : A, by reversing the comparison first.
4432 Return a folded expression whose code is not a COND_EXPR
4433 anymore, or NULL_TREE if no folding opportunity is found. */
4435 static tree
4436 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4438 enum tree_code comp_code = TREE_CODE (arg0);
4439 tree arg00 = TREE_OPERAND (arg0, 0);
4440 tree arg01 = TREE_OPERAND (arg0, 1);
4441 tree arg1_type = TREE_TYPE (arg1);
4442 tree tem;
4444 STRIP_NOPS (arg1);
4445 STRIP_NOPS (arg2);
4447 /* If we have A op 0 ? A : -A, consider applying the following
4448 transformations:
4450 A == 0? A : -A same as -A
4451 A != 0? A : -A same as A
4452 A >= 0? A : -A same as abs (A)
4453 A > 0? A : -A same as abs (A)
4454 A <= 0? A : -A same as -abs (A)
4455 A < 0? A : -A same as -abs (A)
4457 None of these transformations work for modes with signed
4458 zeros. If A is +/-0, the first two transformations will
4459 change the sign of the result (from +0 to -0, or vice
4460 versa). The last four will fix the sign of the result,
4461 even though the original expressions could be positive or
4462 negative, depending on the sign of A.
4464 Note that all these transformations are correct if A is
4465 NaN, since the two alternatives (A and -A) are also NaNs. */
4466 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4467 ? real_zerop (arg01)
4468 : integer_zerop (arg01))
4469 && ((TREE_CODE (arg2) == NEGATE_EXPR
4470 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4471 /* In the case that A is of the form X-Y, '-A' (arg2) may
4472 have already been folded to Y-X, check for that. */
4473 || (TREE_CODE (arg1) == MINUS_EXPR
4474 && TREE_CODE (arg2) == MINUS_EXPR
4475 && operand_equal_p (TREE_OPERAND (arg1, 0),
4476 TREE_OPERAND (arg2, 1), 0)
4477 && operand_equal_p (TREE_OPERAND (arg1, 1),
4478 TREE_OPERAND (arg2, 0), 0))))
4479 switch (comp_code)
4481 case EQ_EXPR:
4482 case UNEQ_EXPR:
4483 tem = fold_convert (arg1_type, arg1);
4484 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4485 case NE_EXPR:
4486 case LTGT_EXPR:
4487 return pedantic_non_lvalue (fold_convert (type, arg1));
4488 case UNGE_EXPR:
4489 case UNGT_EXPR:
4490 if (flag_trapping_math)
4491 break;
4492 /* Fall through. */
4493 case GE_EXPR:
4494 case GT_EXPR:
4495 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4496 arg1 = fold_convert (lang_hooks.types.signed_type
4497 (TREE_TYPE (arg1)), arg1);
4498 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4499 return pedantic_non_lvalue (fold_convert (type, tem));
4500 case UNLE_EXPR:
4501 case UNLT_EXPR:
4502 if (flag_trapping_math)
4503 break;
4504 case LE_EXPR:
4505 case LT_EXPR:
4506 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4507 arg1 = fold_convert (lang_hooks.types.signed_type
4508 (TREE_TYPE (arg1)), arg1);
4509 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4510 return negate_expr (fold_convert (type, tem));
4511 default:
4512 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4513 break;
4516 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4517 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4518 both transformations are correct when A is NaN: A != 0
4519 is then true, and A == 0 is false. */
4521 if (integer_zerop (arg01) && integer_zerop (arg2))
4523 if (comp_code == NE_EXPR)
4524 return pedantic_non_lvalue (fold_convert (type, arg1));
4525 else if (comp_code == EQ_EXPR)
4526 return build_int_cst (type, 0);
4529 /* Try some transformations of A op B ? A : B.
4531 A == B? A : B same as B
4532 A != B? A : B same as A
4533 A >= B? A : B same as max (A, B)
4534 A > B? A : B same as max (B, A)
4535 A <= B? A : B same as min (A, B)
4536 A < B? A : B same as min (B, A)
4538 As above, these transformations don't work in the presence
4539 of signed zeros. For example, if A and B are zeros of
4540 opposite sign, the first two transformations will change
4541 the sign of the result. In the last four, the original
4542 expressions give different results for (A=+0, B=-0) and
4543 (A=-0, B=+0), but the transformed expressions do not.
4545 The first two transformations are correct if either A or B
4546 is a NaN. In the first transformation, the condition will
4547 be false, and B will indeed be chosen. In the case of the
4548 second transformation, the condition A != B will be true,
4549 and A will be chosen.
4551 The conversions to max() and min() are not correct if B is
4552 a number and A is not. The conditions in the original
4553 expressions will be false, so all four give B. The min()
4554 and max() versions would give a NaN instead. */
4555 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4556 /* Avoid these transformations if the COND_EXPR may be used
4557 as an lvalue in the C++ front-end. PR c++/19199. */
4558 && (in_gimple_form
4559 || (strcmp (lang_hooks.name, "GNU C++") != 0
4560 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4561 || ! maybe_lvalue_p (arg1)
4562 || ! maybe_lvalue_p (arg2)))
4564 tree comp_op0 = arg00;
4565 tree comp_op1 = arg01;
4566 tree comp_type = TREE_TYPE (comp_op0);
4568 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4569 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4571 comp_type = type;
4572 comp_op0 = arg1;
4573 comp_op1 = arg2;
4576 switch (comp_code)
4578 case EQ_EXPR:
4579 return pedantic_non_lvalue (fold_convert (type, arg2));
4580 case NE_EXPR:
4581 return pedantic_non_lvalue (fold_convert (type, arg1));
4582 case LE_EXPR:
4583 case LT_EXPR:
4584 case UNLE_EXPR:
4585 case UNLT_EXPR:
4586 /* In C++ a ?: expression can be an lvalue, so put the
4587 operand which will be used if they are equal first
4588 so that we can convert this back to the
4589 corresponding COND_EXPR. */
4590 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4592 comp_op0 = fold_convert (comp_type, comp_op0);
4593 comp_op1 = fold_convert (comp_type, comp_op1);
4594 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4595 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4596 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4597 return pedantic_non_lvalue (fold_convert (type, tem));
4599 break;
4600 case GE_EXPR:
4601 case GT_EXPR:
4602 case UNGE_EXPR:
4603 case UNGT_EXPR:
4604 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4606 comp_op0 = fold_convert (comp_type, comp_op0);
4607 comp_op1 = fold_convert (comp_type, comp_op1);
4608 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4609 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4610 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4611 return pedantic_non_lvalue (fold_convert (type, tem));
4613 break;
4614 case UNEQ_EXPR:
4615 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4616 return pedantic_non_lvalue (fold_convert (type, arg2));
4617 break;
4618 case LTGT_EXPR:
4619 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4620 return pedantic_non_lvalue (fold_convert (type, arg1));
4621 break;
4622 default:
4623 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4624 break;
4628 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4629 we might still be able to simplify this. For example,
4630 if C1 is one less or one more than C2, this might have started
4631 out as a MIN or MAX and been transformed by this function.
4632 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4634 if (INTEGRAL_TYPE_P (type)
4635 && TREE_CODE (arg01) == INTEGER_CST
4636 && TREE_CODE (arg2) == INTEGER_CST)
4637 switch (comp_code)
4639 case EQ_EXPR:
4640 /* We can replace A with C1 in this case. */
4641 arg1 = fold_convert (type, arg01);
4642 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4644 case LT_EXPR:
4645 /* If C1 is C2 + 1, this is min(A, C2). */
4646 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4647 OEP_ONLY_CONST)
4648 && operand_equal_p (arg01,
4649 const_binop (PLUS_EXPR, arg2,
4650 integer_one_node, 0),
4651 OEP_ONLY_CONST))
4652 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4653 type, arg1, arg2));
4654 break;
4656 case LE_EXPR:
4657 /* If C1 is C2 - 1, this is min(A, C2). */
4658 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4659 OEP_ONLY_CONST)
4660 && operand_equal_p (arg01,
4661 const_binop (MINUS_EXPR, arg2,
4662 integer_one_node, 0),
4663 OEP_ONLY_CONST))
4664 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4665 type, arg1, arg2));
4666 break;
4668 case GT_EXPR:
4669 /* If C1 is C2 - 1, this is max(A, C2). */
4670 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4671 OEP_ONLY_CONST)
4672 && operand_equal_p (arg01,
4673 const_binop (MINUS_EXPR, arg2,
4674 integer_one_node, 0),
4675 OEP_ONLY_CONST))
4676 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4677 type, arg1, arg2));
4678 break;
4680 case GE_EXPR:
4681 /* If C1 is C2 + 1, this is max(A, C2). */
4682 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4683 OEP_ONLY_CONST)
4684 && operand_equal_p (arg01,
4685 const_binop (PLUS_EXPR, arg2,
4686 integer_one_node, 0),
4687 OEP_ONLY_CONST))
4688 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4689 type, arg1, arg2));
4690 break;
4691 case NE_EXPR:
4692 break;
4693 default:
4694 gcc_unreachable ();
4697 return NULL_TREE;
4702 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4703 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4704 #endif
4706 /* EXP is some logical combination of boolean tests. See if we can
4707 merge it into some range test. Return the new tree if so. */
4709 static tree
4710 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4712 int or_op = (code == TRUTH_ORIF_EXPR
4713 || code == TRUTH_OR_EXPR);
4714 int in0_p, in1_p, in_p;
4715 tree low0, low1, low, high0, high1, high;
4716 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4717 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4718 tree tem;
4720 /* If this is an OR operation, invert both sides; we will invert
4721 again at the end. */
4722 if (or_op)
4723 in0_p = ! in0_p, in1_p = ! in1_p;
4725 /* If both expressions are the same, if we can merge the ranges, and we
4726 can build the range test, return it or it inverted. If one of the
4727 ranges is always true or always false, consider it to be the same
4728 expression as the other. */
4729 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4730 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4731 in1_p, low1, high1)
4732 && 0 != (tem = (build_range_check (type,
4733 lhs != 0 ? lhs
4734 : rhs != 0 ? rhs : integer_zero_node,
4735 in_p, low, high))))
4736 return or_op ? invert_truthvalue (tem) : tem;
4738 /* On machines where the branch cost is expensive, if this is a
4739 short-circuited branch and the underlying object on both sides
4740 is the same, make a non-short-circuit operation. */
4741 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4742 && lhs != 0 && rhs != 0
4743 && (code == TRUTH_ANDIF_EXPR
4744 || code == TRUTH_ORIF_EXPR)
4745 && operand_equal_p (lhs, rhs, 0))
4747 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4748 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4749 which cases we can't do this. */
4750 if (simple_operand_p (lhs))
4751 return build2 (code == TRUTH_ANDIF_EXPR
4752 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4753 type, op0, op1);
4755 else if (lang_hooks.decls.global_bindings_p () == 0
4756 && ! CONTAINS_PLACEHOLDER_P (lhs))
4758 tree common = save_expr (lhs);
4760 if (0 != (lhs = build_range_check (type, common,
4761 or_op ? ! in0_p : in0_p,
4762 low0, high0))
4763 && (0 != (rhs = build_range_check (type, common,
4764 or_op ? ! in1_p : in1_p,
4765 low1, high1))))
4766 return build2 (code == TRUTH_ANDIF_EXPR
4767 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4768 type, lhs, rhs);
4772 return 0;
4775 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4776 bit value. Arrange things so the extra bits will be set to zero if and
4777 only if C is signed-extended to its full width. If MASK is nonzero,
4778 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4780 static tree
4781 unextend (tree c, int p, int unsignedp, tree mask)
4783 tree type = TREE_TYPE (c);
4784 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4785 tree temp;
4787 if (p == modesize || unsignedp)
4788 return c;
4790 /* We work by getting just the sign bit into the low-order bit, then
4791 into the high-order bit, then sign-extend. We then XOR that value
4792 with C. */
4793 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4794 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4796 /* We must use a signed type in order to get an arithmetic right shift.
4797 However, we must also avoid introducing accidental overflows, so that
4798 a subsequent call to integer_zerop will work. Hence we must
4799 do the type conversion here. At this point, the constant is either
4800 zero or one, and the conversion to a signed type can never overflow.
4801 We could get an overflow if this conversion is done anywhere else. */
4802 if (TYPE_UNSIGNED (type))
4803 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4805 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4806 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4807 if (mask != 0)
4808 temp = const_binop (BIT_AND_EXPR, temp,
4809 fold_convert (TREE_TYPE (c), mask), 0);
4810 /* If necessary, convert the type back to match the type of C. */
4811 if (TYPE_UNSIGNED (type))
4812 temp = fold_convert (type, temp);
4814 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4817 /* Find ways of folding logical expressions of LHS and RHS:
4818 Try to merge two comparisons to the same innermost item.
4819 Look for range tests like "ch >= '0' && ch <= '9'".
4820 Look for combinations of simple terms on machines with expensive branches
4821 and evaluate the RHS unconditionally.
4823 For example, if we have p->a == 2 && p->b == 4 and we can make an
4824 object large enough to span both A and B, we can do this with a comparison
4825 against the object ANDed with the a mask.
4827 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4828 operations to do this with one comparison.
4830 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4831 function and the one above.
4833 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4834 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4836 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4837 two operands.
4839 We return the simplified tree or 0 if no optimization is possible. */
4841 static tree
4842 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4844 /* If this is the "or" of two comparisons, we can do something if
4845 the comparisons are NE_EXPR. If this is the "and", we can do something
4846 if the comparisons are EQ_EXPR. I.e.,
4847 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4849 WANTED_CODE is this operation code. For single bit fields, we can
4850 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4851 comparison for one-bit fields. */
4853 enum tree_code wanted_code;
4854 enum tree_code lcode, rcode;
4855 tree ll_arg, lr_arg, rl_arg, rr_arg;
4856 tree ll_inner, lr_inner, rl_inner, rr_inner;
4857 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4858 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4859 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4860 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4861 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4862 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4863 enum machine_mode lnmode, rnmode;
4864 tree ll_mask, lr_mask, rl_mask, rr_mask;
4865 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4866 tree l_const, r_const;
4867 tree lntype, rntype, result;
4868 int first_bit, end_bit;
4869 int volatilep;
4870 tree orig_lhs = lhs, orig_rhs = rhs;
4871 enum tree_code orig_code = code;
4873 /* Start by getting the comparison codes. Fail if anything is volatile.
4874 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4875 it were surrounded with a NE_EXPR. */
4877 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4878 return 0;
4880 lcode = TREE_CODE (lhs);
4881 rcode = TREE_CODE (rhs);
4883 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4885 lhs = build2 (NE_EXPR, truth_type, lhs,
4886 build_int_cst (TREE_TYPE (lhs), 0));
4887 lcode = NE_EXPR;
4890 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4892 rhs = build2 (NE_EXPR, truth_type, rhs,
4893 build_int_cst (TREE_TYPE (rhs), 0));
4894 rcode = NE_EXPR;
4897 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4898 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4899 return 0;
4901 ll_arg = TREE_OPERAND (lhs, 0);
4902 lr_arg = TREE_OPERAND (lhs, 1);
4903 rl_arg = TREE_OPERAND (rhs, 0);
4904 rr_arg = TREE_OPERAND (rhs, 1);
4906 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4907 if (simple_operand_p (ll_arg)
4908 && simple_operand_p (lr_arg))
4910 tree result;
4911 if (operand_equal_p (ll_arg, rl_arg, 0)
4912 && operand_equal_p (lr_arg, rr_arg, 0))
4914 result = combine_comparisons (code, lcode, rcode,
4915 truth_type, ll_arg, lr_arg);
4916 if (result)
4917 return result;
4919 else if (operand_equal_p (ll_arg, rr_arg, 0)
4920 && operand_equal_p (lr_arg, rl_arg, 0))
4922 result = combine_comparisons (code, lcode,
4923 swap_tree_comparison (rcode),
4924 truth_type, ll_arg, lr_arg);
4925 if (result)
4926 return result;
4930 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4931 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4933 /* If the RHS can be evaluated unconditionally and its operands are
4934 simple, it wins to evaluate the RHS unconditionally on machines
4935 with expensive branches. In this case, this isn't a comparison
4936 that can be merged. Avoid doing this if the RHS is a floating-point
4937 comparison since those can trap. */
4939 if (BRANCH_COST >= 2
4940 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4941 && simple_operand_p (rl_arg)
4942 && simple_operand_p (rr_arg))
4944 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4945 if (code == TRUTH_OR_EXPR
4946 && lcode == NE_EXPR && integer_zerop (lr_arg)
4947 && rcode == NE_EXPR && integer_zerop (rr_arg)
4948 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4949 return build2 (NE_EXPR, truth_type,
4950 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4951 ll_arg, rl_arg),
4952 build_int_cst (TREE_TYPE (ll_arg), 0));
4954 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4955 if (code == TRUTH_AND_EXPR
4956 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4957 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4958 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4959 return build2 (EQ_EXPR, truth_type,
4960 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4961 ll_arg, rl_arg),
4962 build_int_cst (TREE_TYPE (ll_arg), 0));
4964 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4966 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
4967 return build2 (code, truth_type, lhs, rhs);
4968 return NULL_TREE;
4972 /* See if the comparisons can be merged. Then get all the parameters for
4973 each side. */
4975 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4976 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4977 return 0;
4979 volatilep = 0;
4980 ll_inner = decode_field_reference (ll_arg,
4981 &ll_bitsize, &ll_bitpos, &ll_mode,
4982 &ll_unsignedp, &volatilep, &ll_mask,
4983 &ll_and_mask);
4984 lr_inner = decode_field_reference (lr_arg,
4985 &lr_bitsize, &lr_bitpos, &lr_mode,
4986 &lr_unsignedp, &volatilep, &lr_mask,
4987 &lr_and_mask);
4988 rl_inner = decode_field_reference (rl_arg,
4989 &rl_bitsize, &rl_bitpos, &rl_mode,
4990 &rl_unsignedp, &volatilep, &rl_mask,
4991 &rl_and_mask);
4992 rr_inner = decode_field_reference (rr_arg,
4993 &rr_bitsize, &rr_bitpos, &rr_mode,
4994 &rr_unsignedp, &volatilep, &rr_mask,
4995 &rr_and_mask);
4997 /* It must be true that the inner operation on the lhs of each
4998 comparison must be the same if we are to be able to do anything.
4999 Then see if we have constants. If not, the same must be true for
5000 the rhs's. */
5001 if (volatilep || ll_inner == 0 || rl_inner == 0
5002 || ! operand_equal_p (ll_inner, rl_inner, 0))
5003 return 0;
5005 if (TREE_CODE (lr_arg) == INTEGER_CST
5006 && TREE_CODE (rr_arg) == INTEGER_CST)
5007 l_const = lr_arg, r_const = rr_arg;
5008 else if (lr_inner == 0 || rr_inner == 0
5009 || ! operand_equal_p (lr_inner, rr_inner, 0))
5010 return 0;
5011 else
5012 l_const = r_const = 0;
5014 /* If either comparison code is not correct for our logical operation,
5015 fail. However, we can convert a one-bit comparison against zero into
5016 the opposite comparison against that bit being set in the field. */
5018 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5019 if (lcode != wanted_code)
5021 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5023 /* Make the left operand unsigned, since we are only interested
5024 in the value of one bit. Otherwise we are doing the wrong
5025 thing below. */
5026 ll_unsignedp = 1;
5027 l_const = ll_mask;
5029 else
5030 return 0;
5033 /* This is analogous to the code for l_const above. */
5034 if (rcode != wanted_code)
5036 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5038 rl_unsignedp = 1;
5039 r_const = rl_mask;
5041 else
5042 return 0;
5045 /* After this point all optimizations will generate bit-field
5046 references, which we might not want. */
5047 if (! lang_hooks.can_use_bit_fields_p ())
5048 return 0;
5050 /* See if we can find a mode that contains both fields being compared on
5051 the left. If we can't, fail. Otherwise, update all constants and masks
5052 to be relative to a field of that size. */
5053 first_bit = MIN (ll_bitpos, rl_bitpos);
5054 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5055 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5056 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5057 volatilep);
5058 if (lnmode == VOIDmode)
5059 return 0;
5061 lnbitsize = GET_MODE_BITSIZE (lnmode);
5062 lnbitpos = first_bit & ~ (lnbitsize - 1);
5063 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5064 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5066 if (BYTES_BIG_ENDIAN)
5068 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5069 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5072 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5073 size_int (xll_bitpos), 0);
5074 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5075 size_int (xrl_bitpos), 0);
5077 if (l_const)
5079 l_const = fold_convert (lntype, l_const);
5080 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5081 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5082 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5083 fold_build1 (BIT_NOT_EXPR,
5084 lntype, ll_mask),
5085 0)))
5087 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5089 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5092 if (r_const)
5094 r_const = fold_convert (lntype, r_const);
5095 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5096 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5097 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5098 fold_build1 (BIT_NOT_EXPR,
5099 lntype, rl_mask),
5100 0)))
5102 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5104 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5108 /* If the right sides are not constant, do the same for it. Also,
5109 disallow this optimization if a size or signedness mismatch occurs
5110 between the left and right sides. */
5111 if (l_const == 0)
5113 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5114 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5115 /* Make sure the two fields on the right
5116 correspond to the left without being swapped. */
5117 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5118 return 0;
5120 first_bit = MIN (lr_bitpos, rr_bitpos);
5121 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5122 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5123 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5124 volatilep);
5125 if (rnmode == VOIDmode)
5126 return 0;
5128 rnbitsize = GET_MODE_BITSIZE (rnmode);
5129 rnbitpos = first_bit & ~ (rnbitsize - 1);
5130 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5131 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5133 if (BYTES_BIG_ENDIAN)
5135 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5136 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5139 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5140 size_int (xlr_bitpos), 0);
5141 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5142 size_int (xrr_bitpos), 0);
5144 /* Make a mask that corresponds to both fields being compared.
5145 Do this for both items being compared. If the operands are the
5146 same size and the bits being compared are in the same position
5147 then we can do this by masking both and comparing the masked
5148 results. */
5149 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5150 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5151 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5153 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5154 ll_unsignedp || rl_unsignedp);
5155 if (! all_ones_mask_p (ll_mask, lnbitsize))
5156 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5158 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5159 lr_unsignedp || rr_unsignedp);
5160 if (! all_ones_mask_p (lr_mask, rnbitsize))
5161 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5163 return build2 (wanted_code, truth_type, lhs, rhs);
5166 /* There is still another way we can do something: If both pairs of
5167 fields being compared are adjacent, we may be able to make a wider
5168 field containing them both.
5170 Note that we still must mask the lhs/rhs expressions. Furthermore,
5171 the mask must be shifted to account for the shift done by
5172 make_bit_field_ref. */
5173 if ((ll_bitsize + ll_bitpos == rl_bitpos
5174 && lr_bitsize + lr_bitpos == rr_bitpos)
5175 || (ll_bitpos == rl_bitpos + rl_bitsize
5176 && lr_bitpos == rr_bitpos + rr_bitsize))
5178 tree type;
5180 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5181 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5182 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5183 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5185 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5186 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5187 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5188 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5190 /* Convert to the smaller type before masking out unwanted bits. */
5191 type = lntype;
5192 if (lntype != rntype)
5194 if (lnbitsize > rnbitsize)
5196 lhs = fold_convert (rntype, lhs);
5197 ll_mask = fold_convert (rntype, ll_mask);
5198 type = rntype;
5200 else if (lnbitsize < rnbitsize)
5202 rhs = fold_convert (lntype, rhs);
5203 lr_mask = fold_convert (lntype, lr_mask);
5204 type = lntype;
5208 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5209 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5211 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5212 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5214 return build2 (wanted_code, truth_type, lhs, rhs);
5217 return 0;
5220 /* Handle the case of comparisons with constants. If there is something in
5221 common between the masks, those bits of the constants must be the same.
5222 If not, the condition is always false. Test for this to avoid generating
5223 incorrect code below. */
5224 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5225 if (! integer_zerop (result)
5226 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5227 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5229 if (wanted_code == NE_EXPR)
5231 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5232 return constant_boolean_node (true, truth_type);
5234 else
5236 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5237 return constant_boolean_node (false, truth_type);
5241 /* Construct the expression we will return. First get the component
5242 reference we will make. Unless the mask is all ones the width of
5243 that field, perform the mask operation. Then compare with the
5244 merged constant. */
5245 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5246 ll_unsignedp || rl_unsignedp);
5248 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5249 if (! all_ones_mask_p (ll_mask, lnbitsize))
5250 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5252 return build2 (wanted_code, truth_type, result,
5253 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5256 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5257 constant. */
5259 static tree
5260 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5262 tree arg0 = op0;
5263 enum tree_code op_code;
5264 tree comp_const = op1;
5265 tree minmax_const;
5266 int consts_equal, consts_lt;
5267 tree inner;
5269 STRIP_SIGN_NOPS (arg0);
5271 op_code = TREE_CODE (arg0);
5272 minmax_const = TREE_OPERAND (arg0, 1);
5273 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5274 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5275 inner = TREE_OPERAND (arg0, 0);
5277 /* If something does not permit us to optimize, return the original tree. */
5278 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5279 || TREE_CODE (comp_const) != INTEGER_CST
5280 || TREE_CONSTANT_OVERFLOW (comp_const)
5281 || TREE_CODE (minmax_const) != INTEGER_CST
5282 || TREE_CONSTANT_OVERFLOW (minmax_const))
5283 return NULL_TREE;
5285 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5286 and GT_EXPR, doing the rest with recursive calls using logical
5287 simplifications. */
5288 switch (code)
5290 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5292 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5293 type, op0, op1);
5294 if (tem)
5295 return invert_truthvalue (tem);
5296 return NULL_TREE;
5299 case GE_EXPR:
5300 return
5301 fold_build2 (TRUTH_ORIF_EXPR, type,
5302 optimize_minmax_comparison
5303 (EQ_EXPR, type, arg0, comp_const),
5304 optimize_minmax_comparison
5305 (GT_EXPR, type, arg0, comp_const));
5307 case EQ_EXPR:
5308 if (op_code == MAX_EXPR && consts_equal)
5309 /* MAX (X, 0) == 0 -> X <= 0 */
5310 return fold_build2 (LE_EXPR, type, inner, comp_const);
5312 else if (op_code == MAX_EXPR && consts_lt)
5313 /* MAX (X, 0) == 5 -> X == 5 */
5314 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5316 else if (op_code == MAX_EXPR)
5317 /* MAX (X, 0) == -1 -> false */
5318 return omit_one_operand (type, integer_zero_node, inner);
5320 else if (consts_equal)
5321 /* MIN (X, 0) == 0 -> X >= 0 */
5322 return fold_build2 (GE_EXPR, type, inner, comp_const);
5324 else if (consts_lt)
5325 /* MIN (X, 0) == 5 -> false */
5326 return omit_one_operand (type, integer_zero_node, inner);
5328 else
5329 /* MIN (X, 0) == -1 -> X == -1 */
5330 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5332 case GT_EXPR:
5333 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5334 /* MAX (X, 0) > 0 -> X > 0
5335 MAX (X, 0) > 5 -> X > 5 */
5336 return fold_build2 (GT_EXPR, type, inner, comp_const);
5338 else if (op_code == MAX_EXPR)
5339 /* MAX (X, 0) > -1 -> true */
5340 return omit_one_operand (type, integer_one_node, inner);
5342 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5343 /* MIN (X, 0) > 0 -> false
5344 MIN (X, 0) > 5 -> false */
5345 return omit_one_operand (type, integer_zero_node, inner);
5347 else
5348 /* MIN (X, 0) > -1 -> X > -1 */
5349 return fold_build2 (GT_EXPR, type, inner, comp_const);
5351 default:
5352 return NULL_TREE;
5356 /* T is an integer expression that is being multiplied, divided, or taken a
5357 modulus (CODE says which and what kind of divide or modulus) by a
5358 constant C. See if we can eliminate that operation by folding it with
5359 other operations already in T. WIDE_TYPE, if non-null, is a type that
5360 should be used for the computation if wider than our type.
5362 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5363 (X * 2) + (Y * 4). We must, however, be assured that either the original
5364 expression would not overflow or that overflow is undefined for the type
5365 in the language in question.
5367 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5368 the machine has a multiply-accumulate insn or that this is part of an
5369 addressing calculation.
5371 If we return a non-null expression, it is an equivalent form of the
5372 original computation, but need not be in the original type. */
5374 static tree
5375 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5377 /* To avoid exponential search depth, refuse to allow recursion past
5378 three levels. Beyond that (1) it's highly unlikely that we'll find
5379 something interesting and (2) we've probably processed it before
5380 when we built the inner expression. */
5382 static int depth;
5383 tree ret;
5385 if (depth > 3)
5386 return NULL;
5388 depth++;
5389 ret = extract_muldiv_1 (t, c, code, wide_type);
5390 depth--;
5392 return ret;
5395 static tree
5396 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5398 tree type = TREE_TYPE (t);
5399 enum tree_code tcode = TREE_CODE (t);
5400 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5401 > GET_MODE_SIZE (TYPE_MODE (type)))
5402 ? wide_type : type);
5403 tree t1, t2;
5404 int same_p = tcode == code;
5405 tree op0 = NULL_TREE, op1 = NULL_TREE;
5407 /* Don't deal with constants of zero here; they confuse the code below. */
5408 if (integer_zerop (c))
5409 return NULL_TREE;
5411 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5412 op0 = TREE_OPERAND (t, 0);
5414 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5415 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5417 /* Note that we need not handle conditional operations here since fold
5418 already handles those cases. So just do arithmetic here. */
5419 switch (tcode)
5421 case INTEGER_CST:
5422 /* For a constant, we can always simplify if we are a multiply
5423 or (for divide and modulus) if it is a multiple of our constant. */
5424 if (code == MULT_EXPR
5425 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5426 return const_binop (code, fold_convert (ctype, t),
5427 fold_convert (ctype, c), 0);
5428 break;
5430 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5431 /* If op0 is an expression ... */
5432 if ((COMPARISON_CLASS_P (op0)
5433 || UNARY_CLASS_P (op0)
5434 || BINARY_CLASS_P (op0)
5435 || EXPRESSION_CLASS_P (op0))
5436 /* ... and is unsigned, and its type is smaller than ctype,
5437 then we cannot pass through as widening. */
5438 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5439 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5440 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5441 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5442 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5443 /* ... or this is a truncation (t is narrower than op0),
5444 then we cannot pass through this narrowing. */
5445 || (GET_MODE_SIZE (TYPE_MODE (type))
5446 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5447 /* ... or signedness changes for division or modulus,
5448 then we cannot pass through this conversion. */
5449 || (code != MULT_EXPR
5450 && (TYPE_UNSIGNED (ctype)
5451 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5452 break;
5454 /* Pass the constant down and see if we can make a simplification. If
5455 we can, replace this expression with the inner simplification for
5456 possible later conversion to our or some other type. */
5457 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5458 && TREE_CODE (t2) == INTEGER_CST
5459 && ! TREE_CONSTANT_OVERFLOW (t2)
5460 && (0 != (t1 = extract_muldiv (op0, t2, code,
5461 code == MULT_EXPR
5462 ? ctype : NULL_TREE))))
5463 return t1;
5464 break;
5466 case ABS_EXPR:
5467 /* If widening the type changes it from signed to unsigned, then we
5468 must avoid building ABS_EXPR itself as unsigned. */
5469 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5471 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5472 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5474 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5475 return fold_convert (ctype, t1);
5477 break;
5479 /* FALLTHROUGH */
5480 case NEGATE_EXPR:
5481 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5482 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5483 break;
5485 case MIN_EXPR: case MAX_EXPR:
5486 /* If widening the type changes the signedness, then we can't perform
5487 this optimization as that changes the result. */
5488 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5489 break;
5491 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5492 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5493 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5495 if (tree_int_cst_sgn (c) < 0)
5496 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5498 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5499 fold_convert (ctype, t2));
5501 break;
5503 case LSHIFT_EXPR: case RSHIFT_EXPR:
5504 /* If the second operand is constant, this is a multiplication
5505 or floor division, by a power of two, so we can treat it that
5506 way unless the multiplier or divisor overflows. Signed
5507 left-shift overflow is implementation-defined rather than
5508 undefined in C90, so do not convert signed left shift into
5509 multiplication. */
5510 if (TREE_CODE (op1) == INTEGER_CST
5511 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5512 /* const_binop may not detect overflow correctly,
5513 so check for it explicitly here. */
5514 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5515 && TREE_INT_CST_HIGH (op1) == 0
5516 && 0 != (t1 = fold_convert (ctype,
5517 const_binop (LSHIFT_EXPR,
5518 size_one_node,
5519 op1, 0)))
5520 && ! TREE_OVERFLOW (t1))
5521 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5522 ? MULT_EXPR : FLOOR_DIV_EXPR,
5523 ctype, fold_convert (ctype, op0), t1),
5524 c, code, wide_type);
5525 break;
5527 case PLUS_EXPR: case MINUS_EXPR:
5528 /* See if we can eliminate the operation on both sides. If we can, we
5529 can return a new PLUS or MINUS. If we can't, the only remaining
5530 cases where we can do anything are if the second operand is a
5531 constant. */
5532 t1 = extract_muldiv (op0, c, code, wide_type);
5533 t2 = extract_muldiv (op1, c, code, wide_type);
5534 if (t1 != 0 && t2 != 0
5535 && (code == MULT_EXPR
5536 /* If not multiplication, we can only do this if both operands
5537 are divisible by c. */
5538 || (multiple_of_p (ctype, op0, c)
5539 && multiple_of_p (ctype, op1, c))))
5540 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5541 fold_convert (ctype, t2));
5543 /* If this was a subtraction, negate OP1 and set it to be an addition.
5544 This simplifies the logic below. */
5545 if (tcode == MINUS_EXPR)
5546 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5548 if (TREE_CODE (op1) != INTEGER_CST)
5549 break;
5551 /* If either OP1 or C are negative, this optimization is not safe for
5552 some of the division and remainder types while for others we need
5553 to change the code. */
5554 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5556 if (code == CEIL_DIV_EXPR)
5557 code = FLOOR_DIV_EXPR;
5558 else if (code == FLOOR_DIV_EXPR)
5559 code = CEIL_DIV_EXPR;
5560 else if (code != MULT_EXPR
5561 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5562 break;
5565 /* If it's a multiply or a division/modulus operation of a multiple
5566 of our constant, do the operation and verify it doesn't overflow. */
5567 if (code == MULT_EXPR
5568 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5570 op1 = const_binop (code, fold_convert (ctype, op1),
5571 fold_convert (ctype, c), 0);
5572 /* We allow the constant to overflow with wrapping semantics. */
5573 if (op1 == 0
5574 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5575 break;
5577 else
5578 break;
5580 /* If we have an unsigned type is not a sizetype, we cannot widen
5581 the operation since it will change the result if the original
5582 computation overflowed. */
5583 if (TYPE_UNSIGNED (ctype)
5584 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5585 && ctype != type)
5586 break;
5588 /* If we were able to eliminate our operation from the first side,
5589 apply our operation to the second side and reform the PLUS. */
5590 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5591 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5593 /* The last case is if we are a multiply. In that case, we can
5594 apply the distributive law to commute the multiply and addition
5595 if the multiplication of the constants doesn't overflow. */
5596 if (code == MULT_EXPR)
5597 return fold_build2 (tcode, ctype,
5598 fold_build2 (code, ctype,
5599 fold_convert (ctype, op0),
5600 fold_convert (ctype, c)),
5601 op1);
5603 break;
5605 case MULT_EXPR:
5606 /* We have a special case here if we are doing something like
5607 (C * 8) % 4 since we know that's zero. */
5608 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5609 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5610 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5611 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5612 return omit_one_operand (type, integer_zero_node, op0);
5614 /* ... fall through ... */
5616 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5617 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5618 /* If we can extract our operation from the LHS, do so and return a
5619 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5620 do something only if the second operand is a constant. */
5621 if (same_p
5622 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5623 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5624 fold_convert (ctype, op1));
5625 else if (tcode == MULT_EXPR && code == MULT_EXPR
5626 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5627 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5628 fold_convert (ctype, t1));
5629 else if (TREE_CODE (op1) != INTEGER_CST)
5630 return 0;
5632 /* If these are the same operation types, we can associate them
5633 assuming no overflow. */
5634 if (tcode == code
5635 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5636 fold_convert (ctype, c), 0))
5637 && ! TREE_OVERFLOW (t1))
5638 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5640 /* If these operations "cancel" each other, we have the main
5641 optimizations of this pass, which occur when either constant is a
5642 multiple of the other, in which case we replace this with either an
5643 operation or CODE or TCODE.
5645 If we have an unsigned type that is not a sizetype, we cannot do
5646 this since it will change the result if the original computation
5647 overflowed. */
5648 if ((! TYPE_UNSIGNED (ctype)
5649 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5650 && ! flag_wrapv
5651 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5652 || (tcode == MULT_EXPR
5653 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5654 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5656 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5657 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5658 fold_convert (ctype,
5659 const_binop (TRUNC_DIV_EXPR,
5660 op1, c, 0)));
5661 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5662 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5663 fold_convert (ctype,
5664 const_binop (TRUNC_DIV_EXPR,
5665 c, op1, 0)));
5667 break;
5669 default:
5670 break;
5673 return 0;
5676 /* Return a node which has the indicated constant VALUE (either 0 or
5677 1), and is of the indicated TYPE. */
5679 tree
5680 constant_boolean_node (int value, tree type)
5682 if (type == integer_type_node)
5683 return value ? integer_one_node : integer_zero_node;
5684 else if (type == boolean_type_node)
5685 return value ? boolean_true_node : boolean_false_node;
5686 else
5687 return build_int_cst (type, value);
5691 /* Return true if expr looks like an ARRAY_REF and set base and
5692 offset to the appropriate trees. If there is no offset,
5693 offset is set to NULL_TREE. Base will be canonicalized to
5694 something you can get the element type from using
5695 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5696 in bytes to the base. */
5698 static bool
5699 extract_array_ref (tree expr, tree *base, tree *offset)
5701 /* One canonical form is a PLUS_EXPR with the first
5702 argument being an ADDR_EXPR with a possible NOP_EXPR
5703 attached. */
5704 if (TREE_CODE (expr) == PLUS_EXPR)
5706 tree op0 = TREE_OPERAND (expr, 0);
5707 tree inner_base, dummy1;
5708 /* Strip NOP_EXPRs here because the C frontends and/or
5709 folders present us (int *)&x.a + 4B possibly. */
5710 STRIP_NOPS (op0);
5711 if (extract_array_ref (op0, &inner_base, &dummy1))
5713 *base = inner_base;
5714 if (dummy1 == NULL_TREE)
5715 *offset = TREE_OPERAND (expr, 1);
5716 else
5717 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5718 dummy1, TREE_OPERAND (expr, 1));
5719 return true;
5722 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5723 which we transform into an ADDR_EXPR with appropriate
5724 offset. For other arguments to the ADDR_EXPR we assume
5725 zero offset and as such do not care about the ADDR_EXPR
5726 type and strip possible nops from it. */
5727 else if (TREE_CODE (expr) == ADDR_EXPR)
5729 tree op0 = TREE_OPERAND (expr, 0);
5730 if (TREE_CODE (op0) == ARRAY_REF)
5732 tree idx = TREE_OPERAND (op0, 1);
5733 *base = TREE_OPERAND (op0, 0);
5734 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5735 array_ref_element_size (op0));
5737 else
5739 /* Handle array-to-pointer decay as &a. */
5740 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5741 *base = TREE_OPERAND (expr, 0);
5742 else
5743 *base = expr;
5744 *offset = NULL_TREE;
5746 return true;
5748 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5749 else if (SSA_VAR_P (expr)
5750 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5752 *base = expr;
5753 *offset = NULL_TREE;
5754 return true;
5757 return false;
5761 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5762 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5763 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5764 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5765 COND is the first argument to CODE; otherwise (as in the example
5766 given here), it is the second argument. TYPE is the type of the
5767 original expression. Return NULL_TREE if no simplification is
5768 possible. */
5770 static tree
5771 fold_binary_op_with_conditional_arg (enum tree_code code,
5772 tree type, tree op0, tree op1,
5773 tree cond, tree arg, int cond_first_p)
5775 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5776 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5777 tree test, true_value, false_value;
5778 tree lhs = NULL_TREE;
5779 tree rhs = NULL_TREE;
5781 /* This transformation is only worthwhile if we don't have to wrap
5782 arg in a SAVE_EXPR, and the operation can be simplified on at least
5783 one of the branches once its pushed inside the COND_EXPR. */
5784 if (!TREE_CONSTANT (arg))
5785 return NULL_TREE;
5787 if (TREE_CODE (cond) == COND_EXPR)
5789 test = TREE_OPERAND (cond, 0);
5790 true_value = TREE_OPERAND (cond, 1);
5791 false_value = TREE_OPERAND (cond, 2);
5792 /* If this operand throws an expression, then it does not make
5793 sense to try to perform a logical or arithmetic operation
5794 involving it. */
5795 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5796 lhs = true_value;
5797 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5798 rhs = false_value;
5800 else
5802 tree testtype = TREE_TYPE (cond);
5803 test = cond;
5804 true_value = constant_boolean_node (true, testtype);
5805 false_value = constant_boolean_node (false, testtype);
5808 arg = fold_convert (arg_type, arg);
5809 if (lhs == 0)
5811 true_value = fold_convert (cond_type, true_value);
5812 if (cond_first_p)
5813 lhs = fold_build2 (code, type, true_value, arg);
5814 else
5815 lhs = fold_build2 (code, type, arg, true_value);
5817 if (rhs == 0)
5819 false_value = fold_convert (cond_type, false_value);
5820 if (cond_first_p)
5821 rhs = fold_build2 (code, type, false_value, arg);
5822 else
5823 rhs = fold_build2 (code, type, arg, false_value);
5826 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5827 return fold_convert (type, test);
5831 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5833 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5834 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5835 ADDEND is the same as X.
5837 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5838 and finite. The problematic cases are when X is zero, and its mode
5839 has signed zeros. In the case of rounding towards -infinity,
5840 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5841 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5843 static bool
5844 fold_real_zero_addition_p (tree type, tree addend, int negate)
5846 if (!real_zerop (addend))
5847 return false;
5849 /* Don't allow the fold with -fsignaling-nans. */
5850 if (HONOR_SNANS (TYPE_MODE (type)))
5851 return false;
5853 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5854 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5855 return true;
5857 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5858 if (TREE_CODE (addend) == REAL_CST
5859 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5860 negate = !negate;
5862 /* The mode has signed zeros, and we have to honor their sign.
5863 In this situation, there is only one case we can return true for.
5864 X - 0 is the same as X unless rounding towards -infinity is
5865 supported. */
5866 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5869 /* Subroutine of fold() that checks comparisons of built-in math
5870 functions against real constants.
5872 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5873 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5874 is the type of the result and ARG0 and ARG1 are the operands of the
5875 comparison. ARG1 must be a TREE_REAL_CST.
5877 The function returns the constant folded tree if a simplification
5878 can be made, and NULL_TREE otherwise. */
5880 static tree
5881 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5882 tree type, tree arg0, tree arg1)
5884 REAL_VALUE_TYPE c;
5886 if (BUILTIN_SQRT_P (fcode))
5888 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5889 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5891 c = TREE_REAL_CST (arg1);
5892 if (REAL_VALUE_NEGATIVE (c))
5894 /* sqrt(x) < y is always false, if y is negative. */
5895 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5896 return omit_one_operand (type, integer_zero_node, arg);
5898 /* sqrt(x) > y is always true, if y is negative and we
5899 don't care about NaNs, i.e. negative values of x. */
5900 if (code == NE_EXPR || !HONOR_NANS (mode))
5901 return omit_one_operand (type, integer_one_node, arg);
5903 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5904 return fold_build2 (GE_EXPR, type, arg,
5905 build_real (TREE_TYPE (arg), dconst0));
5907 else if (code == GT_EXPR || code == GE_EXPR)
5909 REAL_VALUE_TYPE c2;
5911 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5912 real_convert (&c2, mode, &c2);
5914 if (REAL_VALUE_ISINF (c2))
5916 /* sqrt(x) > y is x == +Inf, when y is very large. */
5917 if (HONOR_INFINITIES (mode))
5918 return fold_build2 (EQ_EXPR, type, arg,
5919 build_real (TREE_TYPE (arg), c2));
5921 /* sqrt(x) > y is always false, when y is very large
5922 and we don't care about infinities. */
5923 return omit_one_operand (type, integer_zero_node, arg);
5926 /* sqrt(x) > c is the same as x > c*c. */
5927 return fold_build2 (code, type, arg,
5928 build_real (TREE_TYPE (arg), c2));
5930 else if (code == LT_EXPR || code == LE_EXPR)
5932 REAL_VALUE_TYPE c2;
5934 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5935 real_convert (&c2, mode, &c2);
5937 if (REAL_VALUE_ISINF (c2))
5939 /* sqrt(x) < y is always true, when y is a very large
5940 value and we don't care about NaNs or Infinities. */
5941 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5942 return omit_one_operand (type, integer_one_node, arg);
5944 /* sqrt(x) < y is x != +Inf when y is very large and we
5945 don't care about NaNs. */
5946 if (! HONOR_NANS (mode))
5947 return fold_build2 (NE_EXPR, type, arg,
5948 build_real (TREE_TYPE (arg), c2));
5950 /* sqrt(x) < y is x >= 0 when y is very large and we
5951 don't care about Infinities. */
5952 if (! HONOR_INFINITIES (mode))
5953 return fold_build2 (GE_EXPR, type, arg,
5954 build_real (TREE_TYPE (arg), dconst0));
5956 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5957 if (lang_hooks.decls.global_bindings_p () != 0
5958 || CONTAINS_PLACEHOLDER_P (arg))
5959 return NULL_TREE;
5961 arg = save_expr (arg);
5962 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5963 fold_build2 (GE_EXPR, type, arg,
5964 build_real (TREE_TYPE (arg),
5965 dconst0)),
5966 fold_build2 (NE_EXPR, type, arg,
5967 build_real (TREE_TYPE (arg),
5968 c2)));
5971 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5972 if (! HONOR_NANS (mode))
5973 return fold_build2 (code, type, arg,
5974 build_real (TREE_TYPE (arg), c2));
5976 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5977 if (lang_hooks.decls.global_bindings_p () == 0
5978 && ! CONTAINS_PLACEHOLDER_P (arg))
5980 arg = save_expr (arg);
5981 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5982 fold_build2 (GE_EXPR, type, arg,
5983 build_real (TREE_TYPE (arg),
5984 dconst0)),
5985 fold_build2 (code, type, arg,
5986 build_real (TREE_TYPE (arg),
5987 c2)));
5992 return NULL_TREE;
5995 /* Subroutine of fold() that optimizes comparisons against Infinities,
5996 either +Inf or -Inf.
5998 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5999 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6000 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6002 The function returns the constant folded tree if a simplification
6003 can be made, and NULL_TREE otherwise. */
6005 static tree
6006 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6008 enum machine_mode mode;
6009 REAL_VALUE_TYPE max;
6010 tree temp;
6011 bool neg;
6013 mode = TYPE_MODE (TREE_TYPE (arg0));
6015 /* For negative infinity swap the sense of the comparison. */
6016 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6017 if (neg)
6018 code = swap_tree_comparison (code);
6020 switch (code)
6022 case GT_EXPR:
6023 /* x > +Inf is always false, if with ignore sNANs. */
6024 if (HONOR_SNANS (mode))
6025 return NULL_TREE;
6026 return omit_one_operand (type, integer_zero_node, arg0);
6028 case LE_EXPR:
6029 /* x <= +Inf is always true, if we don't case about NaNs. */
6030 if (! HONOR_NANS (mode))
6031 return omit_one_operand (type, integer_one_node, arg0);
6033 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6034 if (lang_hooks.decls.global_bindings_p () == 0
6035 && ! CONTAINS_PLACEHOLDER_P (arg0))
6037 arg0 = save_expr (arg0);
6038 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6040 break;
6042 case EQ_EXPR:
6043 case GE_EXPR:
6044 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6045 real_maxval (&max, neg, mode);
6046 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6047 arg0, build_real (TREE_TYPE (arg0), max));
6049 case LT_EXPR:
6050 /* x < +Inf is always equal to x <= DBL_MAX. */
6051 real_maxval (&max, neg, mode);
6052 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6053 arg0, build_real (TREE_TYPE (arg0), max));
6055 case NE_EXPR:
6056 /* x != +Inf is always equal to !(x > DBL_MAX). */
6057 real_maxval (&max, neg, mode);
6058 if (! HONOR_NANS (mode))
6059 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6060 arg0, build_real (TREE_TYPE (arg0), max));
6062 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6063 arg0, build_real (TREE_TYPE (arg0), max));
6064 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6066 default:
6067 break;
6070 return NULL_TREE;
6073 /* Subroutine of fold() that optimizes comparisons of a division by
6074 a nonzero integer constant against an integer constant, i.e.
6075 X/C1 op C2.
6077 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6078 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6079 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6081 The function returns the constant folded tree if a simplification
6082 can be made, and NULL_TREE otherwise. */
6084 static tree
6085 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6087 tree prod, tmp, hi, lo;
6088 tree arg00 = TREE_OPERAND (arg0, 0);
6089 tree arg01 = TREE_OPERAND (arg0, 1);
6090 unsigned HOST_WIDE_INT lpart;
6091 HOST_WIDE_INT hpart;
6092 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6093 bool neg_overflow;
6094 int overflow;
6096 /* We have to do this the hard way to detect unsigned overflow.
6097 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6098 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6099 TREE_INT_CST_HIGH (arg01),
6100 TREE_INT_CST_LOW (arg1),
6101 TREE_INT_CST_HIGH (arg1),
6102 &lpart, &hpart, unsigned_p);
6103 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6104 prod = force_fit_type (prod, -1, overflow, false);
6105 neg_overflow = false;
6107 if (unsigned_p)
6109 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6110 lo = prod;
6112 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6113 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6114 TREE_INT_CST_HIGH (prod),
6115 TREE_INT_CST_LOW (tmp),
6116 TREE_INT_CST_HIGH (tmp),
6117 &lpart, &hpart, unsigned_p);
6118 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6119 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6120 TREE_CONSTANT_OVERFLOW (prod));
6122 else if (tree_int_cst_sgn (arg01) >= 0)
6124 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6125 switch (tree_int_cst_sgn (arg1))
6127 case -1:
6128 neg_overflow = true;
6129 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6130 hi = prod;
6131 break;
6133 case 0:
6134 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6135 hi = tmp;
6136 break;
6138 case 1:
6139 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6140 lo = prod;
6141 break;
6143 default:
6144 gcc_unreachable ();
6147 else
6149 /* A negative divisor reverses the relational operators. */
6150 code = swap_tree_comparison (code);
6152 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6153 switch (tree_int_cst_sgn (arg1))
6155 case -1:
6156 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6157 lo = prod;
6158 break;
6160 case 0:
6161 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6162 lo = tmp;
6163 break;
6165 case 1:
6166 neg_overflow = true;
6167 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6168 hi = prod;
6169 break;
6171 default:
6172 gcc_unreachable ();
6176 switch (code)
6178 case EQ_EXPR:
6179 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6180 return omit_one_operand (type, integer_zero_node, arg00);
6181 if (TREE_OVERFLOW (hi))
6182 return fold_build2 (GE_EXPR, type, arg00, lo);
6183 if (TREE_OVERFLOW (lo))
6184 return fold_build2 (LE_EXPR, type, arg00, hi);
6185 return build_range_check (type, arg00, 1, lo, hi);
6187 case NE_EXPR:
6188 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6189 return omit_one_operand (type, integer_one_node, arg00);
6190 if (TREE_OVERFLOW (hi))
6191 return fold_build2 (LT_EXPR, type, arg00, lo);
6192 if (TREE_OVERFLOW (lo))
6193 return fold_build2 (GT_EXPR, type, arg00, hi);
6194 return build_range_check (type, arg00, 0, lo, hi);
6196 case LT_EXPR:
6197 if (TREE_OVERFLOW (lo))
6199 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6200 return omit_one_operand (type, tmp, arg00);
6202 return fold_build2 (LT_EXPR, type, arg00, lo);
6204 case LE_EXPR:
6205 if (TREE_OVERFLOW (hi))
6207 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6208 return omit_one_operand (type, tmp, arg00);
6210 return fold_build2 (LE_EXPR, type, arg00, hi);
6212 case GT_EXPR:
6213 if (TREE_OVERFLOW (hi))
6215 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6216 return omit_one_operand (type, tmp, arg00);
6218 return fold_build2 (GT_EXPR, type, arg00, hi);
6220 case GE_EXPR:
6221 if (TREE_OVERFLOW (lo))
6223 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6224 return omit_one_operand (type, tmp, arg00);
6226 return fold_build2 (GE_EXPR, type, arg00, lo);
6228 default:
6229 break;
6232 return NULL_TREE;
6236 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6237 equality/inequality test, then return a simplified form of the test
6238 using a sign testing. Otherwise return NULL. TYPE is the desired
6239 result type. */
6241 static tree
6242 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6243 tree result_type)
6245 /* If this is testing a single bit, we can optimize the test. */
6246 if ((code == NE_EXPR || code == EQ_EXPR)
6247 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6248 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6250 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6251 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6252 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6254 if (arg00 != NULL_TREE
6255 /* This is only a win if casting to a signed type is cheap,
6256 i.e. when arg00's type is not a partial mode. */
6257 && TYPE_PRECISION (TREE_TYPE (arg00))
6258 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6260 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6261 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6262 result_type, fold_convert (stype, arg00),
6263 build_int_cst (stype, 0));
6267 return NULL_TREE;
6270 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6271 equality/inequality test, then return a simplified form of
6272 the test using shifts and logical operations. Otherwise return
6273 NULL. TYPE is the desired result type. */
6275 tree
6276 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6277 tree result_type)
6279 /* If this is testing a single bit, we can optimize the test. */
6280 if ((code == NE_EXPR || code == EQ_EXPR)
6281 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6282 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6284 tree inner = TREE_OPERAND (arg0, 0);
6285 tree type = TREE_TYPE (arg0);
6286 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6287 enum machine_mode operand_mode = TYPE_MODE (type);
6288 int ops_unsigned;
6289 tree signed_type, unsigned_type, intermediate_type;
6290 tree tem;
6292 /* First, see if we can fold the single bit test into a sign-bit
6293 test. */
6294 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6295 result_type);
6296 if (tem)
6297 return tem;
6299 /* Otherwise we have (A & C) != 0 where C is a single bit,
6300 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6301 Similarly for (A & C) == 0. */
6303 /* If INNER is a right shift of a constant and it plus BITNUM does
6304 not overflow, adjust BITNUM and INNER. */
6305 if (TREE_CODE (inner) == RSHIFT_EXPR
6306 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6307 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6308 && bitnum < TYPE_PRECISION (type)
6309 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6310 bitnum - TYPE_PRECISION (type)))
6312 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6313 inner = TREE_OPERAND (inner, 0);
6316 /* If we are going to be able to omit the AND below, we must do our
6317 operations as unsigned. If we must use the AND, we have a choice.
6318 Normally unsigned is faster, but for some machines signed is. */
6319 #ifdef LOAD_EXTEND_OP
6320 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6321 && !flag_syntax_only) ? 0 : 1;
6322 #else
6323 ops_unsigned = 1;
6324 #endif
6326 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6327 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6328 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6329 inner = fold_convert (intermediate_type, inner);
6331 if (bitnum != 0)
6332 inner = build2 (RSHIFT_EXPR, intermediate_type,
6333 inner, size_int (bitnum));
6335 if (code == EQ_EXPR)
6336 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6337 inner, integer_one_node);
6339 /* Put the AND last so it can combine with more things. */
6340 inner = build2 (BIT_AND_EXPR, intermediate_type,
6341 inner, integer_one_node);
6343 /* Make sure to return the proper type. */
6344 inner = fold_convert (result_type, inner);
6346 return inner;
6348 return NULL_TREE;
6351 /* Check whether we are allowed to reorder operands arg0 and arg1,
6352 such that the evaluation of arg1 occurs before arg0. */
6354 static bool
6355 reorder_operands_p (tree arg0, tree arg1)
6357 if (! flag_evaluation_order)
6358 return true;
6359 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6360 return true;
6361 return ! TREE_SIDE_EFFECTS (arg0)
6362 && ! TREE_SIDE_EFFECTS (arg1);
6365 /* Test whether it is preferable two swap two operands, ARG0 and
6366 ARG1, for example because ARG0 is an integer constant and ARG1
6367 isn't. If REORDER is true, only recommend swapping if we can
6368 evaluate the operands in reverse order. */
6370 bool
6371 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6373 STRIP_SIGN_NOPS (arg0);
6374 STRIP_SIGN_NOPS (arg1);
6376 if (TREE_CODE (arg1) == INTEGER_CST)
6377 return 0;
6378 if (TREE_CODE (arg0) == INTEGER_CST)
6379 return 1;
6381 if (TREE_CODE (arg1) == REAL_CST)
6382 return 0;
6383 if (TREE_CODE (arg0) == REAL_CST)
6384 return 1;
6386 if (TREE_CODE (arg1) == COMPLEX_CST)
6387 return 0;
6388 if (TREE_CODE (arg0) == COMPLEX_CST)
6389 return 1;
6391 if (TREE_CONSTANT (arg1))
6392 return 0;
6393 if (TREE_CONSTANT (arg0))
6394 return 1;
6396 if (optimize_size)
6397 return 0;
6399 if (reorder && flag_evaluation_order
6400 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6401 return 0;
6403 if (DECL_P (arg1))
6404 return 0;
6405 if (DECL_P (arg0))
6406 return 1;
6408 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6409 for commutative and comparison operators. Ensuring a canonical
6410 form allows the optimizers to find additional redundancies without
6411 having to explicitly check for both orderings. */
6412 if (TREE_CODE (arg0) == SSA_NAME
6413 && TREE_CODE (arg1) == SSA_NAME
6414 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6415 return 1;
6417 return 0;
6420 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6421 ARG0 is extended to a wider type. */
6423 static tree
6424 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6426 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6427 tree arg1_unw;
6428 tree shorter_type, outer_type;
6429 tree min, max;
6430 bool above, below;
6432 if (arg0_unw == arg0)
6433 return NULL_TREE;
6434 shorter_type = TREE_TYPE (arg0_unw);
6436 #ifdef HAVE_canonicalize_funcptr_for_compare
6437 /* Disable this optimization if we're casting a function pointer
6438 type on targets that require function pointer canonicalization. */
6439 if (HAVE_canonicalize_funcptr_for_compare
6440 && TREE_CODE (shorter_type) == POINTER_TYPE
6441 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6442 return NULL_TREE;
6443 #endif
6445 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6446 return NULL_TREE;
6448 arg1_unw = get_unwidened (arg1, shorter_type);
6450 /* If possible, express the comparison in the shorter mode. */
6451 if ((code == EQ_EXPR || code == NE_EXPR
6452 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6453 && (TREE_TYPE (arg1_unw) == shorter_type
6454 || (TREE_CODE (arg1_unw) == INTEGER_CST
6455 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6456 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6457 && int_fits_type_p (arg1_unw, shorter_type))))
6458 return fold_build2 (code, type, arg0_unw,
6459 fold_convert (shorter_type, arg1_unw));
6461 if (TREE_CODE (arg1_unw) != INTEGER_CST
6462 || TREE_CODE (shorter_type) != INTEGER_TYPE
6463 || !int_fits_type_p (arg1_unw, shorter_type))
6464 return NULL_TREE;
6466 /* If we are comparing with the integer that does not fit into the range
6467 of the shorter type, the result is known. */
6468 outer_type = TREE_TYPE (arg1_unw);
6469 min = lower_bound_in_type (outer_type, shorter_type);
6470 max = upper_bound_in_type (outer_type, shorter_type);
6472 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6473 max, arg1_unw));
6474 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6475 arg1_unw, min));
6477 switch (code)
6479 case EQ_EXPR:
6480 if (above || below)
6481 return omit_one_operand (type, integer_zero_node, arg0);
6482 break;
6484 case NE_EXPR:
6485 if (above || below)
6486 return omit_one_operand (type, integer_one_node, arg0);
6487 break;
6489 case LT_EXPR:
6490 case LE_EXPR:
6491 if (above)
6492 return omit_one_operand (type, integer_one_node, arg0);
6493 else if (below)
6494 return omit_one_operand (type, integer_zero_node, arg0);
6496 case GT_EXPR:
6497 case GE_EXPR:
6498 if (above)
6499 return omit_one_operand (type, integer_zero_node, arg0);
6500 else if (below)
6501 return omit_one_operand (type, integer_one_node, arg0);
6503 default:
6504 break;
6507 return NULL_TREE;
6510 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6511 ARG0 just the signedness is changed. */
6513 static tree
6514 fold_sign_changed_comparison (enum tree_code code, tree type,
6515 tree arg0, tree arg1)
6517 tree arg0_inner, tmp;
6518 tree inner_type, outer_type;
6520 if (TREE_CODE (arg0) != NOP_EXPR
6521 && TREE_CODE (arg0) != CONVERT_EXPR)
6522 return NULL_TREE;
6524 outer_type = TREE_TYPE (arg0);
6525 arg0_inner = TREE_OPERAND (arg0, 0);
6526 inner_type = TREE_TYPE (arg0_inner);
6528 #ifdef HAVE_canonicalize_funcptr_for_compare
6529 /* Disable this optimization if we're casting a function pointer
6530 type on targets that require function pointer canonicalization. */
6531 if (HAVE_canonicalize_funcptr_for_compare
6532 && TREE_CODE (inner_type) == POINTER_TYPE
6533 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6534 return NULL_TREE;
6535 #endif
6537 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6538 return NULL_TREE;
6540 if (TREE_CODE (arg1) != INTEGER_CST
6541 && !((TREE_CODE (arg1) == NOP_EXPR
6542 || TREE_CODE (arg1) == CONVERT_EXPR)
6543 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6544 return NULL_TREE;
6546 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6547 && code != NE_EXPR
6548 && code != EQ_EXPR)
6549 return NULL_TREE;
6551 if (TREE_CODE (arg1) == INTEGER_CST)
6553 tmp = build_int_cst_wide (inner_type,
6554 TREE_INT_CST_LOW (arg1),
6555 TREE_INT_CST_HIGH (arg1));
6556 arg1 = force_fit_type (tmp, 0,
6557 TREE_OVERFLOW (arg1),
6558 TREE_CONSTANT_OVERFLOW (arg1));
6560 else
6561 arg1 = fold_convert (inner_type, arg1);
6563 return fold_build2 (code, type, arg0_inner, arg1);
6566 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6567 step of the array. Reconstructs s and delta in the case of s * delta
6568 being an integer constant (and thus already folded).
6569 ADDR is the address. MULT is the multiplicative expression.
6570 If the function succeeds, the new address expression is returned. Otherwise
6571 NULL_TREE is returned. */
6573 static tree
6574 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6576 tree s, delta, step;
6577 tree ref = TREE_OPERAND (addr, 0), pref;
6578 tree ret, pos;
6579 tree itype;
6581 /* Canonicalize op1 into a possibly non-constant delta
6582 and an INTEGER_CST s. */
6583 if (TREE_CODE (op1) == MULT_EXPR)
6585 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6587 STRIP_NOPS (arg0);
6588 STRIP_NOPS (arg1);
6590 if (TREE_CODE (arg0) == INTEGER_CST)
6592 s = arg0;
6593 delta = arg1;
6595 else if (TREE_CODE (arg1) == INTEGER_CST)
6597 s = arg1;
6598 delta = arg0;
6600 else
6601 return NULL_TREE;
6603 else if (TREE_CODE (op1) == INTEGER_CST)
6605 delta = op1;
6606 s = NULL_TREE;
6608 else
6610 /* Simulate we are delta * 1. */
6611 delta = op1;
6612 s = integer_one_node;
6615 for (;; ref = TREE_OPERAND (ref, 0))
6617 if (TREE_CODE (ref) == ARRAY_REF)
6619 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6620 if (! itype)
6621 continue;
6623 step = array_ref_element_size (ref);
6624 if (TREE_CODE (step) != INTEGER_CST)
6625 continue;
6627 if (s)
6629 if (! tree_int_cst_equal (step, s))
6630 continue;
6632 else
6634 /* Try if delta is a multiple of step. */
6635 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6636 if (! tmp)
6637 continue;
6638 delta = tmp;
6641 break;
6644 if (!handled_component_p (ref))
6645 return NULL_TREE;
6648 /* We found the suitable array reference. So copy everything up to it,
6649 and replace the index. */
6651 pref = TREE_OPERAND (addr, 0);
6652 ret = copy_node (pref);
6653 pos = ret;
6655 while (pref != ref)
6657 pref = TREE_OPERAND (pref, 0);
6658 TREE_OPERAND (pos, 0) = copy_node (pref);
6659 pos = TREE_OPERAND (pos, 0);
6662 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6663 fold_convert (itype,
6664 TREE_OPERAND (pos, 1)),
6665 fold_convert (itype, delta));
6667 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6671 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6672 means A >= Y && A != MAX, but in this case we know that
6673 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6675 static tree
6676 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6678 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6680 if (TREE_CODE (bound) == LT_EXPR)
6681 a = TREE_OPERAND (bound, 0);
6682 else if (TREE_CODE (bound) == GT_EXPR)
6683 a = TREE_OPERAND (bound, 1);
6684 else
6685 return NULL_TREE;
6687 typea = TREE_TYPE (a);
6688 if (!INTEGRAL_TYPE_P (typea)
6689 && !POINTER_TYPE_P (typea))
6690 return NULL_TREE;
6692 if (TREE_CODE (ineq) == LT_EXPR)
6694 a1 = TREE_OPERAND (ineq, 1);
6695 y = TREE_OPERAND (ineq, 0);
6697 else if (TREE_CODE (ineq) == GT_EXPR)
6699 a1 = TREE_OPERAND (ineq, 0);
6700 y = TREE_OPERAND (ineq, 1);
6702 else
6703 return NULL_TREE;
6705 if (TREE_TYPE (a1) != typea)
6706 return NULL_TREE;
6708 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6709 if (!integer_onep (diff))
6710 return NULL_TREE;
6712 return fold_build2 (GE_EXPR, type, a, y);
6715 /* Fold a sum or difference of at least one multiplication.
6716 Returns the folded tree or NULL if no simplification could be made. */
6718 static tree
6719 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6721 tree arg00, arg01, arg10, arg11;
6722 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6724 /* (A * C) +- (B * C) -> (A+-B) * C.
6725 (A * C) +- A -> A * (C+-1).
6726 We are most concerned about the case where C is a constant,
6727 but other combinations show up during loop reduction. Since
6728 it is not difficult, try all four possibilities. */
6730 if (TREE_CODE (arg0) == MULT_EXPR)
6732 arg00 = TREE_OPERAND (arg0, 0);
6733 arg01 = TREE_OPERAND (arg0, 1);
6735 else
6737 arg00 = arg0;
6738 arg01 = build_one_cst (type);
6740 if (TREE_CODE (arg1) == MULT_EXPR)
6742 arg10 = TREE_OPERAND (arg1, 0);
6743 arg11 = TREE_OPERAND (arg1, 1);
6745 else
6747 arg10 = arg1;
6748 arg11 = build_one_cst (type);
6750 same = NULL_TREE;
6752 if (operand_equal_p (arg01, arg11, 0))
6753 same = arg01, alt0 = arg00, alt1 = arg10;
6754 else if (operand_equal_p (arg00, arg10, 0))
6755 same = arg00, alt0 = arg01, alt1 = arg11;
6756 else if (operand_equal_p (arg00, arg11, 0))
6757 same = arg00, alt0 = arg01, alt1 = arg10;
6758 else if (operand_equal_p (arg01, arg10, 0))
6759 same = arg01, alt0 = arg00, alt1 = arg11;
6761 /* No identical multiplicands; see if we can find a common
6762 power-of-two factor in non-power-of-two multiplies. This
6763 can help in multi-dimensional array access. */
6764 else if (host_integerp (arg01, 0)
6765 && host_integerp (arg11, 0))
6767 HOST_WIDE_INT int01, int11, tmp;
6768 bool swap = false;
6769 tree maybe_same;
6770 int01 = TREE_INT_CST_LOW (arg01);
6771 int11 = TREE_INT_CST_LOW (arg11);
6773 /* Move min of absolute values to int11. */
6774 if ((int01 >= 0 ? int01 : -int01)
6775 < (int11 >= 0 ? int11 : -int11))
6777 tmp = int01, int01 = int11, int11 = tmp;
6778 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6779 maybe_same = arg01;
6780 swap = true;
6782 else
6783 maybe_same = arg11;
6785 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
6787 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6788 build_int_cst (TREE_TYPE (arg00),
6789 int01 / int11));
6790 alt1 = arg10;
6791 same = maybe_same;
6792 if (swap)
6793 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6797 if (same)
6798 return fold_build2 (MULT_EXPR, type,
6799 fold_build2 (code, type,
6800 fold_convert (type, alt0),
6801 fold_convert (type, alt1)),
6802 fold_convert (type, same));
6804 return NULL_TREE;
6807 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6808 specified by EXPR into the buffer PTR of length LEN bytes.
6809 Return the number of bytes placed in the buffer, or zero
6810 upon failure. */
6812 static int
6813 native_encode_int (tree expr, unsigned char *ptr, int len)
6815 tree type = TREE_TYPE (expr);
6816 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6817 int byte, offset, word, words;
6818 unsigned char value;
6820 if (total_bytes > len)
6821 return 0;
6822 words = total_bytes / UNITS_PER_WORD;
6824 for (byte = 0; byte < total_bytes; byte++)
6826 int bitpos = byte * BITS_PER_UNIT;
6827 if (bitpos < HOST_BITS_PER_WIDE_INT)
6828 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6829 else
6830 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6831 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6833 if (total_bytes > UNITS_PER_WORD)
6835 word = byte / UNITS_PER_WORD;
6836 if (WORDS_BIG_ENDIAN)
6837 word = (words - 1) - word;
6838 offset = word * UNITS_PER_WORD;
6839 if (BYTES_BIG_ENDIAN)
6840 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6841 else
6842 offset += byte % UNITS_PER_WORD;
6844 else
6845 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6846 ptr[offset] = value;
6848 return total_bytes;
6852 /* Subroutine of native_encode_expr. Encode the REAL_CST
6853 specified by EXPR into the buffer PTR of length LEN bytes.
6854 Return the number of bytes placed in the buffer, or zero
6855 upon failure. */
6857 static int
6858 native_encode_real (tree expr, unsigned char *ptr, int len)
6860 tree type = TREE_TYPE (expr);
6861 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6862 int byte, offset, word, words;
6863 unsigned char value;
6865 /* There are always 32 bits in each long, no matter the size of
6866 the hosts long. We handle floating point representations with
6867 up to 192 bits. */
6868 long tmp[6];
6870 if (total_bytes > len)
6871 return 0;
6872 words = total_bytes / UNITS_PER_WORD;
6874 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6876 for (byte = 0; byte < total_bytes; byte++)
6878 int bitpos = byte * BITS_PER_UNIT;
6879 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6881 if (total_bytes > UNITS_PER_WORD)
6883 word = byte / UNITS_PER_WORD;
6884 if (FLOAT_WORDS_BIG_ENDIAN)
6885 word = (words - 1) - word;
6886 offset = word * UNITS_PER_WORD;
6887 if (BYTES_BIG_ENDIAN)
6888 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6889 else
6890 offset += byte % UNITS_PER_WORD;
6892 else
6893 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6894 ptr[offset] = value;
6896 return total_bytes;
6899 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6900 specified by EXPR into the buffer PTR of length LEN bytes.
6901 Return the number of bytes placed in the buffer, or zero
6902 upon failure. */
6904 static int
6905 native_encode_complex (tree expr, unsigned char *ptr, int len)
6907 int rsize, isize;
6908 tree part;
6910 part = TREE_REALPART (expr);
6911 rsize = native_encode_expr (part, ptr, len);
6912 if (rsize == 0)
6913 return 0;
6914 part = TREE_IMAGPART (expr);
6915 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6916 if (isize != rsize)
6917 return 0;
6918 return rsize + isize;
6922 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6923 specified by EXPR into the buffer PTR of length LEN bytes.
6924 Return the number of bytes placed in the buffer, or zero
6925 upon failure. */
6927 static int
6928 native_encode_vector (tree expr, unsigned char *ptr, int len)
6930 int i, size, offset, count;
6931 tree itype, elem, elements;
6933 offset = 0;
6934 elements = TREE_VECTOR_CST_ELTS (expr);
6935 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6936 itype = TREE_TYPE (TREE_TYPE (expr));
6937 size = GET_MODE_SIZE (TYPE_MODE (itype));
6938 for (i = 0; i < count; i++)
6940 if (elements)
6942 elem = TREE_VALUE (elements);
6943 elements = TREE_CHAIN (elements);
6945 else
6946 elem = NULL_TREE;
6948 if (elem)
6950 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
6951 return 0;
6953 else
6955 if (offset + size > len)
6956 return 0;
6957 memset (ptr+offset, 0, size);
6959 offset += size;
6961 return offset;
6965 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
6966 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
6967 buffer PTR of length LEN bytes. Return the number of bytes
6968 placed in the buffer, or zero upon failure. */
6970 static int
6971 native_encode_expr (tree expr, unsigned char *ptr, int len)
6973 switch (TREE_CODE (expr))
6975 case INTEGER_CST:
6976 return native_encode_int (expr, ptr, len);
6978 case REAL_CST:
6979 return native_encode_real (expr, ptr, len);
6981 case COMPLEX_CST:
6982 return native_encode_complex (expr, ptr, len);
6984 case VECTOR_CST:
6985 return native_encode_vector (expr, ptr, len);
6987 default:
6988 return 0;
6993 /* Subroutine of native_interpret_expr. Interpret the contents of
6994 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
6995 If the buffer cannot be interpreted, return NULL_TREE. */
6997 static tree
6998 native_interpret_int (tree type, unsigned char *ptr, int len)
7000 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7001 int byte, offset, word, words;
7002 unsigned char value;
7003 unsigned int HOST_WIDE_INT lo = 0;
7004 HOST_WIDE_INT hi = 0;
7006 if (total_bytes > len)
7007 return NULL_TREE;
7008 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7009 return NULL_TREE;
7010 words = total_bytes / UNITS_PER_WORD;
7012 for (byte = 0; byte < total_bytes; byte++)
7014 int bitpos = byte * BITS_PER_UNIT;
7015 if (total_bytes > UNITS_PER_WORD)
7017 word = byte / UNITS_PER_WORD;
7018 if (WORDS_BIG_ENDIAN)
7019 word = (words - 1) - word;
7020 offset = word * UNITS_PER_WORD;
7021 if (BYTES_BIG_ENDIAN)
7022 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7023 else
7024 offset += byte % UNITS_PER_WORD;
7026 else
7027 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7028 value = ptr[offset];
7030 if (bitpos < HOST_BITS_PER_WIDE_INT)
7031 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7032 else
7033 hi |= (unsigned HOST_WIDE_INT) value
7034 << (bitpos - HOST_BITS_PER_WIDE_INT);
7037 return force_fit_type (build_int_cst_wide (type, lo, hi),
7038 0, false, false);
7042 /* Subroutine of native_interpret_expr. Interpret the contents of
7043 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7044 If the buffer cannot be interpreted, return NULL_TREE. */
7046 static tree
7047 native_interpret_real (tree type, unsigned char *ptr, int len)
7049 enum machine_mode mode = TYPE_MODE (type);
7050 int total_bytes = GET_MODE_SIZE (mode);
7051 int byte, offset, word, words;
7052 unsigned char value;
7053 /* There are always 32 bits in each long, no matter the size of
7054 the hosts long. We handle floating point representations with
7055 up to 192 bits. */
7056 REAL_VALUE_TYPE r;
7057 long tmp[6];
7059 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7060 if (total_bytes > len || total_bytes > 24)
7061 return NULL_TREE;
7062 words = total_bytes / UNITS_PER_WORD;
7064 memset (tmp, 0, sizeof (tmp));
7065 for (byte = 0; byte < total_bytes; byte++)
7067 int bitpos = byte * BITS_PER_UNIT;
7068 if (total_bytes > UNITS_PER_WORD)
7070 word = byte / UNITS_PER_WORD;
7071 if (FLOAT_WORDS_BIG_ENDIAN)
7072 word = (words - 1) - word;
7073 offset = word * UNITS_PER_WORD;
7074 if (BYTES_BIG_ENDIAN)
7075 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7076 else
7077 offset += byte % UNITS_PER_WORD;
7079 else
7080 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7081 value = ptr[offset];
7083 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7086 real_from_target (&r, tmp, mode);
7087 return build_real (type, r);
7091 /* Subroutine of native_interpret_expr. Interpret the contents of
7092 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7093 If the buffer cannot be interpreted, return NULL_TREE. */
7095 static tree
7096 native_interpret_complex (tree type, unsigned char *ptr, int len)
7098 tree etype, rpart, ipart;
7099 int size;
7101 etype = TREE_TYPE (type);
7102 size = GET_MODE_SIZE (TYPE_MODE (etype));
7103 if (size * 2 > len)
7104 return NULL_TREE;
7105 rpart = native_interpret_expr (etype, ptr, size);
7106 if (!rpart)
7107 return NULL_TREE;
7108 ipart = native_interpret_expr (etype, ptr+size, size);
7109 if (!ipart)
7110 return NULL_TREE;
7111 return build_complex (type, rpart, ipart);
7115 /* Subroutine of native_interpret_expr. Interpret the contents of
7116 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7117 If the buffer cannot be interpreted, return NULL_TREE. */
7119 static tree
7120 native_interpret_vector (tree type, unsigned char *ptr, int len)
7122 tree etype, elem, elements;
7123 int i, size, count;
7125 etype = TREE_TYPE (type);
7126 size = GET_MODE_SIZE (TYPE_MODE (etype));
7127 count = TYPE_VECTOR_SUBPARTS (type);
7128 if (size * count > len)
7129 return NULL_TREE;
7131 elements = NULL_TREE;
7132 for (i = count - 1; i >= 0; i--)
7134 elem = native_interpret_expr (etype, ptr+(i*size), size);
7135 if (!elem)
7136 return NULL_TREE;
7137 elements = tree_cons (NULL_TREE, elem, elements);
7139 return build_vector (type, elements);
7143 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7144 the buffer PTR of length LEN as a constant of type TYPE. For
7145 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7146 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7147 return NULL_TREE. */
7149 static tree
7150 native_interpret_expr (tree type, unsigned char *ptr, int len)
7152 switch (TREE_CODE (type))
7154 case INTEGER_TYPE:
7155 case ENUMERAL_TYPE:
7156 case BOOLEAN_TYPE:
7157 return native_interpret_int (type, ptr, len);
7159 case REAL_TYPE:
7160 return native_interpret_real (type, ptr, len);
7162 case COMPLEX_TYPE:
7163 return native_interpret_complex (type, ptr, len);
7165 case VECTOR_TYPE:
7166 return native_interpret_vector (type, ptr, len);
7168 default:
7169 return NULL_TREE;
7174 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7175 TYPE at compile-time. If we're unable to perform the conversion
7176 return NULL_TREE. */
7178 static tree
7179 fold_view_convert_expr (tree type, tree expr)
7181 /* We support up to 512-bit values (for V8DFmode). */
7182 unsigned char buffer[64];
7183 int len;
7185 /* Check that the host and target are sane. */
7186 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7187 return NULL_TREE;
7189 len = native_encode_expr (expr, buffer, sizeof (buffer));
7190 if (len == 0)
7191 return NULL_TREE;
7193 return native_interpret_expr (type, buffer, len);
7197 /* Fold a unary expression of code CODE and type TYPE with operand
7198 OP0. Return the folded expression if folding is successful.
7199 Otherwise, return NULL_TREE. */
7201 tree
7202 fold_unary (enum tree_code code, tree type, tree op0)
7204 tree tem;
7205 tree arg0;
7206 enum tree_code_class kind = TREE_CODE_CLASS (code);
7208 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7209 && TREE_CODE_LENGTH (code) == 1);
7211 arg0 = op0;
7212 if (arg0)
7214 if (code == NOP_EXPR || code == CONVERT_EXPR
7215 || code == FLOAT_EXPR || code == ABS_EXPR)
7217 /* Don't use STRIP_NOPS, because signedness of argument type
7218 matters. */
7219 STRIP_SIGN_NOPS (arg0);
7221 else
7223 /* Strip any conversions that don't change the mode. This
7224 is safe for every expression, except for a comparison
7225 expression because its signedness is derived from its
7226 operands.
7228 Note that this is done as an internal manipulation within
7229 the constant folder, in order to find the simplest
7230 representation of the arguments so that their form can be
7231 studied. In any cases, the appropriate type conversions
7232 should be put back in the tree that will get out of the
7233 constant folder. */
7234 STRIP_NOPS (arg0);
7238 if (TREE_CODE_CLASS (code) == tcc_unary)
7240 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7241 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7242 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7243 else if (TREE_CODE (arg0) == COND_EXPR)
7245 tree arg01 = TREE_OPERAND (arg0, 1);
7246 tree arg02 = TREE_OPERAND (arg0, 2);
7247 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7248 arg01 = fold_build1 (code, type, arg01);
7249 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7250 arg02 = fold_build1 (code, type, arg02);
7251 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7252 arg01, arg02);
7254 /* If this was a conversion, and all we did was to move into
7255 inside the COND_EXPR, bring it back out. But leave it if
7256 it is a conversion from integer to integer and the
7257 result precision is no wider than a word since such a
7258 conversion is cheap and may be optimized away by combine,
7259 while it couldn't if it were outside the COND_EXPR. Then return
7260 so we don't get into an infinite recursion loop taking the
7261 conversion out and then back in. */
7263 if ((code == NOP_EXPR || code == CONVERT_EXPR
7264 || code == NON_LVALUE_EXPR)
7265 && TREE_CODE (tem) == COND_EXPR
7266 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7267 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7268 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7269 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7270 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7271 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7272 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7273 && (INTEGRAL_TYPE_P
7274 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7275 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7276 || flag_syntax_only))
7277 tem = build1 (code, type,
7278 build3 (COND_EXPR,
7279 TREE_TYPE (TREE_OPERAND
7280 (TREE_OPERAND (tem, 1), 0)),
7281 TREE_OPERAND (tem, 0),
7282 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7283 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7284 return tem;
7286 else if (COMPARISON_CLASS_P (arg0))
7288 if (TREE_CODE (type) == BOOLEAN_TYPE)
7290 arg0 = copy_node (arg0);
7291 TREE_TYPE (arg0) = type;
7292 return arg0;
7294 else if (TREE_CODE (type) != INTEGER_TYPE)
7295 return fold_build3 (COND_EXPR, type, arg0,
7296 fold_build1 (code, type,
7297 integer_one_node),
7298 fold_build1 (code, type,
7299 integer_zero_node));
7303 switch (code)
7305 case NOP_EXPR:
7306 case FLOAT_EXPR:
7307 case CONVERT_EXPR:
7308 case FIX_TRUNC_EXPR:
7309 case FIX_CEIL_EXPR:
7310 case FIX_FLOOR_EXPR:
7311 case FIX_ROUND_EXPR:
7312 if (TREE_TYPE (op0) == type)
7313 return op0;
7315 /* If we have (type) (a CMP b) and type is an integral type, return
7316 new expression involving the new type. */
7317 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7318 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7319 TREE_OPERAND (op0, 1));
7321 /* Handle cases of two conversions in a row. */
7322 if (TREE_CODE (op0) == NOP_EXPR
7323 || TREE_CODE (op0) == CONVERT_EXPR)
7325 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7326 tree inter_type = TREE_TYPE (op0);
7327 int inside_int = INTEGRAL_TYPE_P (inside_type);
7328 int inside_ptr = POINTER_TYPE_P (inside_type);
7329 int inside_float = FLOAT_TYPE_P (inside_type);
7330 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7331 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7332 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7333 int inter_int = INTEGRAL_TYPE_P (inter_type);
7334 int inter_ptr = POINTER_TYPE_P (inter_type);
7335 int inter_float = FLOAT_TYPE_P (inter_type);
7336 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7337 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7338 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7339 int final_int = INTEGRAL_TYPE_P (type);
7340 int final_ptr = POINTER_TYPE_P (type);
7341 int final_float = FLOAT_TYPE_P (type);
7342 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7343 unsigned int final_prec = TYPE_PRECISION (type);
7344 int final_unsignedp = TYPE_UNSIGNED (type);
7346 /* In addition to the cases of two conversions in a row
7347 handled below, if we are converting something to its own
7348 type via an object of identical or wider precision, neither
7349 conversion is needed. */
7350 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7351 && (((inter_int || inter_ptr) && final_int)
7352 || (inter_float && final_float))
7353 && inter_prec >= final_prec)
7354 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7356 /* Likewise, if the intermediate and final types are either both
7357 float or both integer, we don't need the middle conversion if
7358 it is wider than the final type and doesn't change the signedness
7359 (for integers). Avoid this if the final type is a pointer
7360 since then we sometimes need the inner conversion. Likewise if
7361 the outer has a precision not equal to the size of its mode. */
7362 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7363 || (inter_float && inside_float)
7364 || (inter_vec && inside_vec))
7365 && inter_prec >= inside_prec
7366 && (inter_float || inter_vec
7367 || inter_unsignedp == inside_unsignedp)
7368 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7369 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7370 && ! final_ptr
7371 && (! final_vec || inter_prec == inside_prec))
7372 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7374 /* If we have a sign-extension of a zero-extended value, we can
7375 replace that by a single zero-extension. */
7376 if (inside_int && inter_int && final_int
7377 && inside_prec < inter_prec && inter_prec < final_prec
7378 && inside_unsignedp && !inter_unsignedp)
7379 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7381 /* Two conversions in a row are not needed unless:
7382 - some conversion is floating-point (overstrict for now), or
7383 - some conversion is a vector (overstrict for now), or
7384 - the intermediate type is narrower than both initial and
7385 final, or
7386 - the intermediate type and innermost type differ in signedness,
7387 and the outermost type is wider than the intermediate, or
7388 - the initial type is a pointer type and the precisions of the
7389 intermediate and final types differ, or
7390 - the final type is a pointer type and the precisions of the
7391 initial and intermediate types differ.
7392 - the final type is a pointer type and the initial type not
7393 - the initial type is a pointer to an array and the final type
7394 not. */
7395 if (! inside_float && ! inter_float && ! final_float
7396 && ! inside_vec && ! inter_vec && ! final_vec
7397 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7398 && ! (inside_int && inter_int
7399 && inter_unsignedp != inside_unsignedp
7400 && inter_prec < final_prec)
7401 && ((inter_unsignedp && inter_prec > inside_prec)
7402 == (final_unsignedp && final_prec > inter_prec))
7403 && ! (inside_ptr && inter_prec != final_prec)
7404 && ! (final_ptr && inside_prec != inter_prec)
7405 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7406 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7407 && final_ptr == inside_ptr
7408 && ! (inside_ptr
7409 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7410 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7411 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7414 /* Handle (T *)&A.B.C for A being of type T and B and C
7415 living at offset zero. This occurs frequently in
7416 C++ upcasting and then accessing the base. */
7417 if (TREE_CODE (op0) == ADDR_EXPR
7418 && POINTER_TYPE_P (type)
7419 && handled_component_p (TREE_OPERAND (op0, 0)))
7421 HOST_WIDE_INT bitsize, bitpos;
7422 tree offset;
7423 enum machine_mode mode;
7424 int unsignedp, volatilep;
7425 tree base = TREE_OPERAND (op0, 0);
7426 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7427 &mode, &unsignedp, &volatilep, false);
7428 /* If the reference was to a (constant) zero offset, we can use
7429 the address of the base if it has the same base type
7430 as the result type. */
7431 if (! offset && bitpos == 0
7432 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7433 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7434 return fold_convert (type, build_fold_addr_expr (base));
7437 if (TREE_CODE (op0) == MODIFY_EXPR
7438 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7439 /* Detect assigning a bitfield. */
7440 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7441 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7443 /* Don't leave an assignment inside a conversion
7444 unless assigning a bitfield. */
7445 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7446 /* First do the assignment, then return converted constant. */
7447 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7448 TREE_NO_WARNING (tem) = 1;
7449 TREE_USED (tem) = 1;
7450 return tem;
7453 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7454 constants (if x has signed type, the sign bit cannot be set
7455 in c). This folds extension into the BIT_AND_EXPR. */
7456 if (INTEGRAL_TYPE_P (type)
7457 && TREE_CODE (type) != BOOLEAN_TYPE
7458 && TREE_CODE (op0) == BIT_AND_EXPR
7459 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7461 tree and = op0;
7462 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7463 int change = 0;
7465 if (TYPE_UNSIGNED (TREE_TYPE (and))
7466 || (TYPE_PRECISION (type)
7467 <= TYPE_PRECISION (TREE_TYPE (and))))
7468 change = 1;
7469 else if (TYPE_PRECISION (TREE_TYPE (and1))
7470 <= HOST_BITS_PER_WIDE_INT
7471 && host_integerp (and1, 1))
7473 unsigned HOST_WIDE_INT cst;
7475 cst = tree_low_cst (and1, 1);
7476 cst &= (HOST_WIDE_INT) -1
7477 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7478 change = (cst == 0);
7479 #ifdef LOAD_EXTEND_OP
7480 if (change
7481 && !flag_syntax_only
7482 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7483 == ZERO_EXTEND))
7485 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7486 and0 = fold_convert (uns, and0);
7487 and1 = fold_convert (uns, and1);
7489 #endif
7491 if (change)
7493 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7494 TREE_INT_CST_HIGH (and1));
7495 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7496 TREE_CONSTANT_OVERFLOW (and1));
7497 return fold_build2 (BIT_AND_EXPR, type,
7498 fold_convert (type, and0), tem);
7502 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7503 T2 being pointers to types of the same size. */
7504 if (POINTER_TYPE_P (type)
7505 && BINARY_CLASS_P (arg0)
7506 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7507 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7509 tree arg00 = TREE_OPERAND (arg0, 0);
7510 tree t0 = type;
7511 tree t1 = TREE_TYPE (arg00);
7512 tree tt0 = TREE_TYPE (t0);
7513 tree tt1 = TREE_TYPE (t1);
7514 tree s0 = TYPE_SIZE (tt0);
7515 tree s1 = TYPE_SIZE (tt1);
7517 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7518 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7519 TREE_OPERAND (arg0, 1));
7522 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7523 of the same precision, and X is a integer type not narrower than
7524 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7525 if (INTEGRAL_TYPE_P (type)
7526 && TREE_CODE (op0) == BIT_NOT_EXPR
7527 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7528 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7529 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7530 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7532 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7533 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7534 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7535 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7538 tem = fold_convert_const (code, type, arg0);
7539 return tem ? tem : NULL_TREE;
7541 case VIEW_CONVERT_EXPR:
7542 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7543 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7544 return fold_view_convert_expr (type, op0);
7546 case NEGATE_EXPR:
7547 tem = fold_negate_expr (arg0);
7548 if (tem)
7549 return fold_convert (type, tem);
7550 return NULL_TREE;
7552 case ABS_EXPR:
7553 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7554 return fold_abs_const (arg0, type);
7555 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7556 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7557 /* Convert fabs((double)float) into (double)fabsf(float). */
7558 else if (TREE_CODE (arg0) == NOP_EXPR
7559 && TREE_CODE (type) == REAL_TYPE)
7561 tree targ0 = strip_float_extensions (arg0);
7562 if (targ0 != arg0)
7563 return fold_convert (type, fold_build1 (ABS_EXPR,
7564 TREE_TYPE (targ0),
7565 targ0));
7567 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7568 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7569 return arg0;
7571 /* Strip sign ops from argument. */
7572 if (TREE_CODE (type) == REAL_TYPE)
7574 tem = fold_strip_sign_ops (arg0);
7575 if (tem)
7576 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7578 return NULL_TREE;
7580 case CONJ_EXPR:
7581 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7582 return fold_convert (type, arg0);
7583 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7585 tree itype = TREE_TYPE (type);
7586 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7587 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7588 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7590 if (TREE_CODE (arg0) == COMPLEX_CST)
7592 tree itype = TREE_TYPE (type);
7593 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7594 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7595 return build_complex (type, rpart, negate_expr (ipart));
7597 if (TREE_CODE (arg0) == CONJ_EXPR)
7598 return fold_convert (type, TREE_OPERAND (arg0, 0));
7599 return NULL_TREE;
7601 case BIT_NOT_EXPR:
7602 if (TREE_CODE (arg0) == INTEGER_CST)
7603 return fold_not_const (arg0, type);
7604 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7605 return TREE_OPERAND (arg0, 0);
7606 /* Convert ~ (-A) to A - 1. */
7607 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7608 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7609 build_int_cst (type, 1));
7610 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7611 else if (INTEGRAL_TYPE_P (type)
7612 && ((TREE_CODE (arg0) == MINUS_EXPR
7613 && integer_onep (TREE_OPERAND (arg0, 1)))
7614 || (TREE_CODE (arg0) == PLUS_EXPR
7615 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7616 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7617 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7618 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7619 && (tem = fold_unary (BIT_NOT_EXPR, type,
7620 fold_convert (type,
7621 TREE_OPERAND (arg0, 0)))))
7622 return fold_build2 (BIT_XOR_EXPR, type, tem,
7623 fold_convert (type, TREE_OPERAND (arg0, 1)));
7624 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7625 && (tem = fold_unary (BIT_NOT_EXPR, type,
7626 fold_convert (type,
7627 TREE_OPERAND (arg0, 1)))))
7628 return fold_build2 (BIT_XOR_EXPR, type,
7629 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7631 return NULL_TREE;
7633 case TRUTH_NOT_EXPR:
7634 /* The argument to invert_truthvalue must have Boolean type. */
7635 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7636 arg0 = fold_convert (boolean_type_node, arg0);
7638 /* Note that the operand of this must be an int
7639 and its values must be 0 or 1.
7640 ("true" is a fixed value perhaps depending on the language,
7641 but we don't handle values other than 1 correctly yet.) */
7642 tem = fold_truth_not_expr (arg0);
7643 if (!tem)
7644 return NULL_TREE;
7645 return fold_convert (type, tem);
7647 case REALPART_EXPR:
7648 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7649 return fold_convert (type, arg0);
7650 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7651 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7652 TREE_OPERAND (arg0, 1));
7653 if (TREE_CODE (arg0) == COMPLEX_CST)
7654 return fold_convert (type, TREE_REALPART (arg0));
7655 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7657 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7658 tem = fold_build2 (TREE_CODE (arg0), itype,
7659 fold_build1 (REALPART_EXPR, itype,
7660 TREE_OPERAND (arg0, 0)),
7661 fold_build1 (REALPART_EXPR, itype,
7662 TREE_OPERAND (arg0, 1)));
7663 return fold_convert (type, tem);
7665 if (TREE_CODE (arg0) == CONJ_EXPR)
7667 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7668 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7669 return fold_convert (type, tem);
7671 return NULL_TREE;
7673 case IMAGPART_EXPR:
7674 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7675 return fold_convert (type, integer_zero_node);
7676 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7677 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7678 TREE_OPERAND (arg0, 0));
7679 if (TREE_CODE (arg0) == COMPLEX_CST)
7680 return fold_convert (type, TREE_IMAGPART (arg0));
7681 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7683 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7684 tem = fold_build2 (TREE_CODE (arg0), itype,
7685 fold_build1 (IMAGPART_EXPR, itype,
7686 TREE_OPERAND (arg0, 0)),
7687 fold_build1 (IMAGPART_EXPR, itype,
7688 TREE_OPERAND (arg0, 1)));
7689 return fold_convert (type, tem);
7691 if (TREE_CODE (arg0) == CONJ_EXPR)
7693 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7694 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7695 return fold_convert (type, negate_expr (tem));
7697 return NULL_TREE;
7699 default:
7700 return NULL_TREE;
7701 } /* switch (code) */
7704 /* Fold a binary expression of code CODE and type TYPE with operands
7705 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7706 Return the folded expression if folding is successful. Otherwise,
7707 return NULL_TREE. */
7709 static tree
7710 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7712 enum tree_code compl_code;
7714 if (code == MIN_EXPR)
7715 compl_code = MAX_EXPR;
7716 else if (code == MAX_EXPR)
7717 compl_code = MIN_EXPR;
7718 else
7719 gcc_unreachable ();
7721 /* MIN (MAX (a, b), b) == b.  */
7722 if (TREE_CODE (op0) == compl_code
7723 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7724 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7726 /* MIN (MAX (b, a), b) == b.  */
7727 if (TREE_CODE (op0) == compl_code
7728 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7729 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7730 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7732 /* MIN (a, MAX (a, b)) == a.  */
7733 if (TREE_CODE (op1) == compl_code
7734 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7735 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7736 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7738 /* MIN (a, MAX (b, a)) == a.  */
7739 if (TREE_CODE (op1) == compl_code
7740 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7741 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7742 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7744 return NULL_TREE;
7747 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7748 by changing CODE to reduce the magnitude of constants involved in
7749 ARG0 of the comparison.
7750 Returns a canonicalized comparison tree if a simplification was
7751 possible, otherwise returns NULL_TREE. */
7753 static tree
7754 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
7755 tree arg0, tree arg1)
7757 enum tree_code code0 = TREE_CODE (arg0);
7758 tree t, cst0 = NULL_TREE;
7759 int sgn0;
7760 bool swap = false;
7762 /* Match A +- CST code arg1 and CST code arg1. */
7763 if (!(((code0 == MINUS_EXPR
7764 || code0 == PLUS_EXPR)
7765 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7766 || code0 == INTEGER_CST))
7767 return NULL_TREE;
7769 /* Identify the constant in arg0 and its sign. */
7770 if (code0 == INTEGER_CST)
7771 cst0 = arg0;
7772 else
7773 cst0 = TREE_OPERAND (arg0, 1);
7774 sgn0 = tree_int_cst_sgn (cst0);
7776 /* Overflowed constants and zero will cause problems. */
7777 if (integer_zerop (cst0)
7778 || TREE_OVERFLOW (cst0))
7779 return NULL_TREE;
7781 /* See if we can reduce the mangitude of the constant in
7782 arg0 by changing the comparison code. */
7783 if (code0 == INTEGER_CST)
7785 /* CST <= arg1 -> CST-1 < arg1. */
7786 if (code == LE_EXPR && sgn0 == 1)
7787 code = LT_EXPR;
7788 /* -CST < arg1 -> -CST-1 <= arg1. */
7789 else if (code == LT_EXPR && sgn0 == -1)
7790 code = LE_EXPR;
7791 /* CST > arg1 -> CST-1 >= arg1. */
7792 else if (code == GT_EXPR && sgn0 == 1)
7793 code = GE_EXPR;
7794 /* -CST >= arg1 -> -CST-1 > arg1. */
7795 else if (code == GE_EXPR && sgn0 == -1)
7796 code = GT_EXPR;
7797 else
7798 return NULL_TREE;
7799 /* arg1 code' CST' might be more canonical. */
7800 swap = true;
7802 else
7804 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7805 if (code == LT_EXPR
7806 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7807 code = LE_EXPR;
7808 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7809 else if (code == GT_EXPR
7810 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7811 code = GE_EXPR;
7812 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7813 else if (code == LE_EXPR
7814 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7815 code = LT_EXPR;
7816 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7817 else if (code == GE_EXPR
7818 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7819 code = GT_EXPR;
7820 else
7821 return NULL_TREE;
7824 /* Now build the constant reduced in magnitude. */
7825 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
7826 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
7827 if (code0 != INTEGER_CST)
7828 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
7830 /* If swapping might yield to a more canonical form, do so. */
7831 if (swap)
7832 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
7833 else
7834 return fold_build2 (code, type, t, arg1);
7837 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
7838 overflow further. Try to decrease the magnitude of constants involved
7839 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7840 and put sole constants at the second argument position.
7841 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7843 static tree
7844 maybe_canonicalize_comparison (enum tree_code code, tree type,
7845 tree arg0, tree arg1)
7847 tree t;
7849 /* In principle pointers also have undefined overflow behavior,
7850 but that causes problems elsewhere. */
7851 if ((flag_wrapv || flag_trapv)
7852 || (TYPE_UNSIGNED (TREE_TYPE (arg0))
7853 || POINTER_TYPE_P (TREE_TYPE (arg0))))
7854 return NULL_TREE;
7856 /* Try canonicalization by simplifying arg0. */
7857 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
7858 if (t)
7859 return t;
7861 /* Try canonicalization by simplifying arg1 using the swapped
7862 comparsion. */
7863 code = swap_tree_comparison (code);
7864 return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
7867 /* Subroutine of fold_binary. This routine performs all of the
7868 transformations that are common to the equality/inequality
7869 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7870 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7871 fold_binary should call fold_binary. Fold a comparison with
7872 tree code CODE and type TYPE with operands OP0 and OP1. Return
7873 the folded comparison or NULL_TREE. */
7875 static tree
7876 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7878 tree arg0, arg1, tem;
7880 arg0 = op0;
7881 arg1 = op1;
7883 STRIP_SIGN_NOPS (arg0);
7884 STRIP_SIGN_NOPS (arg1);
7886 tem = fold_relational_const (code, type, arg0, arg1);
7887 if (tem != NULL_TREE)
7888 return tem;
7890 /* If one arg is a real or integer constant, put it last. */
7891 if (tree_swap_operands_p (arg0, arg1, true))
7892 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7894 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7895 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7896 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7897 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7898 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7899 && !(flag_wrapv || flag_trapv))
7900 && (TREE_CODE (arg1) == INTEGER_CST
7901 && !TREE_OVERFLOW (arg1)))
7903 tree const1 = TREE_OPERAND (arg0, 1);
7904 tree const2 = arg1;
7905 tree variable = TREE_OPERAND (arg0, 0);
7906 tree lhs;
7907 int lhs_add;
7908 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7910 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7911 TREE_TYPE (arg1), const2, const1);
7912 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7913 && (TREE_CODE (lhs) != INTEGER_CST
7914 || !TREE_OVERFLOW (lhs)))
7915 return fold_build2 (code, type, variable, lhs);
7918 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
7919 same object, then we can fold this to a comparison of the two offsets in
7920 signed size type. This is possible because pointer arithmetic is
7921 restricted to retain within an object and overflow on pointer differences
7922 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
7923 if (POINTER_TYPE_P (TREE_TYPE (arg0))
7924 && !flag_wrapv && !flag_trapv)
7926 tree base0, offset0, base1, offset1;
7928 if (extract_array_ref (arg0, &base0, &offset0)
7929 && extract_array_ref (arg1, &base1, &offset1)
7930 && operand_equal_p (base0, base1, 0))
7932 tree signed_size_type_node;
7933 signed_size_type_node = signed_type_for (size_type_node);
7935 /* By converting to signed size type we cover middle-end pointer
7936 arithmetic which operates on unsigned pointer types of size
7937 type size and ARRAY_REF offsets which are properly sign or
7938 zero extended from their type in case it is narrower than
7939 size type. */
7940 if (offset0 == NULL_TREE)
7941 offset0 = build_int_cst (signed_size_type_node, 0);
7942 else
7943 offset0 = fold_convert (signed_size_type_node, offset0);
7944 if (offset1 == NULL_TREE)
7945 offset1 = build_int_cst (signed_size_type_node, 0);
7946 else
7947 offset1 = fold_convert (signed_size_type_node, offset1);
7949 return fold_build2 (code, type, offset0, offset1);
7953 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
7954 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
7955 the resulting offset is smaller in absolute value than the
7956 original one. */
7957 if (!(flag_wrapv || flag_trapv)
7958 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
7959 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7960 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7961 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
7962 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
7963 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7964 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
7966 tree const1 = TREE_OPERAND (arg0, 1);
7967 tree const2 = TREE_OPERAND (arg1, 1);
7968 tree variable1 = TREE_OPERAND (arg0, 0);
7969 tree variable2 = TREE_OPERAND (arg1, 0);
7970 tree cst;
7972 /* Put the constant on the side where it doesn't overflow and is
7973 of lower absolute value than before. */
7974 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
7975 ? MINUS_EXPR : PLUS_EXPR,
7976 const2, const1, 0);
7977 if (!TREE_OVERFLOW (cst)
7978 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
7979 return fold_build2 (code, type,
7980 variable1,
7981 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
7982 variable2, cst));
7984 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
7985 ? MINUS_EXPR : PLUS_EXPR,
7986 const1, const2, 0);
7987 if (!TREE_OVERFLOW (cst)
7988 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
7989 return fold_build2 (code, type,
7990 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
7991 variable1, cst),
7992 variable2);
7995 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
7996 if (tem)
7997 return tem;
7999 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8001 tree targ0 = strip_float_extensions (arg0);
8002 tree targ1 = strip_float_extensions (arg1);
8003 tree newtype = TREE_TYPE (targ0);
8005 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8006 newtype = TREE_TYPE (targ1);
8008 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8009 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8010 return fold_build2 (code, type, fold_convert (newtype, targ0),
8011 fold_convert (newtype, targ1));
8013 /* (-a) CMP (-b) -> b CMP a */
8014 if (TREE_CODE (arg0) == NEGATE_EXPR
8015 && TREE_CODE (arg1) == NEGATE_EXPR)
8016 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8017 TREE_OPERAND (arg0, 0));
8019 if (TREE_CODE (arg1) == REAL_CST)
8021 REAL_VALUE_TYPE cst;
8022 cst = TREE_REAL_CST (arg1);
8024 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8025 if (TREE_CODE (arg0) == NEGATE_EXPR)
8026 return fold_build2 (swap_tree_comparison (code), type,
8027 TREE_OPERAND (arg0, 0),
8028 build_real (TREE_TYPE (arg1),
8029 REAL_VALUE_NEGATE (cst)));
8031 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8032 /* a CMP (-0) -> a CMP 0 */
8033 if (REAL_VALUE_MINUS_ZERO (cst))
8034 return fold_build2 (code, type, arg0,
8035 build_real (TREE_TYPE (arg1), dconst0));
8037 /* x != NaN is always true, other ops are always false. */
8038 if (REAL_VALUE_ISNAN (cst)
8039 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8041 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8042 return omit_one_operand (type, tem, arg0);
8045 /* Fold comparisons against infinity. */
8046 if (REAL_VALUE_ISINF (cst))
8048 tem = fold_inf_compare (code, type, arg0, arg1);
8049 if (tem != NULL_TREE)
8050 return tem;
8054 /* If this is a comparison of a real constant with a PLUS_EXPR
8055 or a MINUS_EXPR of a real constant, we can convert it into a
8056 comparison with a revised real constant as long as no overflow
8057 occurs when unsafe_math_optimizations are enabled. */
8058 if (flag_unsafe_math_optimizations
8059 && TREE_CODE (arg1) == REAL_CST
8060 && (TREE_CODE (arg0) == PLUS_EXPR
8061 || TREE_CODE (arg0) == MINUS_EXPR)
8062 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8063 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8064 ? MINUS_EXPR : PLUS_EXPR,
8065 arg1, TREE_OPERAND (arg0, 1), 0))
8066 && ! TREE_CONSTANT_OVERFLOW (tem))
8067 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8069 /* Likewise, we can simplify a comparison of a real constant with
8070 a MINUS_EXPR whose first operand is also a real constant, i.e.
8071 (c1 - x) < c2 becomes x > c1-c2. */
8072 if (flag_unsafe_math_optimizations
8073 && TREE_CODE (arg1) == REAL_CST
8074 && TREE_CODE (arg0) == MINUS_EXPR
8075 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8076 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8077 arg1, 0))
8078 && ! TREE_CONSTANT_OVERFLOW (tem))
8079 return fold_build2 (swap_tree_comparison (code), type,
8080 TREE_OPERAND (arg0, 1), tem);
8082 /* Fold comparisons against built-in math functions. */
8083 if (TREE_CODE (arg1) == REAL_CST
8084 && flag_unsafe_math_optimizations
8085 && ! flag_errno_math)
8087 enum built_in_function fcode = builtin_mathfn_code (arg0);
8089 if (fcode != END_BUILTINS)
8091 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8092 if (tem != NULL_TREE)
8093 return tem;
8098 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8099 if (TREE_CONSTANT (arg1)
8100 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8101 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8102 /* This optimization is invalid for ordered comparisons
8103 if CONST+INCR overflows or if foo+incr might overflow.
8104 This optimization is invalid for floating point due to rounding.
8105 For pointer types we assume overflow doesn't happen. */
8106 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8107 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8108 && (code == EQ_EXPR || code == NE_EXPR))))
8110 tree varop, newconst;
8112 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8114 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8115 arg1, TREE_OPERAND (arg0, 1));
8116 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8117 TREE_OPERAND (arg0, 0),
8118 TREE_OPERAND (arg0, 1));
8120 else
8122 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8123 arg1, TREE_OPERAND (arg0, 1));
8124 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8125 TREE_OPERAND (arg0, 0),
8126 TREE_OPERAND (arg0, 1));
8130 /* If VAROP is a reference to a bitfield, we must mask
8131 the constant by the width of the field. */
8132 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8133 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8134 && host_integerp (DECL_SIZE (TREE_OPERAND
8135 (TREE_OPERAND (varop, 0), 1)), 1))
8137 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8138 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8139 tree folded_compare, shift;
8141 /* First check whether the comparison would come out
8142 always the same. If we don't do that we would
8143 change the meaning with the masking. */
8144 folded_compare = fold_build2 (code, type,
8145 TREE_OPERAND (varop, 0), arg1);
8146 if (TREE_CODE (folded_compare) == INTEGER_CST)
8147 return omit_one_operand (type, folded_compare, varop);
8149 shift = build_int_cst (NULL_TREE,
8150 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8151 shift = fold_convert (TREE_TYPE (varop), shift);
8152 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8153 newconst, shift);
8154 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8155 newconst, shift);
8158 return fold_build2 (code, type, varop, newconst);
8161 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8162 && (TREE_CODE (arg0) == NOP_EXPR
8163 || TREE_CODE (arg0) == CONVERT_EXPR))
8165 /* If we are widening one operand of an integer comparison,
8166 see if the other operand is similarly being widened. Perhaps we
8167 can do the comparison in the narrower type. */
8168 tem = fold_widened_comparison (code, type, arg0, arg1);
8169 if (tem)
8170 return tem;
8172 /* Or if we are changing signedness. */
8173 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8174 if (tem)
8175 return tem;
8178 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8179 constant, we can simplify it. */
8180 if (TREE_CODE (arg1) == INTEGER_CST
8181 && (TREE_CODE (arg0) == MIN_EXPR
8182 || TREE_CODE (arg0) == MAX_EXPR)
8183 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8185 tem = optimize_minmax_comparison (code, type, op0, op1);
8186 if (tem)
8187 return tem;
8190 /* Simplify comparison of something with itself. (For IEEE
8191 floating-point, we can only do some of these simplifications.) */
8192 if (operand_equal_p (arg0, arg1, 0))
8194 switch (code)
8196 case EQ_EXPR:
8197 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8198 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8199 return constant_boolean_node (1, type);
8200 break;
8202 case GE_EXPR:
8203 case LE_EXPR:
8204 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8205 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8206 return constant_boolean_node (1, type);
8207 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8209 case NE_EXPR:
8210 /* For NE, we can only do this simplification if integer
8211 or we don't honor IEEE floating point NaNs. */
8212 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8213 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8214 break;
8215 /* ... fall through ... */
8216 case GT_EXPR:
8217 case LT_EXPR:
8218 return constant_boolean_node (0, type);
8219 default:
8220 gcc_unreachable ();
8224 /* If we are comparing an expression that just has comparisons
8225 of two integer values, arithmetic expressions of those comparisons,
8226 and constants, we can simplify it. There are only three cases
8227 to check: the two values can either be equal, the first can be
8228 greater, or the second can be greater. Fold the expression for
8229 those three values. Since each value must be 0 or 1, we have
8230 eight possibilities, each of which corresponds to the constant 0
8231 or 1 or one of the six possible comparisons.
8233 This handles common cases like (a > b) == 0 but also handles
8234 expressions like ((x > y) - (y > x)) > 0, which supposedly
8235 occur in macroized code. */
8237 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8239 tree cval1 = 0, cval2 = 0;
8240 int save_p = 0;
8242 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8243 /* Don't handle degenerate cases here; they should already
8244 have been handled anyway. */
8245 && cval1 != 0 && cval2 != 0
8246 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8247 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8248 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8249 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8250 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8251 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8252 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8254 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8255 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8257 /* We can't just pass T to eval_subst in case cval1 or cval2
8258 was the same as ARG1. */
8260 tree high_result
8261 = fold_build2 (code, type,
8262 eval_subst (arg0, cval1, maxval,
8263 cval2, minval),
8264 arg1);
8265 tree equal_result
8266 = fold_build2 (code, type,
8267 eval_subst (arg0, cval1, maxval,
8268 cval2, maxval),
8269 arg1);
8270 tree low_result
8271 = fold_build2 (code, type,
8272 eval_subst (arg0, cval1, minval,
8273 cval2, maxval),
8274 arg1);
8276 /* All three of these results should be 0 or 1. Confirm they are.
8277 Then use those values to select the proper code to use. */
8279 if (TREE_CODE (high_result) == INTEGER_CST
8280 && TREE_CODE (equal_result) == INTEGER_CST
8281 && TREE_CODE (low_result) == INTEGER_CST)
8283 /* Make a 3-bit mask with the high-order bit being the
8284 value for `>', the next for '=', and the low for '<'. */
8285 switch ((integer_onep (high_result) * 4)
8286 + (integer_onep (equal_result) * 2)
8287 + integer_onep (low_result))
8289 case 0:
8290 /* Always false. */
8291 return omit_one_operand (type, integer_zero_node, arg0);
8292 case 1:
8293 code = LT_EXPR;
8294 break;
8295 case 2:
8296 code = EQ_EXPR;
8297 break;
8298 case 3:
8299 code = LE_EXPR;
8300 break;
8301 case 4:
8302 code = GT_EXPR;
8303 break;
8304 case 5:
8305 code = NE_EXPR;
8306 break;
8307 case 6:
8308 code = GE_EXPR;
8309 break;
8310 case 7:
8311 /* Always true. */
8312 return omit_one_operand (type, integer_one_node, arg0);
8315 if (save_p)
8316 return save_expr (build2 (code, type, cval1, cval2));
8317 return fold_build2 (code, type, cval1, cval2);
8322 /* Fold a comparison of the address of COMPONENT_REFs with the same
8323 type and component to a comparison of the address of the base
8324 object. In short, &x->a OP &y->a to x OP y and
8325 &x->a OP &y.a to x OP &y */
8326 if (TREE_CODE (arg0) == ADDR_EXPR
8327 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8328 && TREE_CODE (arg1) == ADDR_EXPR
8329 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8331 tree cref0 = TREE_OPERAND (arg0, 0);
8332 tree cref1 = TREE_OPERAND (arg1, 0);
8333 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8335 tree op0 = TREE_OPERAND (cref0, 0);
8336 tree op1 = TREE_OPERAND (cref1, 0);
8337 return fold_build2 (code, type,
8338 build_fold_addr_expr (op0),
8339 build_fold_addr_expr (op1));
8343 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8344 into a single range test. */
8345 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8346 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8347 && TREE_CODE (arg1) == INTEGER_CST
8348 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8349 && !integer_zerop (TREE_OPERAND (arg0, 1))
8350 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8351 && !TREE_OVERFLOW (arg1))
8353 tem = fold_div_compare (code, type, arg0, arg1);
8354 if (tem != NULL_TREE)
8355 return tem;
8358 /* Fold ~X op ~Y as Y op X. */
8359 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8360 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8361 return fold_build2 (code, type,
8362 TREE_OPERAND (arg1, 0),
8363 TREE_OPERAND (arg0, 0));
8365 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8366 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8367 && TREE_CODE (arg1) == INTEGER_CST)
8368 return fold_build2 (swap_tree_comparison (code), type,
8369 TREE_OPERAND (arg0, 0),
8370 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8372 return NULL_TREE;
8376 /* Subroutine of fold_binary. Optimize complex multiplications of the
8377 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8378 argument EXPR represents the expression "z" of type TYPE. */
8380 static tree
8381 fold_mult_zconjz (tree type, tree expr)
8383 tree itype = TREE_TYPE (type);
8384 tree rpart, ipart, tem;
8386 if (TREE_CODE (expr) == COMPLEX_EXPR)
8388 rpart = TREE_OPERAND (expr, 0);
8389 ipart = TREE_OPERAND (expr, 1);
8391 else if (TREE_CODE (expr) == COMPLEX_CST)
8393 rpart = TREE_REALPART (expr);
8394 ipart = TREE_IMAGPART (expr);
8396 else
8398 expr = save_expr (expr);
8399 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8400 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8403 rpart = save_expr (rpart);
8404 ipart = save_expr (ipart);
8405 tem = fold_build2 (PLUS_EXPR, itype,
8406 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8407 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8408 return fold_build2 (COMPLEX_EXPR, type, tem,
8409 fold_convert (itype, integer_zero_node));
8413 /* Fold a binary expression of code CODE and type TYPE with operands
8414 OP0 and OP1. Return the folded expression if folding is
8415 successful. Otherwise, return NULL_TREE. */
8417 tree
8418 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8420 enum tree_code_class kind = TREE_CODE_CLASS (code);
8421 tree arg0, arg1, tem;
8422 tree t1 = NULL_TREE;
8424 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8425 && TREE_CODE_LENGTH (code) == 2
8426 && op0 != NULL_TREE
8427 && op1 != NULL_TREE);
8429 arg0 = op0;
8430 arg1 = op1;
8432 /* Strip any conversions that don't change the mode. This is
8433 safe for every expression, except for a comparison expression
8434 because its signedness is derived from its operands. So, in
8435 the latter case, only strip conversions that don't change the
8436 signedness.
8438 Note that this is done as an internal manipulation within the
8439 constant folder, in order to find the simplest representation
8440 of the arguments so that their form can be studied. In any
8441 cases, the appropriate type conversions should be put back in
8442 the tree that will get out of the constant folder. */
8444 if (kind == tcc_comparison)
8446 STRIP_SIGN_NOPS (arg0);
8447 STRIP_SIGN_NOPS (arg1);
8449 else
8451 STRIP_NOPS (arg0);
8452 STRIP_NOPS (arg1);
8455 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8456 constant but we can't do arithmetic on them. */
8457 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8458 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8459 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8460 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8462 if (kind == tcc_binary)
8463 tem = const_binop (code, arg0, arg1, 0);
8464 else if (kind == tcc_comparison)
8465 tem = fold_relational_const (code, type, arg0, arg1);
8466 else
8467 tem = NULL_TREE;
8469 if (tem != NULL_TREE)
8471 if (TREE_TYPE (tem) != type)
8472 tem = fold_convert (type, tem);
8473 return tem;
8477 /* If this is a commutative operation, and ARG0 is a constant, move it
8478 to ARG1 to reduce the number of tests below. */
8479 if (commutative_tree_code (code)
8480 && tree_swap_operands_p (arg0, arg1, true))
8481 return fold_build2 (code, type, op1, op0);
8483 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8485 First check for cases where an arithmetic operation is applied to a
8486 compound, conditional, or comparison operation. Push the arithmetic
8487 operation inside the compound or conditional to see if any folding
8488 can then be done. Convert comparison to conditional for this purpose.
8489 The also optimizes non-constant cases that used to be done in
8490 expand_expr.
8492 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8493 one of the operands is a comparison and the other is a comparison, a
8494 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8495 code below would make the expression more complex. Change it to a
8496 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8497 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8499 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8500 || code == EQ_EXPR || code == NE_EXPR)
8501 && ((truth_value_p (TREE_CODE (arg0))
8502 && (truth_value_p (TREE_CODE (arg1))
8503 || (TREE_CODE (arg1) == BIT_AND_EXPR
8504 && integer_onep (TREE_OPERAND (arg1, 1)))))
8505 || (truth_value_p (TREE_CODE (arg1))
8506 && (truth_value_p (TREE_CODE (arg0))
8507 || (TREE_CODE (arg0) == BIT_AND_EXPR
8508 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8510 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8511 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8512 : TRUTH_XOR_EXPR,
8513 boolean_type_node,
8514 fold_convert (boolean_type_node, arg0),
8515 fold_convert (boolean_type_node, arg1));
8517 if (code == EQ_EXPR)
8518 tem = invert_truthvalue (tem);
8520 return fold_convert (type, tem);
8523 if (TREE_CODE_CLASS (code) == tcc_binary
8524 || TREE_CODE_CLASS (code) == tcc_comparison)
8526 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8527 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8528 fold_build2 (code, type,
8529 TREE_OPERAND (arg0, 1), op1));
8530 if (TREE_CODE (arg1) == COMPOUND_EXPR
8531 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8532 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8533 fold_build2 (code, type,
8534 op0, TREE_OPERAND (arg1, 1)));
8536 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8538 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8539 arg0, arg1,
8540 /*cond_first_p=*/1);
8541 if (tem != NULL_TREE)
8542 return tem;
8545 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8547 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8548 arg1, arg0,
8549 /*cond_first_p=*/0);
8550 if (tem != NULL_TREE)
8551 return tem;
8555 switch (code)
8557 case PLUS_EXPR:
8558 /* A + (-B) -> A - B */
8559 if (TREE_CODE (arg1) == NEGATE_EXPR)
8560 return fold_build2 (MINUS_EXPR, type,
8561 fold_convert (type, arg0),
8562 fold_convert (type, TREE_OPERAND (arg1, 0)));
8563 /* (-A) + B -> B - A */
8564 if (TREE_CODE (arg0) == NEGATE_EXPR
8565 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8566 return fold_build2 (MINUS_EXPR, type,
8567 fold_convert (type, arg1),
8568 fold_convert (type, TREE_OPERAND (arg0, 0)));
8569 /* Convert ~A + 1 to -A. */
8570 if (INTEGRAL_TYPE_P (type)
8571 && TREE_CODE (arg0) == BIT_NOT_EXPR
8572 && integer_onep (arg1))
8573 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8575 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8576 same or one. */
8577 if ((TREE_CODE (arg0) == MULT_EXPR
8578 || TREE_CODE (arg1) == MULT_EXPR)
8579 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8581 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8582 if (tem)
8583 return tem;
8586 if (! FLOAT_TYPE_P (type))
8588 if (integer_zerop (arg1))
8589 return non_lvalue (fold_convert (type, arg0));
8591 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8592 with a constant, and the two constants have no bits in common,
8593 we should treat this as a BIT_IOR_EXPR since this may produce more
8594 simplifications. */
8595 if (TREE_CODE (arg0) == BIT_AND_EXPR
8596 && TREE_CODE (arg1) == BIT_AND_EXPR
8597 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8598 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8599 && integer_zerop (const_binop (BIT_AND_EXPR,
8600 TREE_OPERAND (arg0, 1),
8601 TREE_OPERAND (arg1, 1), 0)))
8603 code = BIT_IOR_EXPR;
8604 goto bit_ior;
8607 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8608 (plus (plus (mult) (mult)) (foo)) so that we can
8609 take advantage of the factoring cases below. */
8610 if (((TREE_CODE (arg0) == PLUS_EXPR
8611 || TREE_CODE (arg0) == MINUS_EXPR)
8612 && TREE_CODE (arg1) == MULT_EXPR)
8613 || ((TREE_CODE (arg1) == PLUS_EXPR
8614 || TREE_CODE (arg1) == MINUS_EXPR)
8615 && TREE_CODE (arg0) == MULT_EXPR))
8617 tree parg0, parg1, parg, marg;
8618 enum tree_code pcode;
8620 if (TREE_CODE (arg1) == MULT_EXPR)
8621 parg = arg0, marg = arg1;
8622 else
8623 parg = arg1, marg = arg0;
8624 pcode = TREE_CODE (parg);
8625 parg0 = TREE_OPERAND (parg, 0);
8626 parg1 = TREE_OPERAND (parg, 1);
8627 STRIP_NOPS (parg0);
8628 STRIP_NOPS (parg1);
8630 if (TREE_CODE (parg0) == MULT_EXPR
8631 && TREE_CODE (parg1) != MULT_EXPR)
8632 return fold_build2 (pcode, type,
8633 fold_build2 (PLUS_EXPR, type,
8634 fold_convert (type, parg0),
8635 fold_convert (type, marg)),
8636 fold_convert (type, parg1));
8637 if (TREE_CODE (parg0) != MULT_EXPR
8638 && TREE_CODE (parg1) == MULT_EXPR)
8639 return fold_build2 (PLUS_EXPR, type,
8640 fold_convert (type, parg0),
8641 fold_build2 (pcode, type,
8642 fold_convert (type, marg),
8643 fold_convert (type,
8644 parg1)));
8647 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8648 of the array. Loop optimizer sometimes produce this type of
8649 expressions. */
8650 if (TREE_CODE (arg0) == ADDR_EXPR)
8652 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8653 if (tem)
8654 return fold_convert (type, tem);
8656 else if (TREE_CODE (arg1) == ADDR_EXPR)
8658 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8659 if (tem)
8660 return fold_convert (type, tem);
8663 else
8665 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8666 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8667 return non_lvalue (fold_convert (type, arg0));
8669 /* Likewise if the operands are reversed. */
8670 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8671 return non_lvalue (fold_convert (type, arg1));
8673 /* Convert X + -C into X - C. */
8674 if (TREE_CODE (arg1) == REAL_CST
8675 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8677 tem = fold_negate_const (arg1, type);
8678 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8679 return fold_build2 (MINUS_EXPR, type,
8680 fold_convert (type, arg0),
8681 fold_convert (type, tem));
8684 if (flag_unsafe_math_optimizations
8685 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8686 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8687 && (tem = distribute_real_division (code, type, arg0, arg1)))
8688 return tem;
8690 /* Convert x+x into x*2.0. */
8691 if (operand_equal_p (arg0, arg1, 0)
8692 && SCALAR_FLOAT_TYPE_P (type))
8693 return fold_build2 (MULT_EXPR, type, arg0,
8694 build_real (type, dconst2));
8696 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8697 if (flag_unsafe_math_optimizations
8698 && TREE_CODE (arg1) == PLUS_EXPR
8699 && TREE_CODE (arg0) != MULT_EXPR)
8701 tree tree10 = TREE_OPERAND (arg1, 0);
8702 tree tree11 = TREE_OPERAND (arg1, 1);
8703 if (TREE_CODE (tree11) == MULT_EXPR
8704 && TREE_CODE (tree10) == MULT_EXPR)
8706 tree tree0;
8707 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8708 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8711 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8712 if (flag_unsafe_math_optimizations
8713 && TREE_CODE (arg0) == PLUS_EXPR
8714 && TREE_CODE (arg1) != MULT_EXPR)
8716 tree tree00 = TREE_OPERAND (arg0, 0);
8717 tree tree01 = TREE_OPERAND (arg0, 1);
8718 if (TREE_CODE (tree01) == MULT_EXPR
8719 && TREE_CODE (tree00) == MULT_EXPR)
8721 tree tree0;
8722 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8723 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8728 bit_rotate:
8729 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8730 is a rotate of A by C1 bits. */
8731 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8732 is a rotate of A by B bits. */
8734 enum tree_code code0, code1;
8735 code0 = TREE_CODE (arg0);
8736 code1 = TREE_CODE (arg1);
8737 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8738 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8739 && operand_equal_p (TREE_OPERAND (arg0, 0),
8740 TREE_OPERAND (arg1, 0), 0)
8741 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8743 tree tree01, tree11;
8744 enum tree_code code01, code11;
8746 tree01 = TREE_OPERAND (arg0, 1);
8747 tree11 = TREE_OPERAND (arg1, 1);
8748 STRIP_NOPS (tree01);
8749 STRIP_NOPS (tree11);
8750 code01 = TREE_CODE (tree01);
8751 code11 = TREE_CODE (tree11);
8752 if (code01 == INTEGER_CST
8753 && code11 == INTEGER_CST
8754 && TREE_INT_CST_HIGH (tree01) == 0
8755 && TREE_INT_CST_HIGH (tree11) == 0
8756 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8757 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8758 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8759 code0 == LSHIFT_EXPR ? tree01 : tree11);
8760 else if (code11 == MINUS_EXPR)
8762 tree tree110, tree111;
8763 tree110 = TREE_OPERAND (tree11, 0);
8764 tree111 = TREE_OPERAND (tree11, 1);
8765 STRIP_NOPS (tree110);
8766 STRIP_NOPS (tree111);
8767 if (TREE_CODE (tree110) == INTEGER_CST
8768 && 0 == compare_tree_int (tree110,
8769 TYPE_PRECISION
8770 (TREE_TYPE (TREE_OPERAND
8771 (arg0, 0))))
8772 && operand_equal_p (tree01, tree111, 0))
8773 return build2 ((code0 == LSHIFT_EXPR
8774 ? LROTATE_EXPR
8775 : RROTATE_EXPR),
8776 type, TREE_OPERAND (arg0, 0), tree01);
8778 else if (code01 == MINUS_EXPR)
8780 tree tree010, tree011;
8781 tree010 = TREE_OPERAND (tree01, 0);
8782 tree011 = TREE_OPERAND (tree01, 1);
8783 STRIP_NOPS (tree010);
8784 STRIP_NOPS (tree011);
8785 if (TREE_CODE (tree010) == INTEGER_CST
8786 && 0 == compare_tree_int (tree010,
8787 TYPE_PRECISION
8788 (TREE_TYPE (TREE_OPERAND
8789 (arg0, 0))))
8790 && operand_equal_p (tree11, tree011, 0))
8791 return build2 ((code0 != LSHIFT_EXPR
8792 ? LROTATE_EXPR
8793 : RROTATE_EXPR),
8794 type, TREE_OPERAND (arg0, 0), tree11);
8799 associate:
8800 /* In most languages, can't associate operations on floats through
8801 parentheses. Rather than remember where the parentheses were, we
8802 don't associate floats at all, unless the user has specified
8803 -funsafe-math-optimizations. */
8805 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8807 tree var0, con0, lit0, minus_lit0;
8808 tree var1, con1, lit1, minus_lit1;
8810 /* Split both trees into variables, constants, and literals. Then
8811 associate each group together, the constants with literals,
8812 then the result with variables. This increases the chances of
8813 literals being recombined later and of generating relocatable
8814 expressions for the sum of a constant and literal. */
8815 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8816 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8817 code == MINUS_EXPR);
8819 /* Only do something if we found more than two objects. Otherwise,
8820 nothing has changed and we risk infinite recursion. */
8821 if (2 < ((var0 != 0) + (var1 != 0)
8822 + (con0 != 0) + (con1 != 0)
8823 + (lit0 != 0) + (lit1 != 0)
8824 + (minus_lit0 != 0) + (minus_lit1 != 0)))
8826 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8827 if (code == MINUS_EXPR)
8828 code = PLUS_EXPR;
8830 var0 = associate_trees (var0, var1, code, type);
8831 con0 = associate_trees (con0, con1, code, type);
8832 lit0 = associate_trees (lit0, lit1, code, type);
8833 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8835 /* Preserve the MINUS_EXPR if the negative part of the literal is
8836 greater than the positive part. Otherwise, the multiplicative
8837 folding code (i.e extract_muldiv) may be fooled in case
8838 unsigned constants are subtracted, like in the following
8839 example: ((X*2 + 4) - 8U)/2. */
8840 if (minus_lit0 && lit0)
8842 if (TREE_CODE (lit0) == INTEGER_CST
8843 && TREE_CODE (minus_lit0) == INTEGER_CST
8844 && tree_int_cst_lt (lit0, minus_lit0))
8846 minus_lit0 = associate_trees (minus_lit0, lit0,
8847 MINUS_EXPR, type);
8848 lit0 = 0;
8850 else
8852 lit0 = associate_trees (lit0, minus_lit0,
8853 MINUS_EXPR, type);
8854 minus_lit0 = 0;
8857 if (minus_lit0)
8859 if (con0 == 0)
8860 return fold_convert (type,
8861 associate_trees (var0, minus_lit0,
8862 MINUS_EXPR, type));
8863 else
8865 con0 = associate_trees (con0, minus_lit0,
8866 MINUS_EXPR, type);
8867 return fold_convert (type,
8868 associate_trees (var0, con0,
8869 PLUS_EXPR, type));
8873 con0 = associate_trees (con0, lit0, code, type);
8874 return fold_convert (type, associate_trees (var0, con0,
8875 code, type));
8879 return NULL_TREE;
8881 case MINUS_EXPR:
8882 /* A - (-B) -> A + B */
8883 if (TREE_CODE (arg1) == NEGATE_EXPR)
8884 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8885 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8886 if (TREE_CODE (arg0) == NEGATE_EXPR
8887 && (FLOAT_TYPE_P (type)
8888 || INTEGRAL_TYPE_P (type))
8889 && negate_expr_p (arg1)
8890 && reorder_operands_p (arg0, arg1))
8891 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8892 TREE_OPERAND (arg0, 0));
8893 /* Convert -A - 1 to ~A. */
8894 if (INTEGRAL_TYPE_P (type)
8895 && TREE_CODE (arg0) == NEGATE_EXPR
8896 && integer_onep (arg1))
8897 return fold_build1 (BIT_NOT_EXPR, type,
8898 fold_convert (type, TREE_OPERAND (arg0, 0)));
8900 /* Convert -1 - A to ~A. */
8901 if (INTEGRAL_TYPE_P (type)
8902 && integer_all_onesp (arg0))
8903 return fold_build1 (BIT_NOT_EXPR, type, arg1);
8905 if (! FLOAT_TYPE_P (type))
8907 if (integer_zerop (arg0))
8908 return negate_expr (fold_convert (type, arg1));
8909 if (integer_zerop (arg1))
8910 return non_lvalue (fold_convert (type, arg0));
8912 /* Fold A - (A & B) into ~B & A. */
8913 if (!TREE_SIDE_EFFECTS (arg0)
8914 && TREE_CODE (arg1) == BIT_AND_EXPR)
8916 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8917 return fold_build2 (BIT_AND_EXPR, type,
8918 fold_build1 (BIT_NOT_EXPR, type,
8919 TREE_OPERAND (arg1, 0)),
8920 arg0);
8921 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8922 return fold_build2 (BIT_AND_EXPR, type,
8923 fold_build1 (BIT_NOT_EXPR, type,
8924 TREE_OPERAND (arg1, 1)),
8925 arg0);
8928 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8929 any power of 2 minus 1. */
8930 if (TREE_CODE (arg0) == BIT_AND_EXPR
8931 && TREE_CODE (arg1) == BIT_AND_EXPR
8932 && operand_equal_p (TREE_OPERAND (arg0, 0),
8933 TREE_OPERAND (arg1, 0), 0))
8935 tree mask0 = TREE_OPERAND (arg0, 1);
8936 tree mask1 = TREE_OPERAND (arg1, 1);
8937 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
8939 if (operand_equal_p (tem, mask1, 0))
8941 tem = fold_build2 (BIT_XOR_EXPR, type,
8942 TREE_OPERAND (arg0, 0), mask1);
8943 return fold_build2 (MINUS_EXPR, type, tem, mask1);
8948 /* See if ARG1 is zero and X - ARG1 reduces to X. */
8949 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
8950 return non_lvalue (fold_convert (type, arg0));
8952 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
8953 ARG0 is zero and X + ARG0 reduces to X, since that would mean
8954 (-ARG1 + ARG0) reduces to -ARG1. */
8955 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8956 return negate_expr (fold_convert (type, arg1));
8958 /* Fold &x - &x. This can happen from &x.foo - &x.
8959 This is unsafe for certain floats even in non-IEEE formats.
8960 In IEEE, it is unsafe because it does wrong for NaNs.
8961 Also note that operand_equal_p is always false if an operand
8962 is volatile. */
8964 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8965 && operand_equal_p (arg0, arg1, 0))
8966 return fold_convert (type, integer_zero_node);
8968 /* A - B -> A + (-B) if B is easily negatable. */
8969 if (negate_expr_p (arg1)
8970 && ((FLOAT_TYPE_P (type)
8971 /* Avoid this transformation if B is a positive REAL_CST. */
8972 && (TREE_CODE (arg1) != REAL_CST
8973 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
8974 || INTEGRAL_TYPE_P (type)))
8975 return fold_build2 (PLUS_EXPR, type,
8976 fold_convert (type, arg0),
8977 fold_convert (type, negate_expr (arg1)));
8979 /* Try folding difference of addresses. */
8981 HOST_WIDE_INT diff;
8983 if ((TREE_CODE (arg0) == ADDR_EXPR
8984 || TREE_CODE (arg1) == ADDR_EXPR)
8985 && ptr_difference_const (arg0, arg1, &diff))
8986 return build_int_cst_type (type, diff);
8989 /* Fold &a[i] - &a[j] to i-j. */
8990 if (TREE_CODE (arg0) == ADDR_EXPR
8991 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
8992 && TREE_CODE (arg1) == ADDR_EXPR
8993 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
8995 tree aref0 = TREE_OPERAND (arg0, 0);
8996 tree aref1 = TREE_OPERAND (arg1, 0);
8997 if (operand_equal_p (TREE_OPERAND (aref0, 0),
8998 TREE_OPERAND (aref1, 0), 0))
9000 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9001 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9002 tree esz = array_ref_element_size (aref0);
9003 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9004 return fold_build2 (MULT_EXPR, type, diff,
9005 fold_convert (type, esz));
9010 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
9011 of the array. Loop optimizer sometimes produce this type of
9012 expressions. */
9013 if (TREE_CODE (arg0) == ADDR_EXPR)
9015 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9016 if (tem)
9017 return fold_convert (type, tem);
9020 if (flag_unsafe_math_optimizations
9021 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9022 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9023 && (tem = distribute_real_division (code, type, arg0, arg1)))
9024 return tem;
9026 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9027 same or one. */
9028 if ((TREE_CODE (arg0) == MULT_EXPR
9029 || TREE_CODE (arg1) == MULT_EXPR)
9030 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9032 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9033 if (tem)
9034 return tem;
9037 goto associate;
9039 case MULT_EXPR:
9040 /* (-A) * (-B) -> A * B */
9041 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9042 return fold_build2 (MULT_EXPR, type,
9043 fold_convert (type, TREE_OPERAND (arg0, 0)),
9044 fold_convert (type, negate_expr (arg1)));
9045 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9046 return fold_build2 (MULT_EXPR, type,
9047 fold_convert (type, negate_expr (arg0)),
9048 fold_convert (type, TREE_OPERAND (arg1, 0)));
9050 if (! FLOAT_TYPE_P (type))
9052 if (integer_zerop (arg1))
9053 return omit_one_operand (type, arg1, arg0);
9054 if (integer_onep (arg1))
9055 return non_lvalue (fold_convert (type, arg0));
9056 /* Transform x * -1 into -x. */
9057 if (integer_all_onesp (arg1))
9058 return fold_convert (type, negate_expr (arg0));
9059 /* Transform x * -C into -x * C if x is easily negatable. */
9060 if (TREE_CODE (arg1) == INTEGER_CST
9061 && tree_int_cst_sgn (arg1) == -1
9062 && negate_expr_p (arg0)
9063 && (tem = negate_expr (arg1)) != arg1
9064 && !TREE_OVERFLOW (tem))
9065 return fold_build2 (MULT_EXPR, type,
9066 negate_expr (arg0), tem);
9068 /* (a * (1 << b)) is (a << b) */
9069 if (TREE_CODE (arg1) == LSHIFT_EXPR
9070 && integer_onep (TREE_OPERAND (arg1, 0)))
9071 return fold_build2 (LSHIFT_EXPR, type, arg0,
9072 TREE_OPERAND (arg1, 1));
9073 if (TREE_CODE (arg0) == LSHIFT_EXPR
9074 && integer_onep (TREE_OPERAND (arg0, 0)))
9075 return fold_build2 (LSHIFT_EXPR, type, arg1,
9076 TREE_OPERAND (arg0, 1));
9078 if (TREE_CODE (arg1) == INTEGER_CST
9079 && 0 != (tem = extract_muldiv (op0,
9080 fold_convert (type, arg1),
9081 code, NULL_TREE)))
9082 return fold_convert (type, tem);
9084 /* Optimize z * conj(z) for integer complex numbers. */
9085 if (TREE_CODE (arg0) == CONJ_EXPR
9086 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9087 return fold_mult_zconjz (type, arg1);
9088 if (TREE_CODE (arg1) == CONJ_EXPR
9089 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9090 return fold_mult_zconjz (type, arg0);
9092 else
9094 /* Maybe fold x * 0 to 0. The expressions aren't the same
9095 when x is NaN, since x * 0 is also NaN. Nor are they the
9096 same in modes with signed zeros, since multiplying a
9097 negative value by 0 gives -0, not +0. */
9098 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9099 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9100 && real_zerop (arg1))
9101 return omit_one_operand (type, arg1, arg0);
9102 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9103 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9104 && real_onep (arg1))
9105 return non_lvalue (fold_convert (type, arg0));
9107 /* Transform x * -1.0 into -x. */
9108 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9109 && real_minus_onep (arg1))
9110 return fold_convert (type, negate_expr (arg0));
9112 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9113 if (flag_unsafe_math_optimizations
9114 && TREE_CODE (arg0) == RDIV_EXPR
9115 && TREE_CODE (arg1) == REAL_CST
9116 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9118 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9119 arg1, 0);
9120 if (tem)
9121 return fold_build2 (RDIV_EXPR, type, tem,
9122 TREE_OPERAND (arg0, 1));
9125 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9126 if (operand_equal_p (arg0, arg1, 0))
9128 tree tem = fold_strip_sign_ops (arg0);
9129 if (tem != NULL_TREE)
9131 tem = fold_convert (type, tem);
9132 return fold_build2 (MULT_EXPR, type, tem, tem);
9136 /* Optimize z * conj(z) for floating point complex numbers.
9137 Guarded by flag_unsafe_math_optimizations as non-finite
9138 imaginary components don't produce scalar results. */
9139 if (flag_unsafe_math_optimizations
9140 && TREE_CODE (arg0) == CONJ_EXPR
9141 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9142 return fold_mult_zconjz (type, arg1);
9143 if (flag_unsafe_math_optimizations
9144 && TREE_CODE (arg1) == CONJ_EXPR
9145 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9146 return fold_mult_zconjz (type, arg0);
9148 if (flag_unsafe_math_optimizations)
9150 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9151 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9153 /* Optimizations of root(...)*root(...). */
9154 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9156 tree rootfn, arg, arglist;
9157 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9158 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9160 /* Optimize sqrt(x)*sqrt(x) as x. */
9161 if (BUILTIN_SQRT_P (fcode0)
9162 && operand_equal_p (arg00, arg10, 0)
9163 && ! HONOR_SNANS (TYPE_MODE (type)))
9164 return arg00;
9166 /* Optimize root(x)*root(y) as root(x*y). */
9167 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9168 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9169 arglist = build_tree_list (NULL_TREE, arg);
9170 return build_function_call_expr (rootfn, arglist);
9173 /* Optimize expN(x)*expN(y) as expN(x+y). */
9174 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9176 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9177 tree arg = fold_build2 (PLUS_EXPR, type,
9178 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9179 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9180 tree arglist = build_tree_list (NULL_TREE, arg);
9181 return build_function_call_expr (expfn, arglist);
9184 /* Optimizations of pow(...)*pow(...). */
9185 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9186 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9187 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9189 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9190 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9191 1)));
9192 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9193 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9194 1)));
9196 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9197 if (operand_equal_p (arg01, arg11, 0))
9199 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9200 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9201 tree arglist = tree_cons (NULL_TREE, arg,
9202 build_tree_list (NULL_TREE,
9203 arg01));
9204 return build_function_call_expr (powfn, arglist);
9207 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9208 if (operand_equal_p (arg00, arg10, 0))
9210 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9211 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9212 tree arglist = tree_cons (NULL_TREE, arg00,
9213 build_tree_list (NULL_TREE,
9214 arg));
9215 return build_function_call_expr (powfn, arglist);
9219 /* Optimize tan(x)*cos(x) as sin(x). */
9220 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9221 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9222 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9223 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9224 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9225 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9226 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9227 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9229 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9231 if (sinfn != NULL_TREE)
9232 return build_function_call_expr (sinfn,
9233 TREE_OPERAND (arg0, 1));
9236 /* Optimize x*pow(x,c) as pow(x,c+1). */
9237 if (fcode1 == BUILT_IN_POW
9238 || fcode1 == BUILT_IN_POWF
9239 || fcode1 == BUILT_IN_POWL)
9241 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9242 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9243 1)));
9244 if (TREE_CODE (arg11) == REAL_CST
9245 && ! TREE_CONSTANT_OVERFLOW (arg11)
9246 && operand_equal_p (arg0, arg10, 0))
9248 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9249 REAL_VALUE_TYPE c;
9250 tree arg, arglist;
9252 c = TREE_REAL_CST (arg11);
9253 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9254 arg = build_real (type, c);
9255 arglist = build_tree_list (NULL_TREE, arg);
9256 arglist = tree_cons (NULL_TREE, arg0, arglist);
9257 return build_function_call_expr (powfn, arglist);
9261 /* Optimize pow(x,c)*x as pow(x,c+1). */
9262 if (fcode0 == BUILT_IN_POW
9263 || fcode0 == BUILT_IN_POWF
9264 || fcode0 == BUILT_IN_POWL)
9266 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9267 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9268 1)));
9269 if (TREE_CODE (arg01) == REAL_CST
9270 && ! TREE_CONSTANT_OVERFLOW (arg01)
9271 && operand_equal_p (arg1, arg00, 0))
9273 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9274 REAL_VALUE_TYPE c;
9275 tree arg, arglist;
9277 c = TREE_REAL_CST (arg01);
9278 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9279 arg = build_real (type, c);
9280 arglist = build_tree_list (NULL_TREE, arg);
9281 arglist = tree_cons (NULL_TREE, arg1, arglist);
9282 return build_function_call_expr (powfn, arglist);
9286 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9287 if (! optimize_size
9288 && operand_equal_p (arg0, arg1, 0))
9290 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9292 if (powfn)
9294 tree arg = build_real (type, dconst2);
9295 tree arglist = build_tree_list (NULL_TREE, arg);
9296 arglist = tree_cons (NULL_TREE, arg0, arglist);
9297 return build_function_call_expr (powfn, arglist);
9302 goto associate;
9304 case BIT_IOR_EXPR:
9305 bit_ior:
9306 if (integer_all_onesp (arg1))
9307 return omit_one_operand (type, arg1, arg0);
9308 if (integer_zerop (arg1))
9309 return non_lvalue (fold_convert (type, arg0));
9310 if (operand_equal_p (arg0, arg1, 0))
9311 return non_lvalue (fold_convert (type, arg0));
9313 /* ~X | X is -1. */
9314 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9315 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9317 t1 = build_int_cst (type, -1);
9318 t1 = force_fit_type (t1, 0, false, false);
9319 return omit_one_operand (type, t1, arg1);
9322 /* X | ~X is -1. */
9323 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9324 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9326 t1 = build_int_cst (type, -1);
9327 t1 = force_fit_type (t1, 0, false, false);
9328 return omit_one_operand (type, t1, arg0);
9331 /* Canonicalize (X & C1) | C2. */
9332 if (TREE_CODE (arg0) == BIT_AND_EXPR
9333 && TREE_CODE (arg1) == INTEGER_CST
9334 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9336 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9337 int width = TYPE_PRECISION (type);
9338 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9339 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9340 hi2 = TREE_INT_CST_HIGH (arg1);
9341 lo2 = TREE_INT_CST_LOW (arg1);
9343 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9344 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9345 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9347 if (width > HOST_BITS_PER_WIDE_INT)
9349 mhi = (unsigned HOST_WIDE_INT) -1
9350 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9351 mlo = -1;
9353 else
9355 mhi = 0;
9356 mlo = (unsigned HOST_WIDE_INT) -1
9357 >> (HOST_BITS_PER_WIDE_INT - width);
9360 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9361 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9362 return fold_build2 (BIT_IOR_EXPR, type,
9363 TREE_OPERAND (arg0, 0), arg1);
9365 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9366 hi1 &= mhi;
9367 lo1 &= mlo;
9368 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9369 return fold_build2 (BIT_IOR_EXPR, type,
9370 fold_build2 (BIT_AND_EXPR, type,
9371 TREE_OPERAND (arg0, 0),
9372 build_int_cst_wide (type,
9373 lo1 & ~lo2,
9374 hi1 & ~hi2)),
9375 arg1);
9378 /* (X & Y) | Y is (X, Y). */
9379 if (TREE_CODE (arg0) == BIT_AND_EXPR
9380 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9381 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9382 /* (X & Y) | X is (Y, X). */
9383 if (TREE_CODE (arg0) == BIT_AND_EXPR
9384 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9385 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9386 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9387 /* X | (X & Y) is (Y, X). */
9388 if (TREE_CODE (arg1) == BIT_AND_EXPR
9389 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9390 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9391 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9392 /* X | (Y & X) is (Y, X). */
9393 if (TREE_CODE (arg1) == BIT_AND_EXPR
9394 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9395 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9396 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9398 t1 = distribute_bit_expr (code, type, arg0, arg1);
9399 if (t1 != NULL_TREE)
9400 return t1;
9402 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9404 This results in more efficient code for machines without a NAND
9405 instruction. Combine will canonicalize to the first form
9406 which will allow use of NAND instructions provided by the
9407 backend if they exist. */
9408 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9409 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9411 return fold_build1 (BIT_NOT_EXPR, type,
9412 build2 (BIT_AND_EXPR, type,
9413 TREE_OPERAND (arg0, 0),
9414 TREE_OPERAND (arg1, 0)));
9417 /* See if this can be simplified into a rotate first. If that
9418 is unsuccessful continue in the association code. */
9419 goto bit_rotate;
9421 case BIT_XOR_EXPR:
9422 if (integer_zerop (arg1))
9423 return non_lvalue (fold_convert (type, arg0));
9424 if (integer_all_onesp (arg1))
9425 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9426 if (operand_equal_p (arg0, arg1, 0))
9427 return omit_one_operand (type, integer_zero_node, arg0);
9429 /* ~X ^ X is -1. */
9430 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9431 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9433 t1 = build_int_cst (type, -1);
9434 t1 = force_fit_type (t1, 0, false, false);
9435 return omit_one_operand (type, t1, arg1);
9438 /* X ^ ~X is -1. */
9439 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9440 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9442 t1 = build_int_cst (type, -1);
9443 t1 = force_fit_type (t1, 0, false, false);
9444 return omit_one_operand (type, t1, arg0);
9447 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9448 with a constant, and the two constants have no bits in common,
9449 we should treat this as a BIT_IOR_EXPR since this may produce more
9450 simplifications. */
9451 if (TREE_CODE (arg0) == BIT_AND_EXPR
9452 && TREE_CODE (arg1) == BIT_AND_EXPR
9453 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9454 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9455 && integer_zerop (const_binop (BIT_AND_EXPR,
9456 TREE_OPERAND (arg0, 1),
9457 TREE_OPERAND (arg1, 1), 0)))
9459 code = BIT_IOR_EXPR;
9460 goto bit_ior;
9463 /* (X | Y) ^ X -> Y & ~ X*/
9464 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9465 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9467 tree t2 = TREE_OPERAND (arg0, 1);
9468 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9469 arg1);
9470 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9471 fold_convert (type, t1));
9472 return t1;
9475 /* (Y | X) ^ X -> Y & ~ X*/
9476 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9477 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9479 tree t2 = TREE_OPERAND (arg0, 0);
9480 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9481 arg1);
9482 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9483 fold_convert (type, t1));
9484 return t1;
9487 /* X ^ (X | Y) -> Y & ~ X*/
9488 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9489 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9491 tree t2 = TREE_OPERAND (arg1, 1);
9492 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9493 arg0);
9494 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9495 fold_convert (type, t1));
9496 return t1;
9499 /* X ^ (Y | X) -> Y & ~ X*/
9500 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9501 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9503 tree t2 = TREE_OPERAND (arg1, 0);
9504 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9505 arg0);
9506 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9507 fold_convert (type, t1));
9508 return t1;
9511 /* Convert ~X ^ ~Y to X ^ Y. */
9512 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9513 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9514 return fold_build2 (code, type,
9515 fold_convert (type, TREE_OPERAND (arg0, 0)),
9516 fold_convert (type, TREE_OPERAND (arg1, 0)));
9518 /* Convert ~X ^ C to X ^ ~C. */
9519 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9520 && TREE_CODE (arg1) == INTEGER_CST)
9521 return fold_build2 (code, type,
9522 fold_convert (type, TREE_OPERAND (arg0, 0)),
9523 fold_build1 (BIT_NOT_EXPR, type, arg1));
9525 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9526 if (TREE_CODE (arg0) == BIT_AND_EXPR
9527 && integer_onep (TREE_OPERAND (arg0, 1))
9528 && integer_onep (arg1))
9529 return fold_build2 (EQ_EXPR, type, arg0,
9530 build_int_cst (TREE_TYPE (arg0), 0));
9532 /* Fold (X & Y) ^ Y as ~X & Y. */
9533 if (TREE_CODE (arg0) == BIT_AND_EXPR
9534 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9536 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9537 return fold_build2 (BIT_AND_EXPR, type,
9538 fold_build1 (BIT_NOT_EXPR, type, tem),
9539 fold_convert (type, arg1));
9541 /* Fold (X & Y) ^ X as ~Y & X. */
9542 if (TREE_CODE (arg0) == BIT_AND_EXPR
9543 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9544 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9546 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9547 return fold_build2 (BIT_AND_EXPR, type,
9548 fold_build1 (BIT_NOT_EXPR, type, tem),
9549 fold_convert (type, arg1));
9551 /* Fold X ^ (X & Y) as X & ~Y. */
9552 if (TREE_CODE (arg1) == BIT_AND_EXPR
9553 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9555 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9556 return fold_build2 (BIT_AND_EXPR, type,
9557 fold_convert (type, arg0),
9558 fold_build1 (BIT_NOT_EXPR, type, tem));
9560 /* Fold X ^ (Y & X) as ~Y & X. */
9561 if (TREE_CODE (arg1) == BIT_AND_EXPR
9562 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9563 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9565 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9566 return fold_build2 (BIT_AND_EXPR, type,
9567 fold_build1 (BIT_NOT_EXPR, type, tem),
9568 fold_convert (type, arg0));
9571 /* See if this can be simplified into a rotate first. If that
9572 is unsuccessful continue in the association code. */
9573 goto bit_rotate;
9575 case BIT_AND_EXPR:
9576 if (integer_all_onesp (arg1))
9577 return non_lvalue (fold_convert (type, arg0));
9578 if (integer_zerop (arg1))
9579 return omit_one_operand (type, arg1, arg0);
9580 if (operand_equal_p (arg0, arg1, 0))
9581 return non_lvalue (fold_convert (type, arg0));
9583 /* ~X & X is always zero. */
9584 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9585 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9586 return omit_one_operand (type, integer_zero_node, arg1);
9588 /* X & ~X is always zero. */
9589 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9590 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9591 return omit_one_operand (type, integer_zero_node, arg0);
9593 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9594 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9595 && TREE_CODE (arg1) == INTEGER_CST
9596 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9597 return fold_build2 (BIT_IOR_EXPR, type,
9598 fold_build2 (BIT_AND_EXPR, type,
9599 TREE_OPERAND (arg0, 0), arg1),
9600 fold_build2 (BIT_AND_EXPR, type,
9601 TREE_OPERAND (arg0, 1), arg1));
9603 /* (X | Y) & Y is (X, Y). */
9604 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9605 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9606 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9607 /* (X | Y) & X is (Y, X). */
9608 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9609 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9610 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9611 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9612 /* X & (X | Y) is (Y, X). */
9613 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9614 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9615 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9616 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9617 /* X & (Y | X) is (Y, X). */
9618 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9619 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9620 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9621 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9623 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9624 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9625 && integer_onep (TREE_OPERAND (arg0, 1))
9626 && integer_onep (arg1))
9628 tem = TREE_OPERAND (arg0, 0);
9629 return fold_build2 (EQ_EXPR, type,
9630 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9631 build_int_cst (TREE_TYPE (tem), 1)),
9632 build_int_cst (TREE_TYPE (tem), 0));
9634 /* Fold ~X & 1 as (X & 1) == 0. */
9635 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9636 && integer_onep (arg1))
9638 tem = TREE_OPERAND (arg0, 0);
9639 return fold_build2 (EQ_EXPR, type,
9640 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9641 build_int_cst (TREE_TYPE (tem), 1)),
9642 build_int_cst (TREE_TYPE (tem), 0));
9645 /* Fold (X ^ Y) & Y as ~X & Y. */
9646 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9647 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9649 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9650 return fold_build2 (BIT_AND_EXPR, type,
9651 fold_build1 (BIT_NOT_EXPR, type, tem),
9652 fold_convert (type, arg1));
9654 /* Fold (X ^ Y) & X as ~Y & X. */
9655 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9656 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9657 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9659 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9660 return fold_build2 (BIT_AND_EXPR, type,
9661 fold_build1 (BIT_NOT_EXPR, type, tem),
9662 fold_convert (type, arg1));
9664 /* Fold X & (X ^ Y) as X & ~Y. */
9665 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9666 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9668 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9669 return fold_build2 (BIT_AND_EXPR, type,
9670 fold_convert (type, arg0),
9671 fold_build1 (BIT_NOT_EXPR, type, tem));
9673 /* Fold X & (Y ^ X) as ~Y & X. */
9674 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9675 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9676 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9678 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9679 return fold_build2 (BIT_AND_EXPR, type,
9680 fold_build1 (BIT_NOT_EXPR, type, tem),
9681 fold_convert (type, arg0));
9684 t1 = distribute_bit_expr (code, type, arg0, arg1);
9685 if (t1 != NULL_TREE)
9686 return t1;
9687 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9688 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9689 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9691 unsigned int prec
9692 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9694 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9695 && (~TREE_INT_CST_LOW (arg1)
9696 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9697 return fold_convert (type, TREE_OPERAND (arg0, 0));
9700 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9702 This results in more efficient code for machines without a NOR
9703 instruction. Combine will canonicalize to the first form
9704 which will allow use of NOR instructions provided by the
9705 backend if they exist. */
9706 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9707 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9709 return fold_build1 (BIT_NOT_EXPR, type,
9710 build2 (BIT_IOR_EXPR, type,
9711 TREE_OPERAND (arg0, 0),
9712 TREE_OPERAND (arg1, 0)));
9715 goto associate;
9717 case RDIV_EXPR:
9718 /* Don't touch a floating-point divide by zero unless the mode
9719 of the constant can represent infinity. */
9720 if (TREE_CODE (arg1) == REAL_CST
9721 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9722 && real_zerop (arg1))
9723 return NULL_TREE;
9725 /* Optimize A / A to 1.0 if we don't care about
9726 NaNs or Infinities. Skip the transformation
9727 for non-real operands. */
9728 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9729 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9730 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9731 && operand_equal_p (arg0, arg1, 0))
9733 tree r = build_real (TREE_TYPE (arg0), dconst1);
9735 return omit_two_operands (type, r, arg0, arg1);
9738 /* The complex version of the above A / A optimization. */
9739 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9740 && operand_equal_p (arg0, arg1, 0))
9742 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9743 if (! HONOR_NANS (TYPE_MODE (elem_type))
9744 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9746 tree r = build_real (elem_type, dconst1);
9747 /* omit_two_operands will call fold_convert for us. */
9748 return omit_two_operands (type, r, arg0, arg1);
9752 /* (-A) / (-B) -> A / B */
9753 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9754 return fold_build2 (RDIV_EXPR, type,
9755 TREE_OPERAND (arg0, 0),
9756 negate_expr (arg1));
9757 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9758 return fold_build2 (RDIV_EXPR, type,
9759 negate_expr (arg0),
9760 TREE_OPERAND (arg1, 0));
9762 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9763 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9764 && real_onep (arg1))
9765 return non_lvalue (fold_convert (type, arg0));
9767 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9768 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9769 && real_minus_onep (arg1))
9770 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9772 /* If ARG1 is a constant, we can convert this to a multiply by the
9773 reciprocal. This does not have the same rounding properties,
9774 so only do this if -funsafe-math-optimizations. We can actually
9775 always safely do it if ARG1 is a power of two, but it's hard to
9776 tell if it is or not in a portable manner. */
9777 if (TREE_CODE (arg1) == REAL_CST)
9779 if (flag_unsafe_math_optimizations
9780 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9781 arg1, 0)))
9782 return fold_build2 (MULT_EXPR, type, arg0, tem);
9783 /* Find the reciprocal if optimizing and the result is exact. */
9784 if (optimize)
9786 REAL_VALUE_TYPE r;
9787 r = TREE_REAL_CST (arg1);
9788 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
9790 tem = build_real (type, r);
9791 return fold_build2 (MULT_EXPR, type,
9792 fold_convert (type, arg0), tem);
9796 /* Convert A/B/C to A/(B*C). */
9797 if (flag_unsafe_math_optimizations
9798 && TREE_CODE (arg0) == RDIV_EXPR)
9799 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9800 fold_build2 (MULT_EXPR, type,
9801 TREE_OPERAND (arg0, 1), arg1));
9803 /* Convert A/(B/C) to (A/B)*C. */
9804 if (flag_unsafe_math_optimizations
9805 && TREE_CODE (arg1) == RDIV_EXPR)
9806 return fold_build2 (MULT_EXPR, type,
9807 fold_build2 (RDIV_EXPR, type, arg0,
9808 TREE_OPERAND (arg1, 0)),
9809 TREE_OPERAND (arg1, 1));
9811 /* Convert C1/(X*C2) into (C1/C2)/X. */
9812 if (flag_unsafe_math_optimizations
9813 && TREE_CODE (arg1) == MULT_EXPR
9814 && TREE_CODE (arg0) == REAL_CST
9815 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9817 tree tem = const_binop (RDIV_EXPR, arg0,
9818 TREE_OPERAND (arg1, 1), 0);
9819 if (tem)
9820 return fold_build2 (RDIV_EXPR, type, tem,
9821 TREE_OPERAND (arg1, 0));
9824 if (flag_unsafe_math_optimizations)
9826 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9827 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9829 /* Optimize sin(x)/cos(x) as tan(x). */
9830 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9831 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9832 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9833 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9834 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9836 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9838 if (tanfn != NULL_TREE)
9839 return build_function_call_expr (tanfn,
9840 TREE_OPERAND (arg0, 1));
9843 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9844 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9845 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9846 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9847 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9848 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9850 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9852 if (tanfn != NULL_TREE)
9854 tree tmp = TREE_OPERAND (arg0, 1);
9855 tmp = build_function_call_expr (tanfn, tmp);
9856 return fold_build2 (RDIV_EXPR, type,
9857 build_real (type, dconst1), tmp);
9861 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9862 NaNs or Infinities. */
9863 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9864 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9865 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9867 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9868 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9870 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9871 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9872 && operand_equal_p (arg00, arg01, 0))
9874 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9876 if (cosfn != NULL_TREE)
9877 return build_function_call_expr (cosfn,
9878 TREE_OPERAND (arg0, 1));
9882 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9883 NaNs or Infinities. */
9884 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9885 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9886 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9888 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9889 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9891 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9892 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9893 && operand_equal_p (arg00, arg01, 0))
9895 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9897 if (cosfn != NULL_TREE)
9899 tree tmp = TREE_OPERAND (arg0, 1);
9900 tmp = build_function_call_expr (cosfn, tmp);
9901 return fold_build2 (RDIV_EXPR, type,
9902 build_real (type, dconst1),
9903 tmp);
9908 /* Optimize pow(x,c)/x as pow(x,c-1). */
9909 if (fcode0 == BUILT_IN_POW
9910 || fcode0 == BUILT_IN_POWF
9911 || fcode0 == BUILT_IN_POWL)
9913 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9914 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9915 if (TREE_CODE (arg01) == REAL_CST
9916 && ! TREE_CONSTANT_OVERFLOW (arg01)
9917 && operand_equal_p (arg1, arg00, 0))
9919 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9920 REAL_VALUE_TYPE c;
9921 tree arg, arglist;
9923 c = TREE_REAL_CST (arg01);
9924 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9925 arg = build_real (type, c);
9926 arglist = build_tree_list (NULL_TREE, arg);
9927 arglist = tree_cons (NULL_TREE, arg1, arglist);
9928 return build_function_call_expr (powfn, arglist);
9932 /* Optimize x/expN(y) into x*expN(-y). */
9933 if (BUILTIN_EXPONENT_P (fcode1))
9935 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9936 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
9937 tree arglist = build_tree_list (NULL_TREE,
9938 fold_convert (type, arg));
9939 arg1 = build_function_call_expr (expfn, arglist);
9940 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9943 /* Optimize x/pow(y,z) into x*pow(y,-z). */
9944 if (fcode1 == BUILT_IN_POW
9945 || fcode1 == BUILT_IN_POWF
9946 || fcode1 == BUILT_IN_POWL)
9948 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9949 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9950 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
9951 tree neg11 = fold_convert (type, negate_expr (arg11));
9952 tree arglist = tree_cons(NULL_TREE, arg10,
9953 build_tree_list (NULL_TREE, neg11));
9954 arg1 = build_function_call_expr (powfn, arglist);
9955 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9958 return NULL_TREE;
9960 case TRUNC_DIV_EXPR:
9961 case FLOOR_DIV_EXPR:
9962 /* Simplify A / (B << N) where A and B are positive and B is
9963 a power of 2, to A >> (N + log2(B)). */
9964 if (TREE_CODE (arg1) == LSHIFT_EXPR
9965 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9967 tree sval = TREE_OPERAND (arg1, 0);
9968 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
9970 tree sh_cnt = TREE_OPERAND (arg1, 1);
9971 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
9973 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
9974 sh_cnt, build_int_cst (NULL_TREE, pow2));
9975 return fold_build2 (RSHIFT_EXPR, type,
9976 fold_convert (type, arg0), sh_cnt);
9979 /* Fall thru */
9981 case ROUND_DIV_EXPR:
9982 case CEIL_DIV_EXPR:
9983 case EXACT_DIV_EXPR:
9984 if (integer_onep (arg1))
9985 return non_lvalue (fold_convert (type, arg0));
9986 if (integer_zerop (arg1))
9987 return NULL_TREE;
9988 /* X / -1 is -X. */
9989 if (!TYPE_UNSIGNED (type)
9990 && TREE_CODE (arg1) == INTEGER_CST
9991 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9992 && TREE_INT_CST_HIGH (arg1) == -1)
9993 return fold_convert (type, negate_expr (arg0));
9995 /* Convert -A / -B to A / B when the type is signed and overflow is
9996 undefined. */
9997 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9998 && TREE_CODE (arg0) == NEGATE_EXPR
9999 && negate_expr_p (arg1))
10000 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10001 negate_expr (arg1));
10002 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10003 && TREE_CODE (arg1) == NEGATE_EXPR
10004 && negate_expr_p (arg0))
10005 return fold_build2 (code, type, negate_expr (arg0),
10006 TREE_OPERAND (arg1, 0));
10008 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10009 operation, EXACT_DIV_EXPR.
10011 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10012 At one time others generated faster code, it's not clear if they do
10013 after the last round to changes to the DIV code in expmed.c. */
10014 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10015 && multiple_of_p (type, arg0, arg1))
10016 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10018 if (TREE_CODE (arg1) == INTEGER_CST
10019 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10020 return fold_convert (type, tem);
10022 return NULL_TREE;
10024 case CEIL_MOD_EXPR:
10025 case FLOOR_MOD_EXPR:
10026 case ROUND_MOD_EXPR:
10027 case TRUNC_MOD_EXPR:
10028 /* X % 1 is always zero, but be sure to preserve any side
10029 effects in X. */
10030 if (integer_onep (arg1))
10031 return omit_one_operand (type, integer_zero_node, arg0);
10033 /* X % 0, return X % 0 unchanged so that we can get the
10034 proper warnings and errors. */
10035 if (integer_zerop (arg1))
10036 return NULL_TREE;
10038 /* 0 % X is always zero, but be sure to preserve any side
10039 effects in X. Place this after checking for X == 0. */
10040 if (integer_zerop (arg0))
10041 return omit_one_operand (type, integer_zero_node, arg1);
10043 /* X % -1 is zero. */
10044 if (!TYPE_UNSIGNED (type)
10045 && TREE_CODE (arg1) == INTEGER_CST
10046 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10047 && TREE_INT_CST_HIGH (arg1) == -1)
10048 return omit_one_operand (type, integer_zero_node, arg0);
10050 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10051 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10052 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10053 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10055 tree c = arg1;
10056 /* Also optimize A % (C << N) where C is a power of 2,
10057 to A & ((C << N) - 1). */
10058 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10059 c = TREE_OPERAND (arg1, 0);
10061 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10063 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
10064 arg1, integer_one_node);
10065 return fold_build2 (BIT_AND_EXPR, type,
10066 fold_convert (type, arg0),
10067 fold_convert (type, mask));
10071 /* X % -C is the same as X % C. */
10072 if (code == TRUNC_MOD_EXPR
10073 && !TYPE_UNSIGNED (type)
10074 && TREE_CODE (arg1) == INTEGER_CST
10075 && !TREE_CONSTANT_OVERFLOW (arg1)
10076 && TREE_INT_CST_HIGH (arg1) < 0
10077 && !flag_trapv
10078 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10079 && !sign_bit_p (arg1, arg1))
10080 return fold_build2 (code, type, fold_convert (type, arg0),
10081 fold_convert (type, negate_expr (arg1)));
10083 /* X % -Y is the same as X % Y. */
10084 if (code == TRUNC_MOD_EXPR
10085 && !TYPE_UNSIGNED (type)
10086 && TREE_CODE (arg1) == NEGATE_EXPR
10087 && !flag_trapv)
10088 return fold_build2 (code, type, fold_convert (type, arg0),
10089 fold_convert (type, TREE_OPERAND (arg1, 0)));
10091 if (TREE_CODE (arg1) == INTEGER_CST
10092 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10093 return fold_convert (type, tem);
10095 return NULL_TREE;
10097 case LROTATE_EXPR:
10098 case RROTATE_EXPR:
10099 if (integer_all_onesp (arg0))
10100 return omit_one_operand (type, arg0, arg1);
10101 goto shift;
10103 case RSHIFT_EXPR:
10104 /* Optimize -1 >> x for arithmetic right shifts. */
10105 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10106 return omit_one_operand (type, arg0, arg1);
10107 /* ... fall through ... */
10109 case LSHIFT_EXPR:
10110 shift:
10111 if (integer_zerop (arg1))
10112 return non_lvalue (fold_convert (type, arg0));
10113 if (integer_zerop (arg0))
10114 return omit_one_operand (type, arg0, arg1);
10116 /* Since negative shift count is not well-defined,
10117 don't try to compute it in the compiler. */
10118 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10119 return NULL_TREE;
10121 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10122 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10123 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10124 && host_integerp (TREE_OPERAND (arg0, 1), false)
10125 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10127 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10128 + TREE_INT_CST_LOW (arg1));
10130 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10131 being well defined. */
10132 if (low >= TYPE_PRECISION (type))
10134 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10135 low = low % TYPE_PRECISION (type);
10136 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10137 return build_int_cst (type, 0);
10138 else
10139 low = TYPE_PRECISION (type) - 1;
10142 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10143 build_int_cst (type, low));
10146 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10147 into x & ((unsigned)-1 >> c) for unsigned types. */
10148 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10149 || (TYPE_UNSIGNED (type)
10150 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10151 && host_integerp (arg1, false)
10152 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10153 && host_integerp (TREE_OPERAND (arg0, 1), false)
10154 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10156 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10157 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10158 tree lshift;
10159 tree arg00;
10161 if (low0 == low1)
10163 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10165 lshift = build_int_cst (type, -1);
10166 lshift = int_const_binop (code, lshift, arg1, 0);
10168 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10172 /* Rewrite an LROTATE_EXPR by a constant into an
10173 RROTATE_EXPR by a new constant. */
10174 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10176 tree tem = build_int_cst (NULL_TREE,
10177 GET_MODE_BITSIZE (TYPE_MODE (type)));
10178 tem = fold_convert (TREE_TYPE (arg1), tem);
10179 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10180 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10183 /* If we have a rotate of a bit operation with the rotate count and
10184 the second operand of the bit operation both constant,
10185 permute the two operations. */
10186 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10187 && (TREE_CODE (arg0) == BIT_AND_EXPR
10188 || TREE_CODE (arg0) == BIT_IOR_EXPR
10189 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10190 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10191 return fold_build2 (TREE_CODE (arg0), type,
10192 fold_build2 (code, type,
10193 TREE_OPERAND (arg0, 0), arg1),
10194 fold_build2 (code, type,
10195 TREE_OPERAND (arg0, 1), arg1));
10197 /* Two consecutive rotates adding up to the width of the mode can
10198 be ignored. */
10199 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10200 && TREE_CODE (arg0) == RROTATE_EXPR
10201 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10202 && TREE_INT_CST_HIGH (arg1) == 0
10203 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10204 && ((TREE_INT_CST_LOW (arg1)
10205 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10206 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10207 return TREE_OPERAND (arg0, 0);
10209 return NULL_TREE;
10211 case MIN_EXPR:
10212 if (operand_equal_p (arg0, arg1, 0))
10213 return omit_one_operand (type, arg0, arg1);
10214 if (INTEGRAL_TYPE_P (type)
10215 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10216 return omit_one_operand (type, arg1, arg0);
10217 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10218 if (tem)
10219 return tem;
10220 goto associate;
10222 case MAX_EXPR:
10223 if (operand_equal_p (arg0, arg1, 0))
10224 return omit_one_operand (type, arg0, arg1);
10225 if (INTEGRAL_TYPE_P (type)
10226 && TYPE_MAX_VALUE (type)
10227 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10228 return omit_one_operand (type, arg1, arg0);
10229 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10230 if (tem)
10231 return tem;
10232 goto associate;
10234 case TRUTH_ANDIF_EXPR:
10235 /* Note that the operands of this must be ints
10236 and their values must be 0 or 1.
10237 ("true" is a fixed value perhaps depending on the language.) */
10238 /* If first arg is constant zero, return it. */
10239 if (integer_zerop (arg0))
10240 return fold_convert (type, arg0);
10241 case TRUTH_AND_EXPR:
10242 /* If either arg is constant true, drop it. */
10243 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10244 return non_lvalue (fold_convert (type, arg1));
10245 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10246 /* Preserve sequence points. */
10247 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10248 return non_lvalue (fold_convert (type, arg0));
10249 /* If second arg is constant zero, result is zero, but first arg
10250 must be evaluated. */
10251 if (integer_zerop (arg1))
10252 return omit_one_operand (type, arg1, arg0);
10253 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10254 case will be handled here. */
10255 if (integer_zerop (arg0))
10256 return omit_one_operand (type, arg0, arg1);
10258 /* !X && X is always false. */
10259 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10260 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10261 return omit_one_operand (type, integer_zero_node, arg1);
10262 /* X && !X is always false. */
10263 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10264 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10265 return omit_one_operand (type, integer_zero_node, arg0);
10267 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10268 means A >= Y && A != MAX, but in this case we know that
10269 A < X <= MAX. */
10271 if (!TREE_SIDE_EFFECTS (arg0)
10272 && !TREE_SIDE_EFFECTS (arg1))
10274 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10275 if (tem && !operand_equal_p (tem, arg0, 0))
10276 return fold_build2 (code, type, tem, arg1);
10278 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10279 if (tem && !operand_equal_p (tem, arg1, 0))
10280 return fold_build2 (code, type, arg0, tem);
10283 truth_andor:
10284 /* We only do these simplifications if we are optimizing. */
10285 if (!optimize)
10286 return NULL_TREE;
10288 /* Check for things like (A || B) && (A || C). We can convert this
10289 to A || (B && C). Note that either operator can be any of the four
10290 truth and/or operations and the transformation will still be
10291 valid. Also note that we only care about order for the
10292 ANDIF and ORIF operators. If B contains side effects, this
10293 might change the truth-value of A. */
10294 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10295 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10296 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10297 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10298 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10299 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10301 tree a00 = TREE_OPERAND (arg0, 0);
10302 tree a01 = TREE_OPERAND (arg0, 1);
10303 tree a10 = TREE_OPERAND (arg1, 0);
10304 tree a11 = TREE_OPERAND (arg1, 1);
10305 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10306 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10307 && (code == TRUTH_AND_EXPR
10308 || code == TRUTH_OR_EXPR));
10310 if (operand_equal_p (a00, a10, 0))
10311 return fold_build2 (TREE_CODE (arg0), type, a00,
10312 fold_build2 (code, type, a01, a11));
10313 else if (commutative && operand_equal_p (a00, a11, 0))
10314 return fold_build2 (TREE_CODE (arg0), type, a00,
10315 fold_build2 (code, type, a01, a10));
10316 else if (commutative && operand_equal_p (a01, a10, 0))
10317 return fold_build2 (TREE_CODE (arg0), type, a01,
10318 fold_build2 (code, type, a00, a11));
10320 /* This case if tricky because we must either have commutative
10321 operators or else A10 must not have side-effects. */
10323 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10324 && operand_equal_p (a01, a11, 0))
10325 return fold_build2 (TREE_CODE (arg0), type,
10326 fold_build2 (code, type, a00, a10),
10327 a01);
10330 /* See if we can build a range comparison. */
10331 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10332 return tem;
10334 /* Check for the possibility of merging component references. If our
10335 lhs is another similar operation, try to merge its rhs with our
10336 rhs. Then try to merge our lhs and rhs. */
10337 if (TREE_CODE (arg0) == code
10338 && 0 != (tem = fold_truthop (code, type,
10339 TREE_OPERAND (arg0, 1), arg1)))
10340 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10342 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10343 return tem;
10345 return NULL_TREE;
10347 case TRUTH_ORIF_EXPR:
10348 /* Note that the operands of this must be ints
10349 and their values must be 0 or true.
10350 ("true" is a fixed value perhaps depending on the language.) */
10351 /* If first arg is constant true, return it. */
10352 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10353 return fold_convert (type, arg0);
10354 case TRUTH_OR_EXPR:
10355 /* If either arg is constant zero, drop it. */
10356 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10357 return non_lvalue (fold_convert (type, arg1));
10358 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10359 /* Preserve sequence points. */
10360 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10361 return non_lvalue (fold_convert (type, arg0));
10362 /* If second arg is constant true, result is true, but we must
10363 evaluate first arg. */
10364 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10365 return omit_one_operand (type, arg1, arg0);
10366 /* Likewise for first arg, but note this only occurs here for
10367 TRUTH_OR_EXPR. */
10368 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10369 return omit_one_operand (type, arg0, arg1);
10371 /* !X || X is always true. */
10372 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10373 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10374 return omit_one_operand (type, integer_one_node, arg1);
10375 /* X || !X is always true. */
10376 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10377 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10378 return omit_one_operand (type, integer_one_node, arg0);
10380 goto truth_andor;
10382 case TRUTH_XOR_EXPR:
10383 /* If the second arg is constant zero, drop it. */
10384 if (integer_zerop (arg1))
10385 return non_lvalue (fold_convert (type, arg0));
10386 /* If the second arg is constant true, this is a logical inversion. */
10387 if (integer_onep (arg1))
10389 /* Only call invert_truthvalue if operand is a truth value. */
10390 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10391 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10392 else
10393 tem = invert_truthvalue (arg0);
10394 return non_lvalue (fold_convert (type, tem));
10396 /* Identical arguments cancel to zero. */
10397 if (operand_equal_p (arg0, arg1, 0))
10398 return omit_one_operand (type, integer_zero_node, arg0);
10400 /* !X ^ X is always true. */
10401 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10402 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10403 return omit_one_operand (type, integer_one_node, arg1);
10405 /* X ^ !X is always true. */
10406 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10407 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10408 return omit_one_operand (type, integer_one_node, arg0);
10410 return NULL_TREE;
10412 case EQ_EXPR:
10413 case NE_EXPR:
10414 tem = fold_comparison (code, type, op0, op1);
10415 if (tem != NULL_TREE)
10416 return tem;
10418 /* bool_var != 0 becomes bool_var. */
10419 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10420 && code == NE_EXPR)
10421 return non_lvalue (fold_convert (type, arg0));
10423 /* bool_var == 1 becomes bool_var. */
10424 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10425 && code == EQ_EXPR)
10426 return non_lvalue (fold_convert (type, arg0));
10428 /* bool_var != 1 becomes !bool_var. */
10429 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10430 && code == NE_EXPR)
10431 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10433 /* bool_var == 0 becomes !bool_var. */
10434 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10435 && code == EQ_EXPR)
10436 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10438 /* If this is an equality comparison of the address of a non-weak
10439 object against zero, then we know the result. */
10440 if (TREE_CODE (arg0) == ADDR_EXPR
10441 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10442 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10443 && integer_zerop (arg1))
10444 return constant_boolean_node (code != EQ_EXPR, type);
10446 /* If this is an equality comparison of the address of two non-weak,
10447 unaliased symbols neither of which are extern (since we do not
10448 have access to attributes for externs), then we know the result. */
10449 if (TREE_CODE (arg0) == ADDR_EXPR
10450 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10451 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10452 && ! lookup_attribute ("alias",
10453 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10454 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10455 && TREE_CODE (arg1) == ADDR_EXPR
10456 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10457 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10458 && ! lookup_attribute ("alias",
10459 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10460 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10462 /* We know that we're looking at the address of two
10463 non-weak, unaliased, static _DECL nodes.
10465 It is both wasteful and incorrect to call operand_equal_p
10466 to compare the two ADDR_EXPR nodes. It is wasteful in that
10467 all we need to do is test pointer equality for the arguments
10468 to the two ADDR_EXPR nodes. It is incorrect to use
10469 operand_equal_p as that function is NOT equivalent to a
10470 C equality test. It can in fact return false for two
10471 objects which would test as equal using the C equality
10472 operator. */
10473 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10474 return constant_boolean_node (equal
10475 ? code == EQ_EXPR : code != EQ_EXPR,
10476 type);
10479 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10480 a MINUS_EXPR of a constant, we can convert it into a comparison with
10481 a revised constant as long as no overflow occurs. */
10482 if (TREE_CODE (arg1) == INTEGER_CST
10483 && (TREE_CODE (arg0) == PLUS_EXPR
10484 || TREE_CODE (arg0) == MINUS_EXPR)
10485 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10486 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10487 ? MINUS_EXPR : PLUS_EXPR,
10488 fold_convert (TREE_TYPE (arg0), arg1),
10489 TREE_OPERAND (arg0, 1), 0))
10490 && ! TREE_CONSTANT_OVERFLOW (tem))
10491 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10493 /* Similarly for a NEGATE_EXPR. */
10494 if (TREE_CODE (arg0) == NEGATE_EXPR
10495 && TREE_CODE (arg1) == INTEGER_CST
10496 && 0 != (tem = negate_expr (arg1))
10497 && TREE_CODE (tem) == INTEGER_CST
10498 && ! TREE_CONSTANT_OVERFLOW (tem))
10499 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10501 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10502 for !=. Don't do this for ordered comparisons due to overflow. */
10503 if (TREE_CODE (arg0) == MINUS_EXPR
10504 && integer_zerop (arg1))
10505 return fold_build2 (code, type,
10506 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10508 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10509 if (TREE_CODE (arg0) == ABS_EXPR
10510 && (integer_zerop (arg1) || real_zerop (arg1)))
10511 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10513 /* If this is an EQ or NE comparison with zero and ARG0 is
10514 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10515 two operations, but the latter can be done in one less insn
10516 on machines that have only two-operand insns or on which a
10517 constant cannot be the first operand. */
10518 if (TREE_CODE (arg0) == BIT_AND_EXPR
10519 && integer_zerop (arg1))
10521 tree arg00 = TREE_OPERAND (arg0, 0);
10522 tree arg01 = TREE_OPERAND (arg0, 1);
10523 if (TREE_CODE (arg00) == LSHIFT_EXPR
10524 && integer_onep (TREE_OPERAND (arg00, 0)))
10525 return
10526 fold_build2 (code, type,
10527 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10528 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10529 arg01, TREE_OPERAND (arg00, 1)),
10530 fold_convert (TREE_TYPE (arg0),
10531 integer_one_node)),
10532 arg1);
10533 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10534 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10535 return
10536 fold_build2 (code, type,
10537 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10538 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10539 arg00, TREE_OPERAND (arg01, 1)),
10540 fold_convert (TREE_TYPE (arg0),
10541 integer_one_node)),
10542 arg1);
10545 /* If this is an NE or EQ comparison of zero against the result of a
10546 signed MOD operation whose second operand is a power of 2, make
10547 the MOD operation unsigned since it is simpler and equivalent. */
10548 if (integer_zerop (arg1)
10549 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10550 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10551 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10552 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10553 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10554 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10556 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10557 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10558 fold_convert (newtype,
10559 TREE_OPERAND (arg0, 0)),
10560 fold_convert (newtype,
10561 TREE_OPERAND (arg0, 1)));
10563 return fold_build2 (code, type, newmod,
10564 fold_convert (newtype, arg1));
10567 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10568 C1 is a valid shift constant, and C2 is a power of two, i.e.
10569 a single bit. */
10570 if (TREE_CODE (arg0) == BIT_AND_EXPR
10571 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10572 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10573 == INTEGER_CST
10574 && integer_pow2p (TREE_OPERAND (arg0, 1))
10575 && integer_zerop (arg1))
10577 tree itype = TREE_TYPE (arg0);
10578 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10579 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10581 /* Check for a valid shift count. */
10582 if (TREE_INT_CST_HIGH (arg001) == 0
10583 && TREE_INT_CST_LOW (arg001) < prec)
10585 tree arg01 = TREE_OPERAND (arg0, 1);
10586 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10587 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10588 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10589 can be rewritten as (X & (C2 << C1)) != 0. */
10590 if ((log2 + TREE_INT_CST_LOW (arg01)) < prec)
10592 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10593 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10594 return fold_build2 (code, type, tem, arg1);
10596 /* Otherwise, for signed (arithmetic) shifts,
10597 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10598 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10599 else if (!TYPE_UNSIGNED (itype))
10600 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10601 arg000, build_int_cst (itype, 0));
10602 /* Otherwise, of unsigned (logical) shifts,
10603 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10604 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10605 else
10606 return omit_one_operand (type,
10607 code == EQ_EXPR ? integer_one_node
10608 : integer_zero_node,
10609 arg000);
10613 /* If this is an NE comparison of zero with an AND of one, remove the
10614 comparison since the AND will give the correct value. */
10615 if (code == NE_EXPR
10616 && integer_zerop (arg1)
10617 && TREE_CODE (arg0) == BIT_AND_EXPR
10618 && integer_onep (TREE_OPERAND (arg0, 1)))
10619 return fold_convert (type, arg0);
10621 /* If we have (A & C) == C where C is a power of 2, convert this into
10622 (A & C) != 0. Similarly for NE_EXPR. */
10623 if (TREE_CODE (arg0) == BIT_AND_EXPR
10624 && integer_pow2p (TREE_OPERAND (arg0, 1))
10625 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10626 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10627 arg0, fold_convert (TREE_TYPE (arg0),
10628 integer_zero_node));
10630 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10631 bit, then fold the expression into A < 0 or A >= 0. */
10632 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10633 if (tem)
10634 return tem;
10636 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10637 Similarly for NE_EXPR. */
10638 if (TREE_CODE (arg0) == BIT_AND_EXPR
10639 && TREE_CODE (arg1) == INTEGER_CST
10640 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10642 tree notc = fold_build1 (BIT_NOT_EXPR,
10643 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10644 TREE_OPERAND (arg0, 1));
10645 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10646 arg1, notc);
10647 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10648 if (integer_nonzerop (dandnotc))
10649 return omit_one_operand (type, rslt, arg0);
10652 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10653 Similarly for NE_EXPR. */
10654 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10655 && TREE_CODE (arg1) == INTEGER_CST
10656 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10658 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10659 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10660 TREE_OPERAND (arg0, 1), notd);
10661 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10662 if (integer_nonzerop (candnotd))
10663 return omit_one_operand (type, rslt, arg0);
10666 /* If this is a comparison of a field, we may be able to simplify it. */
10667 if (((TREE_CODE (arg0) == COMPONENT_REF
10668 && lang_hooks.can_use_bit_fields_p ())
10669 || TREE_CODE (arg0) == BIT_FIELD_REF)
10670 /* Handle the constant case even without -O
10671 to make sure the warnings are given. */
10672 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10674 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10675 if (t1)
10676 return t1;
10679 /* Optimize comparisons of strlen vs zero to a compare of the
10680 first character of the string vs zero. To wit,
10681 strlen(ptr) == 0 => *ptr == 0
10682 strlen(ptr) != 0 => *ptr != 0
10683 Other cases should reduce to one of these two (or a constant)
10684 due to the return value of strlen being unsigned. */
10685 if (TREE_CODE (arg0) == CALL_EXPR
10686 && integer_zerop (arg1))
10688 tree fndecl = get_callee_fndecl (arg0);
10689 tree arglist;
10691 if (fndecl
10692 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10693 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10694 && (arglist = TREE_OPERAND (arg0, 1))
10695 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10696 && ! TREE_CHAIN (arglist))
10698 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10699 return fold_build2 (code, type, iref,
10700 build_int_cst (TREE_TYPE (iref), 0));
10704 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10705 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10706 if (TREE_CODE (arg0) == RSHIFT_EXPR
10707 && integer_zerop (arg1)
10708 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10710 tree arg00 = TREE_OPERAND (arg0, 0);
10711 tree arg01 = TREE_OPERAND (arg0, 1);
10712 tree itype = TREE_TYPE (arg00);
10713 if (TREE_INT_CST_HIGH (arg01) == 0
10714 && TREE_INT_CST_LOW (arg01)
10715 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10717 if (TYPE_UNSIGNED (itype))
10719 itype = lang_hooks.types.signed_type (itype);
10720 arg00 = fold_convert (itype, arg00);
10722 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10723 type, arg00, build_int_cst (itype, 0));
10727 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10728 if (integer_zerop (arg1)
10729 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10730 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10731 TREE_OPERAND (arg0, 1));
10733 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10734 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10735 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10736 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10737 build_int_cst (TREE_TYPE (arg1), 0));
10738 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10739 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10740 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10741 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10742 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10743 build_int_cst (TREE_TYPE (arg1), 0));
10745 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10746 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10747 && TREE_CODE (arg1) == INTEGER_CST
10748 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10749 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10750 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10751 TREE_OPERAND (arg0, 1), arg1));
10753 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10754 (X & C) == 0 when C is a single bit. */
10755 if (TREE_CODE (arg0) == BIT_AND_EXPR
10756 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10757 && integer_zerop (arg1)
10758 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10760 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10761 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10762 TREE_OPERAND (arg0, 1));
10763 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10764 type, tem, arg1);
10767 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10768 constant C is a power of two, i.e. a single bit. */
10769 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10770 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10771 && integer_zerop (arg1)
10772 && integer_pow2p (TREE_OPERAND (arg0, 1))
10773 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10774 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10776 tree arg00 = TREE_OPERAND (arg0, 0);
10777 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10778 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10781 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10782 when is C is a power of two, i.e. a single bit. */
10783 if (TREE_CODE (arg0) == BIT_AND_EXPR
10784 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10785 && integer_zerop (arg1)
10786 && integer_pow2p (TREE_OPERAND (arg0, 1))
10787 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10788 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10790 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10791 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10792 arg000, TREE_OPERAND (arg0, 1));
10793 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10794 tem, build_int_cst (TREE_TYPE (tem), 0));
10797 if (integer_zerop (arg1)
10798 && tree_expr_nonzero_p (arg0))
10800 tree res = constant_boolean_node (code==NE_EXPR, type);
10801 return omit_one_operand (type, res, arg0);
10804 /* Fold -X op -Y as X op Y, where op is eq/ne. */
10805 if (TREE_CODE (arg0) == NEGATE_EXPR
10806 && TREE_CODE (arg1) == NEGATE_EXPR)
10807 return fold_build2 (code, type,
10808 TREE_OPERAND (arg0, 0),
10809 TREE_OPERAND (arg1, 0));
10811 return NULL_TREE;
10813 case LT_EXPR:
10814 case GT_EXPR:
10815 case LE_EXPR:
10816 case GE_EXPR:
10817 tem = fold_comparison (code, type, op0, op1);
10818 if (tem != NULL_TREE)
10819 return tem;
10821 /* Transform comparisons of the form X +- C CMP X. */
10822 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10823 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10824 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10825 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10826 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10827 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
10828 && !(flag_wrapv || flag_trapv))))
10830 tree arg01 = TREE_OPERAND (arg0, 1);
10831 enum tree_code code0 = TREE_CODE (arg0);
10832 int is_positive;
10834 if (TREE_CODE (arg01) == REAL_CST)
10835 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10836 else
10837 is_positive = tree_int_cst_sgn (arg01);
10839 /* (X - c) > X becomes false. */
10840 if (code == GT_EXPR
10841 && ((code0 == MINUS_EXPR && is_positive >= 0)
10842 || (code0 == PLUS_EXPR && is_positive <= 0)))
10843 return constant_boolean_node (0, type);
10845 /* Likewise (X + c) < X becomes false. */
10846 if (code == LT_EXPR
10847 && ((code0 == PLUS_EXPR && is_positive >= 0)
10848 || (code0 == MINUS_EXPR && is_positive <= 0)))
10849 return constant_boolean_node (0, type);
10851 /* Convert (X - c) <= X to true. */
10852 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10853 && code == LE_EXPR
10854 && ((code0 == MINUS_EXPR && is_positive >= 0)
10855 || (code0 == PLUS_EXPR && is_positive <= 0)))
10856 return constant_boolean_node (1, type);
10858 /* Convert (X + c) >= X to true. */
10859 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10860 && code == GE_EXPR
10861 && ((code0 == PLUS_EXPR && is_positive >= 0)
10862 || (code0 == MINUS_EXPR && is_positive <= 0)))
10863 return constant_boolean_node (1, type);
10865 if (TREE_CODE (arg01) == INTEGER_CST)
10867 /* Convert X + c > X and X - c < X to true for integers. */
10868 if (code == GT_EXPR
10869 && ((code0 == PLUS_EXPR && is_positive > 0)
10870 || (code0 == MINUS_EXPR && is_positive < 0)))
10871 return constant_boolean_node (1, type);
10873 if (code == LT_EXPR
10874 && ((code0 == MINUS_EXPR && is_positive > 0)
10875 || (code0 == PLUS_EXPR && is_positive < 0)))
10876 return constant_boolean_node (1, type);
10878 /* Convert X + c <= X and X - c >= X to false for integers. */
10879 if (code == LE_EXPR
10880 && ((code0 == PLUS_EXPR && is_positive > 0)
10881 || (code0 == MINUS_EXPR && is_positive < 0)))
10882 return constant_boolean_node (0, type);
10884 if (code == GE_EXPR
10885 && ((code0 == MINUS_EXPR && is_positive > 0)
10886 || (code0 == PLUS_EXPR && is_positive < 0)))
10887 return constant_boolean_node (0, type);
10891 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10892 This transformation affects the cases which are handled in later
10893 optimizations involving comparisons with non-negative constants. */
10894 if (TREE_CODE (arg1) == INTEGER_CST
10895 && TREE_CODE (arg0) != INTEGER_CST
10896 && tree_int_cst_sgn (arg1) > 0)
10898 if (code == GE_EXPR)
10900 arg1 = const_binop (MINUS_EXPR, arg1,
10901 build_int_cst (TREE_TYPE (arg1), 1), 0);
10902 return fold_build2 (GT_EXPR, type, arg0,
10903 fold_convert (TREE_TYPE (arg0), arg1));
10905 if (code == LT_EXPR)
10907 arg1 = const_binop (MINUS_EXPR, arg1,
10908 build_int_cst (TREE_TYPE (arg1), 1), 0);
10909 return fold_build2 (LE_EXPR, type, arg0,
10910 fold_convert (TREE_TYPE (arg0), arg1));
10914 /* Comparisons with the highest or lowest possible integer of
10915 the specified size will have known values. */
10917 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
10919 if (TREE_CODE (arg1) == INTEGER_CST
10920 && ! TREE_CONSTANT_OVERFLOW (arg1)
10921 && width <= 2 * HOST_BITS_PER_WIDE_INT
10922 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10923 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10925 HOST_WIDE_INT signed_max_hi;
10926 unsigned HOST_WIDE_INT signed_max_lo;
10927 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
10929 if (width <= HOST_BITS_PER_WIDE_INT)
10931 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10932 - 1;
10933 signed_max_hi = 0;
10934 max_hi = 0;
10936 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10938 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10939 min_lo = 0;
10940 min_hi = 0;
10942 else
10944 max_lo = signed_max_lo;
10945 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10946 min_hi = -1;
10949 else
10951 width -= HOST_BITS_PER_WIDE_INT;
10952 signed_max_lo = -1;
10953 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10954 - 1;
10955 max_lo = -1;
10956 min_lo = 0;
10958 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10960 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10961 min_hi = 0;
10963 else
10965 max_hi = signed_max_hi;
10966 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10970 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
10971 && TREE_INT_CST_LOW (arg1) == max_lo)
10972 switch (code)
10974 case GT_EXPR:
10975 return omit_one_operand (type, integer_zero_node, arg0);
10977 case GE_EXPR:
10978 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10980 case LE_EXPR:
10981 return omit_one_operand (type, integer_one_node, arg0);
10983 case LT_EXPR:
10984 return fold_build2 (NE_EXPR, type, arg0, arg1);
10986 /* The GE_EXPR and LT_EXPR cases above are not normally
10987 reached because of previous transformations. */
10989 default:
10990 break;
10992 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10993 == max_hi
10994 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
10995 switch (code)
10997 case GT_EXPR:
10998 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10999 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11000 case LE_EXPR:
11001 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11002 return fold_build2 (NE_EXPR, type, arg0, arg1);
11003 default:
11004 break;
11006 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11007 == min_hi
11008 && TREE_INT_CST_LOW (arg1) == min_lo)
11009 switch (code)
11011 case LT_EXPR:
11012 return omit_one_operand (type, integer_zero_node, arg0);
11014 case LE_EXPR:
11015 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11017 case GE_EXPR:
11018 return omit_one_operand (type, integer_one_node, arg0);
11020 case GT_EXPR:
11021 return fold_build2 (NE_EXPR, type, op0, op1);
11023 default:
11024 break;
11026 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11027 == min_hi
11028 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11029 switch (code)
11031 case GE_EXPR:
11032 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11033 return fold_build2 (NE_EXPR, type, arg0, arg1);
11034 case LT_EXPR:
11035 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11036 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11037 default:
11038 break;
11041 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11042 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11043 && TYPE_UNSIGNED (TREE_TYPE (arg1))
11044 /* signed_type does not work on pointer types. */
11045 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
11047 /* The following case also applies to X < signed_max+1
11048 and X >= signed_max+1 because previous transformations. */
11049 if (code == LE_EXPR || code == GT_EXPR)
11051 tree st0, st1;
11052 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11053 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11054 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11055 type, fold_convert (st0, arg0),
11056 build_int_cst (st1, 0));
11062 /* If we are comparing an ABS_EXPR with a constant, we can
11063 convert all the cases into explicit comparisons, but they may
11064 well not be faster than doing the ABS and one comparison.
11065 But ABS (X) <= C is a range comparison, which becomes a subtraction
11066 and a comparison, and is probably faster. */
11067 if (code == LE_EXPR
11068 && TREE_CODE (arg1) == INTEGER_CST
11069 && TREE_CODE (arg0) == ABS_EXPR
11070 && ! TREE_SIDE_EFFECTS (arg0)
11071 && (0 != (tem = negate_expr (arg1)))
11072 && TREE_CODE (tem) == INTEGER_CST
11073 && ! TREE_CONSTANT_OVERFLOW (tem))
11074 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11075 build2 (GE_EXPR, type,
11076 TREE_OPERAND (arg0, 0), tem),
11077 build2 (LE_EXPR, type,
11078 TREE_OPERAND (arg0, 0), arg1));
11080 /* Convert ABS_EXPR<x> >= 0 to true. */
11081 if (code == GE_EXPR
11082 && tree_expr_nonnegative_p (arg0)
11083 && (integer_zerop (arg1)
11084 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11085 && real_zerop (arg1))))
11086 return omit_one_operand (type, integer_one_node, arg0);
11088 /* Convert ABS_EXPR<x> < 0 to false. */
11089 if (code == LT_EXPR
11090 && tree_expr_nonnegative_p (arg0)
11091 && (integer_zerop (arg1) || real_zerop (arg1)))
11092 return omit_one_operand (type, integer_zero_node, arg0);
11094 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11095 and similarly for >= into !=. */
11096 if ((code == LT_EXPR || code == GE_EXPR)
11097 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11098 && TREE_CODE (arg1) == LSHIFT_EXPR
11099 && integer_onep (TREE_OPERAND (arg1, 0)))
11100 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11101 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11102 TREE_OPERAND (arg1, 1)),
11103 build_int_cst (TREE_TYPE (arg0), 0));
11105 if ((code == LT_EXPR || code == GE_EXPR)
11106 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11107 && (TREE_CODE (arg1) == NOP_EXPR
11108 || TREE_CODE (arg1) == CONVERT_EXPR)
11109 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11110 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11111 return
11112 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11113 fold_convert (TREE_TYPE (arg0),
11114 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11115 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11116 1))),
11117 build_int_cst (TREE_TYPE (arg0), 0));
11119 return NULL_TREE;
11121 case UNORDERED_EXPR:
11122 case ORDERED_EXPR:
11123 case UNLT_EXPR:
11124 case UNLE_EXPR:
11125 case UNGT_EXPR:
11126 case UNGE_EXPR:
11127 case UNEQ_EXPR:
11128 case LTGT_EXPR:
11129 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11131 t1 = fold_relational_const (code, type, arg0, arg1);
11132 if (t1 != NULL_TREE)
11133 return t1;
11136 /* If the first operand is NaN, the result is constant. */
11137 if (TREE_CODE (arg0) == REAL_CST
11138 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11139 && (code != LTGT_EXPR || ! flag_trapping_math))
11141 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11142 ? integer_zero_node
11143 : integer_one_node;
11144 return omit_one_operand (type, t1, arg1);
11147 /* If the second operand is NaN, the result is constant. */
11148 if (TREE_CODE (arg1) == REAL_CST
11149 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11150 && (code != LTGT_EXPR || ! flag_trapping_math))
11152 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11153 ? integer_zero_node
11154 : integer_one_node;
11155 return omit_one_operand (type, t1, arg0);
11158 /* Simplify unordered comparison of something with itself. */
11159 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11160 && operand_equal_p (arg0, arg1, 0))
11161 return constant_boolean_node (1, type);
11163 if (code == LTGT_EXPR
11164 && !flag_trapping_math
11165 && operand_equal_p (arg0, arg1, 0))
11166 return constant_boolean_node (0, type);
11168 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11170 tree targ0 = strip_float_extensions (arg0);
11171 tree targ1 = strip_float_extensions (arg1);
11172 tree newtype = TREE_TYPE (targ0);
11174 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11175 newtype = TREE_TYPE (targ1);
11177 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11178 return fold_build2 (code, type, fold_convert (newtype, targ0),
11179 fold_convert (newtype, targ1));
11182 return NULL_TREE;
11184 case COMPOUND_EXPR:
11185 /* When pedantic, a compound expression can be neither an lvalue
11186 nor an integer constant expression. */
11187 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11188 return NULL_TREE;
11189 /* Don't let (0, 0) be null pointer constant. */
11190 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11191 : fold_convert (type, arg1);
11192 return pedantic_non_lvalue (tem);
11194 case COMPLEX_EXPR:
11195 if ((TREE_CODE (arg0) == REAL_CST
11196 && TREE_CODE (arg1) == REAL_CST)
11197 || (TREE_CODE (arg0) == INTEGER_CST
11198 && TREE_CODE (arg1) == INTEGER_CST))
11199 return build_complex (type, arg0, arg1);
11200 return NULL_TREE;
11202 case ASSERT_EXPR:
11203 /* An ASSERT_EXPR should never be passed to fold_binary. */
11204 gcc_unreachable ();
11206 default:
11207 return NULL_TREE;
11208 } /* switch (code) */
11211 /* Callback for walk_tree, looking for LABEL_EXPR.
11212 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
11213 Do not check the sub-tree of GOTO_EXPR. */
11215 static tree
11216 contains_label_1 (tree *tp,
11217 int *walk_subtrees,
11218 void *data ATTRIBUTE_UNUSED)
11220 switch (TREE_CODE (*tp))
11222 case LABEL_EXPR:
11223 return *tp;
11224 case GOTO_EXPR:
11225 *walk_subtrees = 0;
11226 /* no break */
11227 default:
11228 return NULL_TREE;
11232 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11233 accessible from outside the sub-tree. Returns NULL_TREE if no
11234 addressable label is found. */
11236 static bool
11237 contains_label_p (tree st)
11239 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11242 /* Fold a ternary expression of code CODE and type TYPE with operands
11243 OP0, OP1, and OP2. Return the folded expression if folding is
11244 successful. Otherwise, return NULL_TREE. */
11246 tree
11247 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11249 tree tem;
11250 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11251 enum tree_code_class kind = TREE_CODE_CLASS (code);
11253 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11254 && TREE_CODE_LENGTH (code) == 3);
11256 /* Strip any conversions that don't change the mode. This is safe
11257 for every expression, except for a comparison expression because
11258 its signedness is derived from its operands. So, in the latter
11259 case, only strip conversions that don't change the signedness.
11261 Note that this is done as an internal manipulation within the
11262 constant folder, in order to find the simplest representation of
11263 the arguments so that their form can be studied. In any cases,
11264 the appropriate type conversions should be put back in the tree
11265 that will get out of the constant folder. */
11266 if (op0)
11268 arg0 = op0;
11269 STRIP_NOPS (arg0);
11272 if (op1)
11274 arg1 = op1;
11275 STRIP_NOPS (arg1);
11278 switch (code)
11280 case COMPONENT_REF:
11281 if (TREE_CODE (arg0) == CONSTRUCTOR
11282 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11284 unsigned HOST_WIDE_INT idx;
11285 tree field, value;
11286 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11287 if (field == arg1)
11288 return value;
11290 return NULL_TREE;
11292 case COND_EXPR:
11293 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11294 so all simple results must be passed through pedantic_non_lvalue. */
11295 if (TREE_CODE (arg0) == INTEGER_CST)
11297 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11298 tem = integer_zerop (arg0) ? op2 : op1;
11299 /* Only optimize constant conditions when the selected branch
11300 has the same type as the COND_EXPR. This avoids optimizing
11301 away "c ? x : throw", where the throw has a void type.
11302 Avoid throwing away that operand which contains label. */
11303 if ((!TREE_SIDE_EFFECTS (unused_op)
11304 || !contains_label_p (unused_op))
11305 && (! VOID_TYPE_P (TREE_TYPE (tem))
11306 || VOID_TYPE_P (type)))
11307 return pedantic_non_lvalue (tem);
11308 return NULL_TREE;
11310 if (operand_equal_p (arg1, op2, 0))
11311 return pedantic_omit_one_operand (type, arg1, arg0);
11313 /* If we have A op B ? A : C, we may be able to convert this to a
11314 simpler expression, depending on the operation and the values
11315 of B and C. Signed zeros prevent all of these transformations,
11316 for reasons given above each one.
11318 Also try swapping the arguments and inverting the conditional. */
11319 if (COMPARISON_CLASS_P (arg0)
11320 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11321 arg1, TREE_OPERAND (arg0, 1))
11322 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11324 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11325 if (tem)
11326 return tem;
11329 if (COMPARISON_CLASS_P (arg0)
11330 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11331 op2,
11332 TREE_OPERAND (arg0, 1))
11333 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11335 tem = fold_truth_not_expr (arg0);
11336 if (tem && COMPARISON_CLASS_P (tem))
11338 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11339 if (tem)
11340 return tem;
11344 /* If the second operand is simpler than the third, swap them
11345 since that produces better jump optimization results. */
11346 if (truth_value_p (TREE_CODE (arg0))
11347 && tree_swap_operands_p (op1, op2, false))
11349 /* See if this can be inverted. If it can't, possibly because
11350 it was a floating-point inequality comparison, don't do
11351 anything. */
11352 tem = fold_truth_not_expr (arg0);
11353 if (tem)
11354 return fold_build3 (code, type, tem, op2, op1);
11357 /* Convert A ? 1 : 0 to simply A. */
11358 if (integer_onep (op1)
11359 && integer_zerop (op2)
11360 /* If we try to convert OP0 to our type, the
11361 call to fold will try to move the conversion inside
11362 a COND, which will recurse. In that case, the COND_EXPR
11363 is probably the best choice, so leave it alone. */
11364 && type == TREE_TYPE (arg0))
11365 return pedantic_non_lvalue (arg0);
11367 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11368 over COND_EXPR in cases such as floating point comparisons. */
11369 if (integer_zerop (op1)
11370 && integer_onep (op2)
11371 && truth_value_p (TREE_CODE (arg0)))
11372 return pedantic_non_lvalue (fold_convert (type,
11373 invert_truthvalue (arg0)));
11375 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11376 if (TREE_CODE (arg0) == LT_EXPR
11377 && integer_zerop (TREE_OPERAND (arg0, 1))
11378 && integer_zerop (op2)
11379 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11381 /* sign_bit_p only checks ARG1 bits within A's precision.
11382 If <sign bit of A> has wider type than A, bits outside
11383 of A's precision in <sign bit of A> need to be checked.
11384 If they are all 0, this optimization needs to be done
11385 in unsigned A's type, if they are all 1 in signed A's type,
11386 otherwise this can't be done. */
11387 if (TYPE_PRECISION (TREE_TYPE (tem))
11388 < TYPE_PRECISION (TREE_TYPE (arg1))
11389 && TYPE_PRECISION (TREE_TYPE (tem))
11390 < TYPE_PRECISION (type))
11392 unsigned HOST_WIDE_INT mask_lo;
11393 HOST_WIDE_INT mask_hi;
11394 int inner_width, outer_width;
11395 tree tem_type;
11397 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11398 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11399 if (outer_width > TYPE_PRECISION (type))
11400 outer_width = TYPE_PRECISION (type);
11402 if (outer_width > HOST_BITS_PER_WIDE_INT)
11404 mask_hi = ((unsigned HOST_WIDE_INT) -1
11405 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11406 mask_lo = -1;
11408 else
11410 mask_hi = 0;
11411 mask_lo = ((unsigned HOST_WIDE_INT) -1
11412 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11414 if (inner_width > HOST_BITS_PER_WIDE_INT)
11416 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11417 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11418 mask_lo = 0;
11420 else
11421 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11422 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11424 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11425 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11427 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11428 tem = fold_convert (tem_type, tem);
11430 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11431 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11433 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11434 tem = fold_convert (tem_type, tem);
11436 else
11437 tem = NULL;
11440 if (tem)
11441 return fold_convert (type,
11442 fold_build2 (BIT_AND_EXPR,
11443 TREE_TYPE (tem), tem,
11444 fold_convert (TREE_TYPE (tem),
11445 arg1)));
11448 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11449 already handled above. */
11450 if (TREE_CODE (arg0) == BIT_AND_EXPR
11451 && integer_onep (TREE_OPERAND (arg0, 1))
11452 && integer_zerop (op2)
11453 && integer_pow2p (arg1))
11455 tree tem = TREE_OPERAND (arg0, 0);
11456 STRIP_NOPS (tem);
11457 if (TREE_CODE (tem) == RSHIFT_EXPR
11458 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11459 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11460 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11461 return fold_build2 (BIT_AND_EXPR, type,
11462 TREE_OPERAND (tem, 0), arg1);
11465 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11466 is probably obsolete because the first operand should be a
11467 truth value (that's why we have the two cases above), but let's
11468 leave it in until we can confirm this for all front-ends. */
11469 if (integer_zerop (op2)
11470 && TREE_CODE (arg0) == NE_EXPR
11471 && integer_zerop (TREE_OPERAND (arg0, 1))
11472 && integer_pow2p (arg1)
11473 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11474 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11475 arg1, OEP_ONLY_CONST))
11476 return pedantic_non_lvalue (fold_convert (type,
11477 TREE_OPERAND (arg0, 0)));
11479 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11480 if (integer_zerop (op2)
11481 && truth_value_p (TREE_CODE (arg0))
11482 && truth_value_p (TREE_CODE (arg1)))
11483 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11484 fold_convert (type, arg0),
11485 arg1);
11487 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11488 if (integer_onep (op2)
11489 && truth_value_p (TREE_CODE (arg0))
11490 && truth_value_p (TREE_CODE (arg1)))
11492 /* Only perform transformation if ARG0 is easily inverted. */
11493 tem = fold_truth_not_expr (arg0);
11494 if (tem)
11495 return fold_build2 (TRUTH_ORIF_EXPR, type,
11496 fold_convert (type, tem),
11497 arg1);
11500 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11501 if (integer_zerop (arg1)
11502 && truth_value_p (TREE_CODE (arg0))
11503 && truth_value_p (TREE_CODE (op2)))
11505 /* Only perform transformation if ARG0 is easily inverted. */
11506 tem = fold_truth_not_expr (arg0);
11507 if (tem)
11508 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11509 fold_convert (type, tem),
11510 op2);
11513 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11514 if (integer_onep (arg1)
11515 && truth_value_p (TREE_CODE (arg0))
11516 && truth_value_p (TREE_CODE (op2)))
11517 return fold_build2 (TRUTH_ORIF_EXPR, type,
11518 fold_convert (type, arg0),
11519 op2);
11521 return NULL_TREE;
11523 case CALL_EXPR:
11524 /* Check for a built-in function. */
11525 if (TREE_CODE (op0) == ADDR_EXPR
11526 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11527 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11528 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11529 return NULL_TREE;
11531 case BIT_FIELD_REF:
11532 if (TREE_CODE (arg0) == VECTOR_CST
11533 && type == TREE_TYPE (TREE_TYPE (arg0))
11534 && host_integerp (arg1, 1)
11535 && host_integerp (op2, 1))
11537 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11538 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11540 if (width != 0
11541 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11542 && (idx % width) == 0
11543 && (idx = idx / width)
11544 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11546 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11547 while (idx-- > 0 && elements)
11548 elements = TREE_CHAIN (elements);
11549 if (elements)
11550 return TREE_VALUE (elements);
11551 else
11552 return fold_convert (type, integer_zero_node);
11555 return NULL_TREE;
11557 default:
11558 return NULL_TREE;
11559 } /* switch (code) */
11562 /* Perform constant folding and related simplification of EXPR.
11563 The related simplifications include x*1 => x, x*0 => 0, etc.,
11564 and application of the associative law.
11565 NOP_EXPR conversions may be removed freely (as long as we
11566 are careful not to change the type of the overall expression).
11567 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11568 but we can constant-fold them if they have constant operands. */
11570 #ifdef ENABLE_FOLD_CHECKING
11571 # define fold(x) fold_1 (x)
11572 static tree fold_1 (tree);
11573 static
11574 #endif
11575 tree
11576 fold (tree expr)
11578 const tree t = expr;
11579 enum tree_code code = TREE_CODE (t);
11580 enum tree_code_class kind = TREE_CODE_CLASS (code);
11581 tree tem;
11583 /* Return right away if a constant. */
11584 if (kind == tcc_constant)
11585 return t;
11587 if (IS_EXPR_CODE_CLASS (kind))
11589 tree type = TREE_TYPE (t);
11590 tree op0, op1, op2;
11592 switch (TREE_CODE_LENGTH (code))
11594 case 1:
11595 op0 = TREE_OPERAND (t, 0);
11596 tem = fold_unary (code, type, op0);
11597 return tem ? tem : expr;
11598 case 2:
11599 op0 = TREE_OPERAND (t, 0);
11600 op1 = TREE_OPERAND (t, 1);
11601 tem = fold_binary (code, type, op0, op1);
11602 return tem ? tem : expr;
11603 case 3:
11604 op0 = TREE_OPERAND (t, 0);
11605 op1 = TREE_OPERAND (t, 1);
11606 op2 = TREE_OPERAND (t, 2);
11607 tem = fold_ternary (code, type, op0, op1, op2);
11608 return tem ? tem : expr;
11609 default:
11610 break;
11614 switch (code)
11616 case CONST_DECL:
11617 return fold (DECL_INITIAL (t));
11619 default:
11620 return t;
11621 } /* switch (code) */
11624 #ifdef ENABLE_FOLD_CHECKING
11625 #undef fold
11627 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11628 static void fold_check_failed (tree, tree);
11629 void print_fold_checksum (tree);
11631 /* When --enable-checking=fold, compute a digest of expr before
11632 and after actual fold call to see if fold did not accidentally
11633 change original expr. */
11635 tree
11636 fold (tree expr)
11638 tree ret;
11639 struct md5_ctx ctx;
11640 unsigned char checksum_before[16], checksum_after[16];
11641 htab_t ht;
11643 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11644 md5_init_ctx (&ctx);
11645 fold_checksum_tree (expr, &ctx, ht);
11646 md5_finish_ctx (&ctx, checksum_before);
11647 htab_empty (ht);
11649 ret = fold_1 (expr);
11651 md5_init_ctx (&ctx);
11652 fold_checksum_tree (expr, &ctx, ht);
11653 md5_finish_ctx (&ctx, checksum_after);
11654 htab_delete (ht);
11656 if (memcmp (checksum_before, checksum_after, 16))
11657 fold_check_failed (expr, ret);
11659 return ret;
11662 void
11663 print_fold_checksum (tree expr)
11665 struct md5_ctx ctx;
11666 unsigned char checksum[16], cnt;
11667 htab_t ht;
11669 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11670 md5_init_ctx (&ctx);
11671 fold_checksum_tree (expr, &ctx, ht);
11672 md5_finish_ctx (&ctx, checksum);
11673 htab_delete (ht);
11674 for (cnt = 0; cnt < 16; ++cnt)
11675 fprintf (stderr, "%02x", checksum[cnt]);
11676 putc ('\n', stderr);
11679 static void
11680 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11682 internal_error ("fold check: original tree changed by fold");
11685 static void
11686 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11688 void **slot;
11689 enum tree_code code;
11690 struct tree_function_decl buf;
11691 int i, len;
11693 recursive_label:
11695 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11696 <= sizeof (struct tree_function_decl))
11697 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11698 if (expr == NULL)
11699 return;
11700 slot = htab_find_slot (ht, expr, INSERT);
11701 if (*slot != NULL)
11702 return;
11703 *slot = expr;
11704 code = TREE_CODE (expr);
11705 if (TREE_CODE_CLASS (code) == tcc_declaration
11706 && DECL_ASSEMBLER_NAME_SET_P (expr))
11708 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11709 memcpy ((char *) &buf, expr, tree_size (expr));
11710 expr = (tree) &buf;
11711 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11713 else if (TREE_CODE_CLASS (code) == tcc_type
11714 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11715 || TYPE_CACHED_VALUES_P (expr)
11716 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11718 /* Allow these fields to be modified. */
11719 memcpy ((char *) &buf, expr, tree_size (expr));
11720 expr = (tree) &buf;
11721 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11722 TYPE_POINTER_TO (expr) = NULL;
11723 TYPE_REFERENCE_TO (expr) = NULL;
11724 if (TYPE_CACHED_VALUES_P (expr))
11726 TYPE_CACHED_VALUES_P (expr) = 0;
11727 TYPE_CACHED_VALUES (expr) = NULL;
11730 md5_process_bytes (expr, tree_size (expr), ctx);
11731 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11732 if (TREE_CODE_CLASS (code) != tcc_type
11733 && TREE_CODE_CLASS (code) != tcc_declaration
11734 && code != TREE_LIST)
11735 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11736 switch (TREE_CODE_CLASS (code))
11738 case tcc_constant:
11739 switch (code)
11741 case STRING_CST:
11742 md5_process_bytes (TREE_STRING_POINTER (expr),
11743 TREE_STRING_LENGTH (expr), ctx);
11744 break;
11745 case COMPLEX_CST:
11746 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11747 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11748 break;
11749 case VECTOR_CST:
11750 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11751 break;
11752 default:
11753 break;
11755 break;
11756 case tcc_exceptional:
11757 switch (code)
11759 case TREE_LIST:
11760 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11761 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11762 expr = TREE_CHAIN (expr);
11763 goto recursive_label;
11764 break;
11765 case TREE_VEC:
11766 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11767 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11768 break;
11769 default:
11770 break;
11772 break;
11773 case tcc_expression:
11774 case tcc_reference:
11775 case tcc_comparison:
11776 case tcc_unary:
11777 case tcc_binary:
11778 case tcc_statement:
11779 len = TREE_CODE_LENGTH (code);
11780 for (i = 0; i < len; ++i)
11781 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11782 break;
11783 case tcc_declaration:
11784 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11785 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11786 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11788 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11789 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11790 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11791 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11792 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11794 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11795 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11797 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11799 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11800 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11801 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
11803 break;
11804 case tcc_type:
11805 if (TREE_CODE (expr) == ENUMERAL_TYPE)
11806 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11807 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
11808 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
11809 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
11810 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
11811 if (INTEGRAL_TYPE_P (expr)
11812 || SCALAR_FLOAT_TYPE_P (expr))
11814 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
11815 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
11817 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
11818 if (TREE_CODE (expr) == RECORD_TYPE
11819 || TREE_CODE (expr) == UNION_TYPE
11820 || TREE_CODE (expr) == QUAL_UNION_TYPE)
11821 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
11822 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
11823 break;
11824 default:
11825 break;
11829 #endif
11831 /* Fold a unary tree expression with code CODE of type TYPE with an
11832 operand OP0. Return a folded expression if successful. Otherwise,
11833 return a tree expression with code CODE of type TYPE with an
11834 operand OP0. */
11836 tree
11837 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
11839 tree tem;
11840 #ifdef ENABLE_FOLD_CHECKING
11841 unsigned char checksum_before[16], checksum_after[16];
11842 struct md5_ctx ctx;
11843 htab_t ht;
11845 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11846 md5_init_ctx (&ctx);
11847 fold_checksum_tree (op0, &ctx, ht);
11848 md5_finish_ctx (&ctx, checksum_before);
11849 htab_empty (ht);
11850 #endif
11852 tem = fold_unary (code, type, op0);
11853 if (!tem)
11854 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
11856 #ifdef ENABLE_FOLD_CHECKING
11857 md5_init_ctx (&ctx);
11858 fold_checksum_tree (op0, &ctx, ht);
11859 md5_finish_ctx (&ctx, checksum_after);
11860 htab_delete (ht);
11862 if (memcmp (checksum_before, checksum_after, 16))
11863 fold_check_failed (op0, tem);
11864 #endif
11865 return tem;
11868 /* Fold a binary tree expression with code CODE of type TYPE with
11869 operands OP0 and OP1. Return a folded expression if successful.
11870 Otherwise, return a tree expression with code CODE of type TYPE
11871 with operands OP0 and OP1. */
11873 tree
11874 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
11875 MEM_STAT_DECL)
11877 tree tem;
11878 #ifdef ENABLE_FOLD_CHECKING
11879 unsigned char checksum_before_op0[16],
11880 checksum_before_op1[16],
11881 checksum_after_op0[16],
11882 checksum_after_op1[16];
11883 struct md5_ctx ctx;
11884 htab_t ht;
11886 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11887 md5_init_ctx (&ctx);
11888 fold_checksum_tree (op0, &ctx, ht);
11889 md5_finish_ctx (&ctx, checksum_before_op0);
11890 htab_empty (ht);
11892 md5_init_ctx (&ctx);
11893 fold_checksum_tree (op1, &ctx, ht);
11894 md5_finish_ctx (&ctx, checksum_before_op1);
11895 htab_empty (ht);
11896 #endif
11898 tem = fold_binary (code, type, op0, op1);
11899 if (!tem)
11900 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
11902 #ifdef ENABLE_FOLD_CHECKING
11903 md5_init_ctx (&ctx);
11904 fold_checksum_tree (op0, &ctx, ht);
11905 md5_finish_ctx (&ctx, checksum_after_op0);
11906 htab_empty (ht);
11908 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11909 fold_check_failed (op0, tem);
11911 md5_init_ctx (&ctx);
11912 fold_checksum_tree (op1, &ctx, ht);
11913 md5_finish_ctx (&ctx, checksum_after_op1);
11914 htab_delete (ht);
11916 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11917 fold_check_failed (op1, tem);
11918 #endif
11919 return tem;
11922 /* Fold a ternary tree expression with code CODE of type TYPE with
11923 operands OP0, OP1, and OP2. Return a folded expression if
11924 successful. Otherwise, return a tree expression with code CODE of
11925 type TYPE with operands OP0, OP1, and OP2. */
11927 tree
11928 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
11929 MEM_STAT_DECL)
11931 tree tem;
11932 #ifdef ENABLE_FOLD_CHECKING
11933 unsigned char checksum_before_op0[16],
11934 checksum_before_op1[16],
11935 checksum_before_op2[16],
11936 checksum_after_op0[16],
11937 checksum_after_op1[16],
11938 checksum_after_op2[16];
11939 struct md5_ctx ctx;
11940 htab_t ht;
11942 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11943 md5_init_ctx (&ctx);
11944 fold_checksum_tree (op0, &ctx, ht);
11945 md5_finish_ctx (&ctx, checksum_before_op0);
11946 htab_empty (ht);
11948 md5_init_ctx (&ctx);
11949 fold_checksum_tree (op1, &ctx, ht);
11950 md5_finish_ctx (&ctx, checksum_before_op1);
11951 htab_empty (ht);
11953 md5_init_ctx (&ctx);
11954 fold_checksum_tree (op2, &ctx, ht);
11955 md5_finish_ctx (&ctx, checksum_before_op2);
11956 htab_empty (ht);
11957 #endif
11959 tem = fold_ternary (code, type, op0, op1, op2);
11960 if (!tem)
11961 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
11963 #ifdef ENABLE_FOLD_CHECKING
11964 md5_init_ctx (&ctx);
11965 fold_checksum_tree (op0, &ctx, ht);
11966 md5_finish_ctx (&ctx, checksum_after_op0);
11967 htab_empty (ht);
11969 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11970 fold_check_failed (op0, tem);
11972 md5_init_ctx (&ctx);
11973 fold_checksum_tree (op1, &ctx, ht);
11974 md5_finish_ctx (&ctx, checksum_after_op1);
11975 htab_empty (ht);
11977 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11978 fold_check_failed (op1, tem);
11980 md5_init_ctx (&ctx);
11981 fold_checksum_tree (op2, &ctx, ht);
11982 md5_finish_ctx (&ctx, checksum_after_op2);
11983 htab_delete (ht);
11985 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
11986 fold_check_failed (op2, tem);
11987 #endif
11988 return tem;
11991 /* Perform constant folding and related simplification of initializer
11992 expression EXPR. These behave identically to "fold_buildN" but ignore
11993 potential run-time traps and exceptions that fold must preserve. */
11995 #define START_FOLD_INIT \
11996 int saved_signaling_nans = flag_signaling_nans;\
11997 int saved_trapping_math = flag_trapping_math;\
11998 int saved_rounding_math = flag_rounding_math;\
11999 int saved_trapv = flag_trapv;\
12000 int saved_folding_initializer = folding_initializer;\
12001 flag_signaling_nans = 0;\
12002 flag_trapping_math = 0;\
12003 flag_rounding_math = 0;\
12004 flag_trapv = 0;\
12005 folding_initializer = 1;
12007 #define END_FOLD_INIT \
12008 flag_signaling_nans = saved_signaling_nans;\
12009 flag_trapping_math = saved_trapping_math;\
12010 flag_rounding_math = saved_rounding_math;\
12011 flag_trapv = saved_trapv;\
12012 folding_initializer = saved_folding_initializer;
12014 tree
12015 fold_build1_initializer (enum tree_code code, tree type, tree op)
12017 tree result;
12018 START_FOLD_INIT;
12020 result = fold_build1 (code, type, op);
12022 END_FOLD_INIT;
12023 return result;
12026 tree
12027 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12029 tree result;
12030 START_FOLD_INIT;
12032 result = fold_build2 (code, type, op0, op1);
12034 END_FOLD_INIT;
12035 return result;
12038 tree
12039 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12040 tree op2)
12042 tree result;
12043 START_FOLD_INIT;
12045 result = fold_build3 (code, type, op0, op1, op2);
12047 END_FOLD_INIT;
12048 return result;
12051 #undef START_FOLD_INIT
12052 #undef END_FOLD_INIT
12054 /* Determine if first argument is a multiple of second argument. Return 0 if
12055 it is not, or we cannot easily determined it to be.
12057 An example of the sort of thing we care about (at this point; this routine
12058 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12059 fold cases do now) is discovering that
12061 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12063 is a multiple of
12065 SAVE_EXPR (J * 8)
12067 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12069 This code also handles discovering that
12071 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12073 is a multiple of 8 so we don't have to worry about dealing with a
12074 possible remainder.
12076 Note that we *look* inside a SAVE_EXPR only to determine how it was
12077 calculated; it is not safe for fold to do much of anything else with the
12078 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12079 at run time. For example, the latter example above *cannot* be implemented
12080 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12081 evaluation time of the original SAVE_EXPR is not necessarily the same at
12082 the time the new expression is evaluated. The only optimization of this
12083 sort that would be valid is changing
12085 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12087 divided by 8 to
12089 SAVE_EXPR (I) * SAVE_EXPR (J)
12091 (where the same SAVE_EXPR (J) is used in the original and the
12092 transformed version). */
12094 static int
12095 multiple_of_p (tree type, tree top, tree bottom)
12097 if (operand_equal_p (top, bottom, 0))
12098 return 1;
12100 if (TREE_CODE (type) != INTEGER_TYPE)
12101 return 0;
12103 switch (TREE_CODE (top))
12105 case BIT_AND_EXPR:
12106 /* Bitwise and provides a power of two multiple. If the mask is
12107 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12108 if (!integer_pow2p (bottom))
12109 return 0;
12110 /* FALLTHRU */
12112 case MULT_EXPR:
12113 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12114 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12116 case PLUS_EXPR:
12117 case MINUS_EXPR:
12118 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12119 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12121 case LSHIFT_EXPR:
12122 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12124 tree op1, t1;
12126 op1 = TREE_OPERAND (top, 1);
12127 /* const_binop may not detect overflow correctly,
12128 so check for it explicitly here. */
12129 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12130 > TREE_INT_CST_LOW (op1)
12131 && TREE_INT_CST_HIGH (op1) == 0
12132 && 0 != (t1 = fold_convert (type,
12133 const_binop (LSHIFT_EXPR,
12134 size_one_node,
12135 op1, 0)))
12136 && ! TREE_OVERFLOW (t1))
12137 return multiple_of_p (type, t1, bottom);
12139 return 0;
12141 case NOP_EXPR:
12142 /* Can't handle conversions from non-integral or wider integral type. */
12143 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12144 || (TYPE_PRECISION (type)
12145 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12146 return 0;
12148 /* .. fall through ... */
12150 case SAVE_EXPR:
12151 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12153 case INTEGER_CST:
12154 if (TREE_CODE (bottom) != INTEGER_CST
12155 || (TYPE_UNSIGNED (type)
12156 && (tree_int_cst_sgn (top) < 0
12157 || tree_int_cst_sgn (bottom) < 0)))
12158 return 0;
12159 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
12160 top, bottom, 0));
12162 default:
12163 return 0;
12167 /* Return true if `t' is known to be non-negative. */
12169 bool
12170 tree_expr_nonnegative_p (tree t)
12172 if (t == error_mark_node)
12173 return false;
12175 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12176 return true;
12178 switch (TREE_CODE (t))
12180 case SSA_NAME:
12181 /* Query VRP to see if it has recorded any information about
12182 the range of this object. */
12183 return ssa_name_nonnegative_p (t);
12185 case ABS_EXPR:
12186 /* We can't return 1 if flag_wrapv is set because
12187 ABS_EXPR<INT_MIN> = INT_MIN. */
12188 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
12189 return true;
12190 break;
12192 case INTEGER_CST:
12193 return tree_int_cst_sgn (t) >= 0;
12195 case REAL_CST:
12196 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12198 case PLUS_EXPR:
12199 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12200 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12201 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12203 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12204 both unsigned and at least 2 bits shorter than the result. */
12205 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12206 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12207 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12209 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12210 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12211 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12212 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12214 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12215 TYPE_PRECISION (inner2)) + 1;
12216 return prec < TYPE_PRECISION (TREE_TYPE (t));
12219 break;
12221 case MULT_EXPR:
12222 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12224 /* x * x for floating point x is always non-negative. */
12225 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12226 return true;
12227 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12228 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12231 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12232 both unsigned and their total bits is shorter than the result. */
12233 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12234 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12235 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12237 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12238 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12239 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12240 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12241 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12242 < TYPE_PRECISION (TREE_TYPE (t));
12244 return false;
12246 case BIT_AND_EXPR:
12247 case MAX_EXPR:
12248 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12249 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12251 case BIT_IOR_EXPR:
12252 case BIT_XOR_EXPR:
12253 case MIN_EXPR:
12254 case RDIV_EXPR:
12255 case TRUNC_DIV_EXPR:
12256 case CEIL_DIV_EXPR:
12257 case FLOOR_DIV_EXPR:
12258 case ROUND_DIV_EXPR:
12259 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12260 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12262 case TRUNC_MOD_EXPR:
12263 case CEIL_MOD_EXPR:
12264 case FLOOR_MOD_EXPR:
12265 case ROUND_MOD_EXPR:
12266 case SAVE_EXPR:
12267 case NON_LVALUE_EXPR:
12268 case FLOAT_EXPR:
12269 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12271 case COMPOUND_EXPR:
12272 case MODIFY_EXPR:
12273 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12275 case BIND_EXPR:
12276 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
12278 case COND_EXPR:
12279 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
12280 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
12282 case NOP_EXPR:
12284 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12285 tree outer_type = TREE_TYPE (t);
12287 if (TREE_CODE (outer_type) == REAL_TYPE)
12289 if (TREE_CODE (inner_type) == REAL_TYPE)
12290 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12291 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12293 if (TYPE_UNSIGNED (inner_type))
12294 return true;
12295 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12298 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12300 if (TREE_CODE (inner_type) == REAL_TYPE)
12301 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
12302 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12303 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12304 && TYPE_UNSIGNED (inner_type);
12307 break;
12309 case TARGET_EXPR:
12311 tree temp = TARGET_EXPR_SLOT (t);
12312 t = TARGET_EXPR_INITIAL (t);
12314 /* If the initializer is non-void, then it's a normal expression
12315 that will be assigned to the slot. */
12316 if (!VOID_TYPE_P (t))
12317 return tree_expr_nonnegative_p (t);
12319 /* Otherwise, the initializer sets the slot in some way. One common
12320 way is an assignment statement at the end of the initializer. */
12321 while (1)
12323 if (TREE_CODE (t) == BIND_EXPR)
12324 t = expr_last (BIND_EXPR_BODY (t));
12325 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12326 || TREE_CODE (t) == TRY_CATCH_EXPR)
12327 t = expr_last (TREE_OPERAND (t, 0));
12328 else if (TREE_CODE (t) == STATEMENT_LIST)
12329 t = expr_last (t);
12330 else
12331 break;
12333 if (TREE_CODE (t) == MODIFY_EXPR
12334 && TREE_OPERAND (t, 0) == temp)
12335 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12337 return false;
12340 case CALL_EXPR:
12342 tree fndecl = get_callee_fndecl (t);
12343 tree arglist = TREE_OPERAND (t, 1);
12344 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12345 switch (DECL_FUNCTION_CODE (fndecl))
12347 CASE_FLT_FN (BUILT_IN_ACOS):
12348 CASE_FLT_FN (BUILT_IN_ACOSH):
12349 CASE_FLT_FN (BUILT_IN_CABS):
12350 CASE_FLT_FN (BUILT_IN_COSH):
12351 CASE_FLT_FN (BUILT_IN_ERFC):
12352 CASE_FLT_FN (BUILT_IN_EXP):
12353 CASE_FLT_FN (BUILT_IN_EXP10):
12354 CASE_FLT_FN (BUILT_IN_EXP2):
12355 CASE_FLT_FN (BUILT_IN_FABS):
12356 CASE_FLT_FN (BUILT_IN_FDIM):
12357 CASE_FLT_FN (BUILT_IN_HYPOT):
12358 CASE_FLT_FN (BUILT_IN_POW10):
12359 CASE_INT_FN (BUILT_IN_FFS):
12360 CASE_INT_FN (BUILT_IN_PARITY):
12361 CASE_INT_FN (BUILT_IN_POPCOUNT):
12362 case BUILT_IN_BSWAP32:
12363 case BUILT_IN_BSWAP64:
12364 /* Always true. */
12365 return true;
12367 CASE_FLT_FN (BUILT_IN_SQRT):
12368 /* sqrt(-0.0) is -0.0. */
12369 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12370 return true;
12371 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12373 CASE_FLT_FN (BUILT_IN_ASINH):
12374 CASE_FLT_FN (BUILT_IN_ATAN):
12375 CASE_FLT_FN (BUILT_IN_ATANH):
12376 CASE_FLT_FN (BUILT_IN_CBRT):
12377 CASE_FLT_FN (BUILT_IN_CEIL):
12378 CASE_FLT_FN (BUILT_IN_ERF):
12379 CASE_FLT_FN (BUILT_IN_EXPM1):
12380 CASE_FLT_FN (BUILT_IN_FLOOR):
12381 CASE_FLT_FN (BUILT_IN_FMOD):
12382 CASE_FLT_FN (BUILT_IN_FREXP):
12383 CASE_FLT_FN (BUILT_IN_LCEIL):
12384 CASE_FLT_FN (BUILT_IN_LDEXP):
12385 CASE_FLT_FN (BUILT_IN_LFLOOR):
12386 CASE_FLT_FN (BUILT_IN_LLCEIL):
12387 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12388 CASE_FLT_FN (BUILT_IN_LLRINT):
12389 CASE_FLT_FN (BUILT_IN_LLROUND):
12390 CASE_FLT_FN (BUILT_IN_LRINT):
12391 CASE_FLT_FN (BUILT_IN_LROUND):
12392 CASE_FLT_FN (BUILT_IN_MODF):
12393 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12394 CASE_FLT_FN (BUILT_IN_RINT):
12395 CASE_FLT_FN (BUILT_IN_ROUND):
12396 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12397 CASE_FLT_FN (BUILT_IN_SINH):
12398 CASE_FLT_FN (BUILT_IN_TANH):
12399 CASE_FLT_FN (BUILT_IN_TRUNC):
12400 /* True if the 1st argument is nonnegative. */
12401 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12403 CASE_FLT_FN (BUILT_IN_FMAX):
12404 /* True if the 1st OR 2nd arguments are nonnegative. */
12405 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12406 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12408 CASE_FLT_FN (BUILT_IN_FMIN):
12409 /* True if the 1st AND 2nd arguments are nonnegative. */
12410 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12411 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12413 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12414 /* True if the 2nd argument is nonnegative. */
12415 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12417 CASE_FLT_FN (BUILT_IN_POWI):
12418 /* True if the 1st argument is nonnegative or the second
12419 argument is an even integer. */
12420 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
12422 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
12423 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
12424 return true;
12426 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12428 CASE_FLT_FN (BUILT_IN_POW):
12429 /* True if the 1st argument is nonnegative or the second
12430 argument is an even integer valued real. */
12431 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
12433 REAL_VALUE_TYPE c;
12434 HOST_WIDE_INT n;
12436 c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
12437 n = real_to_integer (&c);
12438 if ((n & 1) == 0)
12440 REAL_VALUE_TYPE cint;
12441 real_from_integer (&cint, VOIDmode, n,
12442 n < 0 ? -1 : 0, 0);
12443 if (real_identical (&c, &cint))
12444 return true;
12447 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12449 default:
12450 break;
12454 /* ... fall through ... */
12456 default:
12457 if (truth_value_p (TREE_CODE (t)))
12458 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12459 return true;
12462 /* We don't know sign of `t', so be conservative and return false. */
12463 return false;
12466 /* Return true when T is an address and is known to be nonzero.
12467 For floating point we further ensure that T is not denormal.
12468 Similar logic is present in nonzero_address in rtlanal.h. */
12470 bool
12471 tree_expr_nonzero_p (tree t)
12473 tree type = TREE_TYPE (t);
12475 /* Doing something useful for floating point would need more work. */
12476 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12477 return false;
12479 switch (TREE_CODE (t))
12481 case SSA_NAME:
12482 /* Query VRP to see if it has recorded any information about
12483 the range of this object. */
12484 return ssa_name_nonzero_p (t);
12486 case ABS_EXPR:
12487 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12489 case INTEGER_CST:
12490 /* We used to test for !integer_zerop here. This does not work correctly
12491 if TREE_CONSTANT_OVERFLOW (t). */
12492 return (TREE_INT_CST_LOW (t) != 0
12493 || TREE_INT_CST_HIGH (t) != 0);
12495 case PLUS_EXPR:
12496 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12498 /* With the presence of negative values it is hard
12499 to say something. */
12500 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12501 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12502 return false;
12503 /* One of operands must be positive and the other non-negative. */
12504 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12505 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12507 break;
12509 case MULT_EXPR:
12510 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12512 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12513 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12515 break;
12517 case NOP_EXPR:
12519 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12520 tree outer_type = TREE_TYPE (t);
12522 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12523 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
12525 break;
12527 case ADDR_EXPR:
12529 tree base = get_base_address (TREE_OPERAND (t, 0));
12531 if (!base)
12532 return false;
12534 /* Weak declarations may link to NULL. */
12535 if (VAR_OR_FUNCTION_DECL_P (base))
12536 return !DECL_WEAK (base);
12538 /* Constants are never weak. */
12539 if (CONSTANT_CLASS_P (base))
12540 return true;
12542 return false;
12545 case COND_EXPR:
12546 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12547 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
12549 case MIN_EXPR:
12550 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12551 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12553 case MAX_EXPR:
12554 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12556 /* When both operands are nonzero, then MAX must be too. */
12557 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12558 return true;
12560 /* MAX where operand 0 is positive is positive. */
12561 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12563 /* MAX where operand 1 is positive is positive. */
12564 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12565 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12566 return true;
12567 break;
12569 case COMPOUND_EXPR:
12570 case MODIFY_EXPR:
12571 case BIND_EXPR:
12572 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
12574 case SAVE_EXPR:
12575 case NON_LVALUE_EXPR:
12576 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12578 case BIT_IOR_EXPR:
12579 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12580 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12582 case CALL_EXPR:
12583 return alloca_call_p (t);
12585 default:
12586 break;
12588 return false;
12591 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12592 attempt to fold the expression to a constant without modifying TYPE,
12593 OP0 or OP1.
12595 If the expression could be simplified to a constant, then return
12596 the constant. If the expression would not be simplified to a
12597 constant, then return NULL_TREE. */
12599 tree
12600 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12602 tree tem = fold_binary (code, type, op0, op1);
12603 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12606 /* Given the components of a unary expression CODE, TYPE and OP0,
12607 attempt to fold the expression to a constant without modifying
12608 TYPE or OP0.
12610 If the expression could be simplified to a constant, then return
12611 the constant. If the expression would not be simplified to a
12612 constant, then return NULL_TREE. */
12614 tree
12615 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12617 tree tem = fold_unary (code, type, op0);
12618 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12621 /* If EXP represents referencing an element in a constant string
12622 (either via pointer arithmetic or array indexing), return the
12623 tree representing the value accessed, otherwise return NULL. */
12625 tree
12626 fold_read_from_constant_string (tree exp)
12628 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
12630 tree exp1 = TREE_OPERAND (exp, 0);
12631 tree index;
12632 tree string;
12634 if (TREE_CODE (exp) == INDIRECT_REF)
12635 string = string_constant (exp1, &index);
12636 else
12638 tree low_bound = array_ref_low_bound (exp);
12639 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12641 /* Optimize the special-case of a zero lower bound.
12643 We convert the low_bound to sizetype to avoid some problems
12644 with constant folding. (E.g. suppose the lower bound is 1,
12645 and its mode is QI. Without the conversion,l (ARRAY
12646 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12647 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
12648 if (! integer_zerop (low_bound))
12649 index = size_diffop (index, fold_convert (sizetype, low_bound));
12651 string = exp1;
12654 if (string
12655 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12656 && TREE_CODE (string) == STRING_CST
12657 && TREE_CODE (index) == INTEGER_CST
12658 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12659 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12660 == MODE_INT)
12661 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12662 return fold_convert (TREE_TYPE (exp),
12663 build_int_cst (NULL_TREE,
12664 (TREE_STRING_POINTER (string)
12665 [TREE_INT_CST_LOW (index)])));
12667 return NULL;
12670 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12671 an integer constant or real constant.
12673 TYPE is the type of the result. */
12675 static tree
12676 fold_negate_const (tree arg0, tree type)
12678 tree t = NULL_TREE;
12680 switch (TREE_CODE (arg0))
12682 case INTEGER_CST:
12684 unsigned HOST_WIDE_INT low;
12685 HOST_WIDE_INT high;
12686 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12687 TREE_INT_CST_HIGH (arg0),
12688 &low, &high);
12689 t = build_int_cst_wide (type, low, high);
12690 t = force_fit_type (t, 1,
12691 (overflow | TREE_OVERFLOW (arg0))
12692 && !TYPE_UNSIGNED (type),
12693 TREE_CONSTANT_OVERFLOW (arg0));
12694 break;
12697 case REAL_CST:
12698 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12699 break;
12701 default:
12702 gcc_unreachable ();
12705 return t;
12708 /* Return the tree for abs (ARG0) when ARG0 is known to be either
12709 an integer constant or real constant.
12711 TYPE is the type of the result. */
12713 tree
12714 fold_abs_const (tree arg0, tree type)
12716 tree t = NULL_TREE;
12718 switch (TREE_CODE (arg0))
12720 case INTEGER_CST:
12721 /* If the value is unsigned, then the absolute value is
12722 the same as the ordinary value. */
12723 if (TYPE_UNSIGNED (type))
12724 t = arg0;
12725 /* Similarly, if the value is non-negative. */
12726 else if (INT_CST_LT (integer_minus_one_node, arg0))
12727 t = arg0;
12728 /* If the value is negative, then the absolute value is
12729 its negation. */
12730 else
12732 unsigned HOST_WIDE_INT low;
12733 HOST_WIDE_INT high;
12734 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12735 TREE_INT_CST_HIGH (arg0),
12736 &low, &high);
12737 t = build_int_cst_wide (type, low, high);
12738 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
12739 TREE_CONSTANT_OVERFLOW (arg0));
12741 break;
12743 case REAL_CST:
12744 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
12745 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12746 else
12747 t = arg0;
12748 break;
12750 default:
12751 gcc_unreachable ();
12754 return t;
12757 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
12758 constant. TYPE is the type of the result. */
12760 static tree
12761 fold_not_const (tree arg0, tree type)
12763 tree t = NULL_TREE;
12765 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
12767 t = build_int_cst_wide (type,
12768 ~ TREE_INT_CST_LOW (arg0),
12769 ~ TREE_INT_CST_HIGH (arg0));
12770 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
12771 TREE_CONSTANT_OVERFLOW (arg0));
12773 return t;
12776 /* Given CODE, a relational operator, the target type, TYPE and two
12777 constant operands OP0 and OP1, return the result of the
12778 relational operation. If the result is not a compile time
12779 constant, then return NULL_TREE. */
12781 static tree
12782 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
12784 int result, invert;
12786 /* From here on, the only cases we handle are when the result is
12787 known to be a constant. */
12789 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
12791 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
12792 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
12794 /* Handle the cases where either operand is a NaN. */
12795 if (real_isnan (c0) || real_isnan (c1))
12797 switch (code)
12799 case EQ_EXPR:
12800 case ORDERED_EXPR:
12801 result = 0;
12802 break;
12804 case NE_EXPR:
12805 case UNORDERED_EXPR:
12806 case UNLT_EXPR:
12807 case UNLE_EXPR:
12808 case UNGT_EXPR:
12809 case UNGE_EXPR:
12810 case UNEQ_EXPR:
12811 result = 1;
12812 break;
12814 case LT_EXPR:
12815 case LE_EXPR:
12816 case GT_EXPR:
12817 case GE_EXPR:
12818 case LTGT_EXPR:
12819 if (flag_trapping_math)
12820 return NULL_TREE;
12821 result = 0;
12822 break;
12824 default:
12825 gcc_unreachable ();
12828 return constant_boolean_node (result, type);
12831 return constant_boolean_node (real_compare (code, c0, c1), type);
12834 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
12836 To compute GT, swap the arguments and do LT.
12837 To compute GE, do LT and invert the result.
12838 To compute LE, swap the arguments, do LT and invert the result.
12839 To compute NE, do EQ and invert the result.
12841 Therefore, the code below must handle only EQ and LT. */
12843 if (code == LE_EXPR || code == GT_EXPR)
12845 tree tem = op0;
12846 op0 = op1;
12847 op1 = tem;
12848 code = swap_tree_comparison (code);
12851 /* Note that it is safe to invert for real values here because we
12852 have already handled the one case that it matters. */
12854 invert = 0;
12855 if (code == NE_EXPR || code == GE_EXPR)
12857 invert = 1;
12858 code = invert_tree_comparison (code, false);
12861 /* Compute a result for LT or EQ if args permit;
12862 Otherwise return T. */
12863 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
12865 if (code == EQ_EXPR)
12866 result = tree_int_cst_equal (op0, op1);
12867 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
12868 result = INT_CST_LT_UNSIGNED (op0, op1);
12869 else
12870 result = INT_CST_LT (op0, op1);
12872 else
12873 return NULL_TREE;
12875 if (invert)
12876 result ^= 1;
12877 return constant_boolean_node (result, type);
12880 /* Build an expression for the a clean point containing EXPR with type TYPE.
12881 Don't build a cleanup point expression for EXPR which don't have side
12882 effects. */
12884 tree
12885 fold_build_cleanup_point_expr (tree type, tree expr)
12887 /* If the expression does not have side effects then we don't have to wrap
12888 it with a cleanup point expression. */
12889 if (!TREE_SIDE_EFFECTS (expr))
12890 return expr;
12892 /* If the expression is a return, check to see if the expression inside the
12893 return has no side effects or the right hand side of the modify expression
12894 inside the return. If either don't have side effects set we don't need to
12895 wrap the expression in a cleanup point expression. Note we don't check the
12896 left hand side of the modify because it should always be a return decl. */
12897 if (TREE_CODE (expr) == RETURN_EXPR)
12899 tree op = TREE_OPERAND (expr, 0);
12900 if (!op || !TREE_SIDE_EFFECTS (op))
12901 return expr;
12902 op = TREE_OPERAND (op, 1);
12903 if (!TREE_SIDE_EFFECTS (op))
12904 return expr;
12907 return build1 (CLEANUP_POINT_EXPR, type, expr);
12910 /* Build an expression for the address of T. Folds away INDIRECT_REF to
12911 avoid confusing the gimplify process. */
12913 tree
12914 build_fold_addr_expr_with_type (tree t, tree ptrtype)
12916 /* The size of the object is not relevant when talking about its address. */
12917 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12918 t = TREE_OPERAND (t, 0);
12920 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
12921 if (TREE_CODE (t) == INDIRECT_REF
12922 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
12924 t = TREE_OPERAND (t, 0);
12925 if (TREE_TYPE (t) != ptrtype)
12926 t = build1 (NOP_EXPR, ptrtype, t);
12928 else
12930 tree base = t;
12932 while (handled_component_p (base))
12933 base = TREE_OPERAND (base, 0);
12934 if (DECL_P (base))
12935 TREE_ADDRESSABLE (base) = 1;
12937 t = build1 (ADDR_EXPR, ptrtype, t);
12940 return t;
12943 tree
12944 build_fold_addr_expr (tree t)
12946 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
12949 /* Given a pointer value OP0 and a type TYPE, return a simplified version
12950 of an indirection through OP0, or NULL_TREE if no simplification is
12951 possible. */
12953 tree
12954 fold_indirect_ref_1 (tree type, tree op0)
12956 tree sub = op0;
12957 tree subtype;
12959 STRIP_NOPS (sub);
12960 subtype = TREE_TYPE (sub);
12961 if (!POINTER_TYPE_P (subtype))
12962 return NULL_TREE;
12964 if (TREE_CODE (sub) == ADDR_EXPR)
12966 tree op = TREE_OPERAND (sub, 0);
12967 tree optype = TREE_TYPE (op);
12968 /* *&p => p; make sure to handle *&"str"[cst] here. */
12969 if (type == optype)
12971 tree fop = fold_read_from_constant_string (op);
12972 if (fop)
12973 return fop;
12974 else
12975 return op;
12977 /* *(foo *)&fooarray => fooarray[0] */
12978 else if (TREE_CODE (optype) == ARRAY_TYPE
12979 && type == TREE_TYPE (optype))
12981 tree type_domain = TYPE_DOMAIN (optype);
12982 tree min_val = size_zero_node;
12983 if (type_domain && TYPE_MIN_VALUE (type_domain))
12984 min_val = TYPE_MIN_VALUE (type_domain);
12985 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
12987 /* *(foo *)&complexfoo => __real__ complexfoo */
12988 else if (TREE_CODE (optype) == COMPLEX_TYPE
12989 && type == TREE_TYPE (optype))
12990 return fold_build1 (REALPART_EXPR, type, op);
12993 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
12994 if (TREE_CODE (sub) == PLUS_EXPR
12995 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
12997 tree op00 = TREE_OPERAND (sub, 0);
12998 tree op01 = TREE_OPERAND (sub, 1);
12999 tree op00type;
13001 STRIP_NOPS (op00);
13002 op00type = TREE_TYPE (op00);
13003 if (TREE_CODE (op00) == ADDR_EXPR
13004 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13005 && type == TREE_TYPE (TREE_TYPE (op00type)))
13007 tree size = TYPE_SIZE_UNIT (type);
13008 if (tree_int_cst_equal (size, op01))
13009 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13013 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13014 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13015 && type == TREE_TYPE (TREE_TYPE (subtype)))
13017 tree type_domain;
13018 tree min_val = size_zero_node;
13019 sub = build_fold_indirect_ref (sub);
13020 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13021 if (type_domain && TYPE_MIN_VALUE (type_domain))
13022 min_val = TYPE_MIN_VALUE (type_domain);
13023 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13026 return NULL_TREE;
13029 /* Builds an expression for an indirection through T, simplifying some
13030 cases. */
13032 tree
13033 build_fold_indirect_ref (tree t)
13035 tree type = TREE_TYPE (TREE_TYPE (t));
13036 tree sub = fold_indirect_ref_1 (type, t);
13038 if (sub)
13039 return sub;
13040 else
13041 return build1 (INDIRECT_REF, type, t);
13044 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13046 tree
13047 fold_indirect_ref (tree t)
13049 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13051 if (sub)
13052 return sub;
13053 else
13054 return t;
13057 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13058 whose result is ignored. The type of the returned tree need not be
13059 the same as the original expression. */
13061 tree
13062 fold_ignored_result (tree t)
13064 if (!TREE_SIDE_EFFECTS (t))
13065 return integer_zero_node;
13067 for (;;)
13068 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13070 case tcc_unary:
13071 t = TREE_OPERAND (t, 0);
13072 break;
13074 case tcc_binary:
13075 case tcc_comparison:
13076 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13077 t = TREE_OPERAND (t, 0);
13078 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13079 t = TREE_OPERAND (t, 1);
13080 else
13081 return t;
13082 break;
13084 case tcc_expression:
13085 switch (TREE_CODE (t))
13087 case COMPOUND_EXPR:
13088 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13089 return t;
13090 t = TREE_OPERAND (t, 0);
13091 break;
13093 case COND_EXPR:
13094 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13095 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13096 return t;
13097 t = TREE_OPERAND (t, 0);
13098 break;
13100 default:
13101 return t;
13103 break;
13105 default:
13106 return t;
13110 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13111 This can only be applied to objects of a sizetype. */
13113 tree
13114 round_up (tree value, int divisor)
13116 tree div = NULL_TREE;
13118 gcc_assert (divisor > 0);
13119 if (divisor == 1)
13120 return value;
13122 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13123 have to do anything. Only do this when we are not given a const,
13124 because in that case, this check is more expensive than just
13125 doing it. */
13126 if (TREE_CODE (value) != INTEGER_CST)
13128 div = build_int_cst (TREE_TYPE (value), divisor);
13130 if (multiple_of_p (TREE_TYPE (value), value, div))
13131 return value;
13134 /* If divisor is a power of two, simplify this to bit manipulation. */
13135 if (divisor == (divisor & -divisor))
13137 tree t;
13139 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13140 value = size_binop (PLUS_EXPR, value, t);
13141 t = build_int_cst (TREE_TYPE (value), -divisor);
13142 value = size_binop (BIT_AND_EXPR, value, t);
13144 else
13146 if (!div)
13147 div = build_int_cst (TREE_TYPE (value), divisor);
13148 value = size_binop (CEIL_DIV_EXPR, value, div);
13149 value = size_binop (MULT_EXPR, value, div);
13152 return value;
13155 /* Likewise, but round down. */
13157 tree
13158 round_down (tree value, int divisor)
13160 tree div = NULL_TREE;
13162 gcc_assert (divisor > 0);
13163 if (divisor == 1)
13164 return value;
13166 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13167 have to do anything. Only do this when we are not given a const,
13168 because in that case, this check is more expensive than just
13169 doing it. */
13170 if (TREE_CODE (value) != INTEGER_CST)
13172 div = build_int_cst (TREE_TYPE (value), divisor);
13174 if (multiple_of_p (TREE_TYPE (value), value, div))
13175 return value;
13178 /* If divisor is a power of two, simplify this to bit manipulation. */
13179 if (divisor == (divisor & -divisor))
13181 tree t;
13183 t = build_int_cst (TREE_TYPE (value), -divisor);
13184 value = size_binop (BIT_AND_EXPR, value, t);
13186 else
13188 if (!div)
13189 div = build_int_cst (TREE_TYPE (value), divisor);
13190 value = size_binop (FLOOR_DIV_EXPR, value, div);
13191 value = size_binop (MULT_EXPR, value, div);
13194 return value;
13197 /* Returns the pointer to the base of the object addressed by EXP and
13198 extracts the information about the offset of the access, storing it
13199 to PBITPOS and POFFSET. */
13201 static tree
13202 split_address_to_core_and_offset (tree exp,
13203 HOST_WIDE_INT *pbitpos, tree *poffset)
13205 tree core;
13206 enum machine_mode mode;
13207 int unsignedp, volatilep;
13208 HOST_WIDE_INT bitsize;
13210 if (TREE_CODE (exp) == ADDR_EXPR)
13212 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13213 poffset, &mode, &unsignedp, &volatilep,
13214 false);
13215 core = build_fold_addr_expr (core);
13217 else
13219 core = exp;
13220 *pbitpos = 0;
13221 *poffset = NULL_TREE;
13224 return core;
13227 /* Returns true if addresses of E1 and E2 differ by a constant, false
13228 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13230 bool
13231 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13233 tree core1, core2;
13234 HOST_WIDE_INT bitpos1, bitpos2;
13235 tree toffset1, toffset2, tdiff, type;
13237 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13238 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13240 if (bitpos1 % BITS_PER_UNIT != 0
13241 || bitpos2 % BITS_PER_UNIT != 0
13242 || !operand_equal_p (core1, core2, 0))
13243 return false;
13245 if (toffset1 && toffset2)
13247 type = TREE_TYPE (toffset1);
13248 if (type != TREE_TYPE (toffset2))
13249 toffset2 = fold_convert (type, toffset2);
13251 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13252 if (!cst_and_fits_in_hwi (tdiff))
13253 return false;
13255 *diff = int_cst_value (tdiff);
13257 else if (toffset1 || toffset2)
13259 /* If only one of the offsets is non-constant, the difference cannot
13260 be a constant. */
13261 return false;
13263 else
13264 *diff = 0;
13266 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13267 return true;
13270 /* Simplify the floating point expression EXP when the sign of the
13271 result is not significant. Return NULL_TREE if no simplification
13272 is possible. */
13274 tree
13275 fold_strip_sign_ops (tree exp)
13277 tree arg0, arg1;
13279 switch (TREE_CODE (exp))
13281 case ABS_EXPR:
13282 case NEGATE_EXPR:
13283 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13284 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13286 case MULT_EXPR:
13287 case RDIV_EXPR:
13288 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13289 return NULL_TREE;
13290 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13291 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13292 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13293 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13294 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13295 arg1 ? arg1 : TREE_OPERAND (exp, 1));
13296 break;
13298 default:
13299 break;
13301 return NULL_TREE;