* defaults.h (FRAME_GROWS_DOWNWARD): Define to 0 if not defined.
[official-gcc.git] / gcc / fold-const.c
blob564cec3ec87a0621ebb9671c87ab7c40b5b3d5b8
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
46 #include "config.h"
47 #include "system.h"
48 #include "coretypes.h"
49 #include "tm.h"
50 #include "flags.h"
51 #include "tree.h"
52 #include "real.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "toplev.h"
57 #include "ggc.h"
58 #include "hashtab.h"
59 #include "langhooks.h"
60 #include "md5.h"
62 /* The following constants represent a bit based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as AND and OR. */
65 enum comparison_code {
66 COMPCODE_FALSE = 0,
67 COMPCODE_LT = 1,
68 COMPCODE_EQ = 2,
69 COMPCODE_LE = 3,
70 COMPCODE_GT = 4,
71 COMPCODE_LTGT = 5,
72 COMPCODE_GE = 6,
73 COMPCODE_ORD = 7,
74 COMPCODE_UNORD = 8,
75 COMPCODE_UNLT = 9,
76 COMPCODE_UNEQ = 10,
77 COMPCODE_UNLE = 11,
78 COMPCODE_UNGT = 12,
79 COMPCODE_NE = 13,
80 COMPCODE_UNGE = 14,
81 COMPCODE_TRUE = 15
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum comparison_code comparison_to_compcode (enum tree_code);
93 static enum tree_code compcode_to_comparison (enum comparison_code);
94 static tree combine_comparisons (enum tree_code, enum tree_code,
95 enum tree_code, tree, tree, tree);
96 static int truth_value_p (enum tree_code);
97 static int operand_equal_for_comparison_p (tree, tree, tree);
98 static int twoval_comparison_p (tree, tree *, tree *, int *);
99 static tree eval_subst (tree, tree, tree, tree, tree);
100 static tree pedantic_omit_one_operand (tree, tree, tree);
101 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
102 static tree make_bit_field_ref (tree, tree, int, int, int);
103 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
104 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
105 enum machine_mode *, int *, int *,
106 tree *, tree *);
107 static int all_ones_mask_p (tree, int);
108 static tree sign_bit_p (tree, tree);
109 static int simple_operand_p (tree);
110 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
111 static tree make_range (tree, int *, tree *, tree *);
112 static tree build_range_check (tree, tree, int, tree, tree);
113 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
114 tree);
115 static tree fold_range_test (enum tree_code, tree, tree, tree);
116 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
117 static tree unextend (tree, int, int, tree);
118 static tree fold_truthop (enum tree_code, tree, tree, tree);
119 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
120 static tree extract_muldiv (tree, tree, enum tree_code, tree);
121 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
122 static int multiple_of_p (tree, tree, tree);
123 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
124 tree, tree,
125 tree, tree, int);
126 static bool fold_real_zero_addition_p (tree, tree, int);
127 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
128 tree, tree, tree);
129 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
130 static tree fold_div_compare (enum tree_code, tree, tree, tree);
131 static bool reorder_operands_p (tree, tree);
132 static tree fold_negate_const (tree, tree);
133 static tree fold_not_const (tree, tree);
134 static tree fold_relational_const (enum tree_code, tree, tree, tree);
135 static bool tree_expr_nonzero_p (tree);
137 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
138 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
139 and SUM1. Then this yields nonzero if overflow occurred during the
140 addition.
142 Overflow occurs if A and B have the same sign, but A and SUM differ in
143 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
144 sign. */
145 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
147 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
148 We do that by representing the two-word integer in 4 words, with only
149 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
150 number. The value of the word is LOWPART + HIGHPART * BASE. */
152 #define LOWPART(x) \
153 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
154 #define HIGHPART(x) \
155 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
156 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
158 /* Unpack a two-word integer into 4 words.
159 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
160 WORDS points to the array of HOST_WIDE_INTs. */
162 static void
163 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
165 words[0] = LOWPART (low);
166 words[1] = HIGHPART (low);
167 words[2] = LOWPART (hi);
168 words[3] = HIGHPART (hi);
171 /* Pack an array of 4 words into a two-word integer.
172 WORDS points to the array of words.
173 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
175 static void
176 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
177 HOST_WIDE_INT *hi)
179 *low = words[0] + words[1] * BASE;
180 *hi = words[2] + words[3] * BASE;
183 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
184 in overflow of the value, when >0 we are only interested in signed
185 overflow, for <0 we are interested in any overflow. OVERFLOWED
186 indicates whether overflow has already occurred. CONST_OVERFLOWED
187 indicates whether constant overflow has already occurred. We force
188 T's value to be within range of T's type (by setting to 0 or 1 all
189 the bits outside the type's range). We set TREE_OVERFLOWED if,
190 OVERFLOWED is nonzero,
191 or OVERFLOWABLE is >0 and signed overflow occurs
192 or OVERFLOWABLE is <0 and any overflow occurs
193 We set TREE_CONSTANT_OVERFLOWED if,
194 CONST_OVERFLOWED is nonzero
195 or we set TREE_OVERFLOWED.
196 We return either the original T, or a copy. */
198 tree
199 force_fit_type (tree t, int overflowable,
200 bool overflowed, bool overflowed_const)
202 unsigned HOST_WIDE_INT low;
203 HOST_WIDE_INT high;
204 unsigned int prec;
205 int sign_extended_type;
207 gcc_assert (TREE_CODE (t) == INTEGER_CST);
209 low = TREE_INT_CST_LOW (t);
210 high = TREE_INT_CST_HIGH (t);
212 if (POINTER_TYPE_P (TREE_TYPE (t))
213 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
214 prec = POINTER_SIZE;
215 else
216 prec = TYPE_PRECISION (TREE_TYPE (t));
217 /* Size types *are* sign extended. */
218 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
219 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
220 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
222 /* First clear all bits that are beyond the type's precision. */
224 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
226 else if (prec > HOST_BITS_PER_WIDE_INT)
227 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
228 else
230 high = 0;
231 if (prec < HOST_BITS_PER_WIDE_INT)
232 low &= ~((HOST_WIDE_INT) (-1) << prec);
235 if (!sign_extended_type)
236 /* No sign extension */;
237 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
238 /* Correct width already. */;
239 else if (prec > HOST_BITS_PER_WIDE_INT)
241 /* Sign extend top half? */
242 if (high & ((unsigned HOST_WIDE_INT)1
243 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
244 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
246 else if (prec == HOST_BITS_PER_WIDE_INT)
248 if ((HOST_WIDE_INT)low < 0)
249 high = -1;
251 else
253 /* Sign extend bottom half? */
254 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
256 high = -1;
257 low |= (HOST_WIDE_INT)(-1) << prec;
261 /* If the value changed, return a new node. */
262 if (overflowed || overflowed_const
263 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
265 t = build_int_cst_wide (TREE_TYPE (t), low, high);
267 if (overflowed
268 || overflowable < 0
269 || (overflowable > 0 && sign_extended_type))
271 t = copy_node (t);
272 TREE_OVERFLOW (t) = 1;
273 TREE_CONSTANT_OVERFLOW (t) = 1;
275 else if (overflowed_const)
277 t = copy_node (t);
278 TREE_CONSTANT_OVERFLOW (t) = 1;
282 return t;
285 /* Add two doubleword integers with doubleword result.
286 Each argument is given as two `HOST_WIDE_INT' pieces.
287 One argument is L1 and H1; the other, L2 and H2.
288 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
291 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
292 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
293 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
295 unsigned HOST_WIDE_INT l;
296 HOST_WIDE_INT h;
298 l = l1 + l2;
299 h = h1 + h2 + (l < l1);
301 *lv = l;
302 *hv = h;
303 return OVERFLOW_SUM_SIGN (h1, h2, h);
306 /* Negate a doubleword integer with doubleword result.
307 Return nonzero if the operation overflows, assuming it's signed.
308 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
309 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
312 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
313 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
315 if (l1 == 0)
317 *lv = 0;
318 *hv = - h1;
319 return (*hv & h1) < 0;
321 else
323 *lv = -l1;
324 *hv = ~h1;
325 return 0;
329 /* Multiply two doubleword integers with doubleword result.
330 Return nonzero if the operation overflows, assuming it's signed.
331 Each argument is given as two `HOST_WIDE_INT' pieces.
332 One argument is L1 and H1; the other, L2 and H2.
333 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
336 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
337 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
338 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
340 HOST_WIDE_INT arg1[4];
341 HOST_WIDE_INT arg2[4];
342 HOST_WIDE_INT prod[4 * 2];
343 unsigned HOST_WIDE_INT carry;
344 int i, j, k;
345 unsigned HOST_WIDE_INT toplow, neglow;
346 HOST_WIDE_INT tophigh, neghigh;
348 encode (arg1, l1, h1);
349 encode (arg2, l2, h2);
351 memset (prod, 0, sizeof prod);
353 for (i = 0; i < 4; i++)
355 carry = 0;
356 for (j = 0; j < 4; j++)
358 k = i + j;
359 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
360 carry += arg1[i] * arg2[j];
361 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
362 carry += prod[k];
363 prod[k] = LOWPART (carry);
364 carry = HIGHPART (carry);
366 prod[i + 4] = carry;
369 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
371 /* Check for overflow by calculating the top half of the answer in full;
372 it should agree with the low half's sign bit. */
373 decode (prod + 4, &toplow, &tophigh);
374 if (h1 < 0)
376 neg_double (l2, h2, &neglow, &neghigh);
377 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
379 if (h2 < 0)
381 neg_double (l1, h1, &neglow, &neghigh);
382 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
384 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
387 /* Shift the doubleword integer in L1, H1 left by COUNT places
388 keeping only PREC bits of result.
389 Shift right if COUNT is negative.
390 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
391 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
393 void
394 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
395 HOST_WIDE_INT count, unsigned int prec,
396 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
398 unsigned HOST_WIDE_INT signmask;
400 if (count < 0)
402 rshift_double (l1, h1, -count, prec, lv, hv, arith);
403 return;
406 if (SHIFT_COUNT_TRUNCATED)
407 count %= prec;
409 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
411 /* Shifting by the host word size is undefined according to the
412 ANSI standard, so we must handle this as a special case. */
413 *hv = 0;
414 *lv = 0;
416 else if (count >= HOST_BITS_PER_WIDE_INT)
418 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
419 *lv = 0;
421 else
423 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
424 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
425 *lv = l1 << count;
428 /* Sign extend all bits that are beyond the precision. */
430 signmask = -((prec > HOST_BITS_PER_WIDE_INT
431 ? ((unsigned HOST_WIDE_INT) *hv
432 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
433 : (*lv >> (prec - 1))) & 1);
435 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
437 else if (prec >= HOST_BITS_PER_WIDE_INT)
439 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
440 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
442 else
444 *hv = signmask;
445 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
446 *lv |= signmask << prec;
450 /* Shift the doubleword integer in L1, H1 right by COUNT places
451 keeping only PREC bits of result. COUNT must be positive.
452 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
453 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
455 void
456 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
457 HOST_WIDE_INT count, unsigned int prec,
458 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
459 int arith)
461 unsigned HOST_WIDE_INT signmask;
463 signmask = (arith
464 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
465 : 0);
467 if (SHIFT_COUNT_TRUNCATED)
468 count %= prec;
470 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
472 /* Shifting by the host word size is undefined according to the
473 ANSI standard, so we must handle this as a special case. */
474 *hv = 0;
475 *lv = 0;
477 else if (count >= HOST_BITS_PER_WIDE_INT)
479 *hv = 0;
480 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
482 else
484 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
485 *lv = ((l1 >> count)
486 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
489 /* Zero / sign extend all bits that are beyond the precision. */
491 if (count >= (HOST_WIDE_INT)prec)
493 *hv = signmask;
494 *lv = signmask;
496 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
498 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
500 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
501 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
503 else
505 *hv = signmask;
506 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
507 *lv |= signmask << (prec - count);
511 /* Rotate the doubleword integer in L1, H1 left by COUNT places
512 keeping only PREC bits of result.
513 Rotate right if COUNT is negative.
514 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
516 void
517 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
518 HOST_WIDE_INT count, unsigned int prec,
519 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
521 unsigned HOST_WIDE_INT s1l, s2l;
522 HOST_WIDE_INT s1h, s2h;
524 count %= prec;
525 if (count < 0)
526 count += prec;
528 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
529 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
530 *lv = s1l | s2l;
531 *hv = s1h | s2h;
534 /* Rotate the doubleword integer in L1, H1 left by COUNT places
535 keeping only PREC bits of result. COUNT must be positive.
536 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
538 void
539 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
540 HOST_WIDE_INT count, unsigned int prec,
541 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
543 unsigned HOST_WIDE_INT s1l, s2l;
544 HOST_WIDE_INT s1h, s2h;
546 count %= prec;
547 if (count < 0)
548 count += prec;
550 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
551 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
552 *lv = s1l | s2l;
553 *hv = s1h | s2h;
556 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
557 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
558 CODE is a tree code for a kind of division, one of
559 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
560 or EXACT_DIV_EXPR
561 It controls how the quotient is rounded to an integer.
562 Return nonzero if the operation overflows.
563 UNS nonzero says do unsigned division. */
566 div_and_round_double (enum tree_code code, int uns,
567 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
568 HOST_WIDE_INT hnum_orig,
569 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
570 HOST_WIDE_INT hden_orig,
571 unsigned HOST_WIDE_INT *lquo,
572 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
573 HOST_WIDE_INT *hrem)
575 int quo_neg = 0;
576 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
577 HOST_WIDE_INT den[4], quo[4];
578 int i, j;
579 unsigned HOST_WIDE_INT work;
580 unsigned HOST_WIDE_INT carry = 0;
581 unsigned HOST_WIDE_INT lnum = lnum_orig;
582 HOST_WIDE_INT hnum = hnum_orig;
583 unsigned HOST_WIDE_INT lden = lden_orig;
584 HOST_WIDE_INT hden = hden_orig;
585 int overflow = 0;
587 if (hden == 0 && lden == 0)
588 overflow = 1, lden = 1;
590 /* Calculate quotient sign and convert operands to unsigned. */
591 if (!uns)
593 if (hnum < 0)
595 quo_neg = ~ quo_neg;
596 /* (minimum integer) / (-1) is the only overflow case. */
597 if (neg_double (lnum, hnum, &lnum, &hnum)
598 && ((HOST_WIDE_INT) lden & hden) == -1)
599 overflow = 1;
601 if (hden < 0)
603 quo_neg = ~ quo_neg;
604 neg_double (lden, hden, &lden, &hden);
608 if (hnum == 0 && hden == 0)
609 { /* single precision */
610 *hquo = *hrem = 0;
611 /* This unsigned division rounds toward zero. */
612 *lquo = lnum / lden;
613 goto finish_up;
616 if (hnum == 0)
617 { /* trivial case: dividend < divisor */
618 /* hden != 0 already checked. */
619 *hquo = *lquo = 0;
620 *hrem = hnum;
621 *lrem = lnum;
622 goto finish_up;
625 memset (quo, 0, sizeof quo);
627 memset (num, 0, sizeof num); /* to zero 9th element */
628 memset (den, 0, sizeof den);
630 encode (num, lnum, hnum);
631 encode (den, lden, hden);
633 /* Special code for when the divisor < BASE. */
634 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
636 /* hnum != 0 already checked. */
637 for (i = 4 - 1; i >= 0; i--)
639 work = num[i] + carry * BASE;
640 quo[i] = work / lden;
641 carry = work % lden;
644 else
646 /* Full double precision division,
647 with thanks to Don Knuth's "Seminumerical Algorithms". */
648 int num_hi_sig, den_hi_sig;
649 unsigned HOST_WIDE_INT quo_est, scale;
651 /* Find the highest nonzero divisor digit. */
652 for (i = 4 - 1;; i--)
653 if (den[i] != 0)
655 den_hi_sig = i;
656 break;
659 /* Insure that the first digit of the divisor is at least BASE/2.
660 This is required by the quotient digit estimation algorithm. */
662 scale = BASE / (den[den_hi_sig] + 1);
663 if (scale > 1)
664 { /* scale divisor and dividend */
665 carry = 0;
666 for (i = 0; i <= 4 - 1; i++)
668 work = (num[i] * scale) + carry;
669 num[i] = LOWPART (work);
670 carry = HIGHPART (work);
673 num[4] = carry;
674 carry = 0;
675 for (i = 0; i <= 4 - 1; i++)
677 work = (den[i] * scale) + carry;
678 den[i] = LOWPART (work);
679 carry = HIGHPART (work);
680 if (den[i] != 0) den_hi_sig = i;
684 num_hi_sig = 4;
686 /* Main loop */
687 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
689 /* Guess the next quotient digit, quo_est, by dividing the first
690 two remaining dividend digits by the high order quotient digit.
691 quo_est is never low and is at most 2 high. */
692 unsigned HOST_WIDE_INT tmp;
694 num_hi_sig = i + den_hi_sig + 1;
695 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
696 if (num[num_hi_sig] != den[den_hi_sig])
697 quo_est = work / den[den_hi_sig];
698 else
699 quo_est = BASE - 1;
701 /* Refine quo_est so it's usually correct, and at most one high. */
702 tmp = work - quo_est * den[den_hi_sig];
703 if (tmp < BASE
704 && (den[den_hi_sig - 1] * quo_est
705 > (tmp * BASE + num[num_hi_sig - 2])))
706 quo_est--;
708 /* Try QUO_EST as the quotient digit, by multiplying the
709 divisor by QUO_EST and subtracting from the remaining dividend.
710 Keep in mind that QUO_EST is the I - 1st digit. */
712 carry = 0;
713 for (j = 0; j <= den_hi_sig; j++)
715 work = quo_est * den[j] + carry;
716 carry = HIGHPART (work);
717 work = num[i + j] - LOWPART (work);
718 num[i + j] = LOWPART (work);
719 carry += HIGHPART (work) != 0;
722 /* If quo_est was high by one, then num[i] went negative and
723 we need to correct things. */
724 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
726 quo_est--;
727 carry = 0; /* add divisor back in */
728 for (j = 0; j <= den_hi_sig; j++)
730 work = num[i + j] + den[j] + carry;
731 carry = HIGHPART (work);
732 num[i + j] = LOWPART (work);
735 num [num_hi_sig] += carry;
738 /* Store the quotient digit. */
739 quo[i] = quo_est;
743 decode (quo, lquo, hquo);
745 finish_up:
746 /* If result is negative, make it so. */
747 if (quo_neg)
748 neg_double (*lquo, *hquo, lquo, hquo);
750 /* Compute trial remainder: rem = num - (quo * den) */
751 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
752 neg_double (*lrem, *hrem, lrem, hrem);
753 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
755 switch (code)
757 case TRUNC_DIV_EXPR:
758 case TRUNC_MOD_EXPR: /* round toward zero */
759 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
760 return overflow;
762 case FLOOR_DIV_EXPR:
763 case FLOOR_MOD_EXPR: /* round toward negative infinity */
764 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
766 /* quo = quo - 1; */
767 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
768 lquo, hquo);
770 else
771 return overflow;
772 break;
774 case CEIL_DIV_EXPR:
775 case CEIL_MOD_EXPR: /* round toward positive infinity */
776 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
778 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
779 lquo, hquo);
781 else
782 return overflow;
783 break;
785 case ROUND_DIV_EXPR:
786 case ROUND_MOD_EXPR: /* round to closest integer */
788 unsigned HOST_WIDE_INT labs_rem = *lrem;
789 HOST_WIDE_INT habs_rem = *hrem;
790 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
791 HOST_WIDE_INT habs_den = hden, htwice;
793 /* Get absolute values. */
794 if (*hrem < 0)
795 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
796 if (hden < 0)
797 neg_double (lden, hden, &labs_den, &habs_den);
799 /* If (2 * abs (lrem) >= abs (lden)) */
800 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
801 labs_rem, habs_rem, &ltwice, &htwice);
803 if (((unsigned HOST_WIDE_INT) habs_den
804 < (unsigned HOST_WIDE_INT) htwice)
805 || (((unsigned HOST_WIDE_INT) habs_den
806 == (unsigned HOST_WIDE_INT) htwice)
807 && (labs_den < ltwice)))
809 if (*hquo < 0)
810 /* quo = quo - 1; */
811 add_double (*lquo, *hquo,
812 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
813 else
814 /* quo = quo + 1; */
815 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
816 lquo, hquo);
818 else
819 return overflow;
821 break;
823 default:
824 gcc_unreachable ();
827 /* Compute true remainder: rem = num - (quo * den) */
828 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
829 neg_double (*lrem, *hrem, lrem, hrem);
830 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
831 return overflow;
834 /* If ARG2 divides ARG1 with zero remainder, carries out the division
835 of type CODE and returns the quotient.
836 Otherwise returns NULL_TREE. */
838 static tree
839 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
841 unsigned HOST_WIDE_INT int1l, int2l;
842 HOST_WIDE_INT int1h, int2h;
843 unsigned HOST_WIDE_INT quol, reml;
844 HOST_WIDE_INT quoh, remh;
845 tree type = TREE_TYPE (arg1);
846 int uns = TYPE_UNSIGNED (type);
848 int1l = TREE_INT_CST_LOW (arg1);
849 int1h = TREE_INT_CST_HIGH (arg1);
850 int2l = TREE_INT_CST_LOW (arg2);
851 int2h = TREE_INT_CST_HIGH (arg2);
853 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
854 &quol, &quoh, &reml, &remh);
855 if (remh != 0 || reml != 0)
856 return NULL_TREE;
858 return build_int_cst_wide (type, quol, quoh);
861 /* Return true if built-in mathematical function specified by CODE
862 preserves the sign of it argument, i.e. -f(x) == f(-x). */
864 static bool
865 negate_mathfn_p (enum built_in_function code)
867 switch (code)
869 case BUILT_IN_ASIN:
870 case BUILT_IN_ASINF:
871 case BUILT_IN_ASINL:
872 case BUILT_IN_ATAN:
873 case BUILT_IN_ATANF:
874 case BUILT_IN_ATANL:
875 case BUILT_IN_SIN:
876 case BUILT_IN_SINF:
877 case BUILT_IN_SINL:
878 case BUILT_IN_TAN:
879 case BUILT_IN_TANF:
880 case BUILT_IN_TANL:
881 return true;
883 default:
884 break;
886 return false;
889 /* Check whether we may negate an integer constant T without causing
890 overflow. */
892 bool
893 may_negate_without_overflow_p (tree t)
895 unsigned HOST_WIDE_INT val;
896 unsigned int prec;
897 tree type;
899 gcc_assert (TREE_CODE (t) == INTEGER_CST);
901 type = TREE_TYPE (t);
902 if (TYPE_UNSIGNED (type))
903 return false;
905 prec = TYPE_PRECISION (type);
906 if (prec > HOST_BITS_PER_WIDE_INT)
908 if (TREE_INT_CST_LOW (t) != 0)
909 return true;
910 prec -= HOST_BITS_PER_WIDE_INT;
911 val = TREE_INT_CST_HIGH (t);
913 else
914 val = TREE_INT_CST_LOW (t);
915 if (prec < HOST_BITS_PER_WIDE_INT)
916 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
917 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
920 /* Determine whether an expression T can be cheaply negated using
921 the function negate_expr. */
923 static bool
924 negate_expr_p (tree t)
926 tree type;
928 if (t == 0)
929 return false;
931 type = TREE_TYPE (t);
933 STRIP_SIGN_NOPS (t);
934 switch (TREE_CODE (t))
936 case INTEGER_CST:
937 if (TYPE_UNSIGNED (type) || ! flag_trapv)
938 return true;
940 /* Check that -CST will not overflow type. */
941 return may_negate_without_overflow_p (t);
943 case REAL_CST:
944 case NEGATE_EXPR:
945 return true;
947 case COMPLEX_CST:
948 return negate_expr_p (TREE_REALPART (t))
949 && negate_expr_p (TREE_IMAGPART (t));
951 case PLUS_EXPR:
952 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
953 return false;
954 /* -(A + B) -> (-B) - A. */
955 if (negate_expr_p (TREE_OPERAND (t, 1))
956 && reorder_operands_p (TREE_OPERAND (t, 0),
957 TREE_OPERAND (t, 1)))
958 return true;
959 /* -(A + B) -> (-A) - B. */
960 return negate_expr_p (TREE_OPERAND (t, 0));
962 case MINUS_EXPR:
963 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
964 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
965 && reorder_operands_p (TREE_OPERAND (t, 0),
966 TREE_OPERAND (t, 1));
968 case MULT_EXPR:
969 if (TYPE_UNSIGNED (TREE_TYPE (t)))
970 break;
972 /* Fall through. */
974 case RDIV_EXPR:
975 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
976 return negate_expr_p (TREE_OPERAND (t, 1))
977 || negate_expr_p (TREE_OPERAND (t, 0));
978 break;
980 case NOP_EXPR:
981 /* Negate -((double)float) as (double)(-float). */
982 if (TREE_CODE (type) == REAL_TYPE)
984 tree tem = strip_float_extensions (t);
985 if (tem != t)
986 return negate_expr_p (tem);
988 break;
990 case CALL_EXPR:
991 /* Negate -f(x) as f(-x). */
992 if (negate_mathfn_p (builtin_mathfn_code (t)))
993 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
994 break;
996 case RSHIFT_EXPR:
997 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
998 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1000 tree op1 = TREE_OPERAND (t, 1);
1001 if (TREE_INT_CST_HIGH (op1) == 0
1002 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1003 == TREE_INT_CST_LOW (op1))
1004 return true;
1006 break;
1008 default:
1009 break;
1011 return false;
1014 /* Given T, an expression, return the negation of T. Allow for T to be
1015 null, in which case return null. */
1017 static tree
1018 negate_expr (tree t)
1020 tree type;
1021 tree tem;
1023 if (t == 0)
1024 return 0;
1026 type = TREE_TYPE (t);
1027 STRIP_SIGN_NOPS (t);
1029 switch (TREE_CODE (t))
1031 case INTEGER_CST:
1032 tem = fold_negate_const (t, type);
1033 if (! TREE_OVERFLOW (tem)
1034 || TYPE_UNSIGNED (type)
1035 || ! flag_trapv)
1036 return tem;
1037 break;
1039 case REAL_CST:
1040 tem = fold_negate_const (t, type);
1041 /* Two's complement FP formats, such as c4x, may overflow. */
1042 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1043 return fold_convert (type, tem);
1044 break;
1046 case COMPLEX_CST:
1048 tree rpart = negate_expr (TREE_REALPART (t));
1049 tree ipart = negate_expr (TREE_IMAGPART (t));
1051 if ((TREE_CODE (rpart) == REAL_CST
1052 && TREE_CODE (ipart) == REAL_CST)
1053 || (TREE_CODE (rpart) == INTEGER_CST
1054 && TREE_CODE (ipart) == INTEGER_CST))
1055 return build_complex (type, rpart, ipart);
1057 break;
1059 case NEGATE_EXPR:
1060 return fold_convert (type, TREE_OPERAND (t, 0));
1062 case PLUS_EXPR:
1063 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1065 /* -(A + B) -> (-B) - A. */
1066 if (negate_expr_p (TREE_OPERAND (t, 1))
1067 && reorder_operands_p (TREE_OPERAND (t, 0),
1068 TREE_OPERAND (t, 1)))
1070 tem = negate_expr (TREE_OPERAND (t, 1));
1071 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1072 tem, TREE_OPERAND (t, 0));
1073 return fold_convert (type, tem);
1076 /* -(A + B) -> (-A) - B. */
1077 if (negate_expr_p (TREE_OPERAND (t, 0)))
1079 tem = negate_expr (TREE_OPERAND (t, 0));
1080 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1081 tem, TREE_OPERAND (t, 1));
1082 return fold_convert (type, tem);
1085 break;
1087 case MINUS_EXPR:
1088 /* - (A - B) -> B - A */
1089 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1090 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1091 return fold_convert (type,
1092 fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1093 TREE_OPERAND (t, 1),
1094 TREE_OPERAND (t, 0)));
1095 break;
1097 case MULT_EXPR:
1098 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1099 break;
1101 /* Fall through. */
1103 case RDIV_EXPR:
1104 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1106 tem = TREE_OPERAND (t, 1);
1107 if (negate_expr_p (tem))
1108 return fold_convert (type,
1109 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1110 TREE_OPERAND (t, 0),
1111 negate_expr (tem)));
1112 tem = TREE_OPERAND (t, 0);
1113 if (negate_expr_p (tem))
1114 return fold_convert (type,
1115 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1116 negate_expr (tem),
1117 TREE_OPERAND (t, 1)));
1119 break;
1121 case NOP_EXPR:
1122 /* Convert -((double)float) into (double)(-float). */
1123 if (TREE_CODE (type) == REAL_TYPE)
1125 tem = strip_float_extensions (t);
1126 if (tem != t && negate_expr_p (tem))
1127 return fold_convert (type, negate_expr (tem));
1129 break;
1131 case CALL_EXPR:
1132 /* Negate -f(x) as f(-x). */
1133 if (negate_mathfn_p (builtin_mathfn_code (t))
1134 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1136 tree fndecl, arg, arglist;
1138 fndecl = get_callee_fndecl (t);
1139 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1140 arglist = build_tree_list (NULL_TREE, arg);
1141 return build_function_call_expr (fndecl, arglist);
1143 break;
1145 case RSHIFT_EXPR:
1146 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1147 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1149 tree op1 = TREE_OPERAND (t, 1);
1150 if (TREE_INT_CST_HIGH (op1) == 0
1151 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1152 == TREE_INT_CST_LOW (op1))
1154 tree ntype = TYPE_UNSIGNED (type)
1155 ? lang_hooks.types.signed_type (type)
1156 : lang_hooks.types.unsigned_type (type);
1157 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1158 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1159 return fold_convert (type, temp);
1162 break;
1164 default:
1165 break;
1168 tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1169 return fold_convert (type, tem);
1172 /* Split a tree IN into a constant, literal and variable parts that could be
1173 combined with CODE to make IN. "constant" means an expression with
1174 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1175 commutative arithmetic operation. Store the constant part into *CONP,
1176 the literal in *LITP and return the variable part. If a part isn't
1177 present, set it to null. If the tree does not decompose in this way,
1178 return the entire tree as the variable part and the other parts as null.
1180 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1181 case, we negate an operand that was subtracted. Except if it is a
1182 literal for which we use *MINUS_LITP instead.
1184 If NEGATE_P is true, we are negating all of IN, again except a literal
1185 for which we use *MINUS_LITP instead.
1187 If IN is itself a literal or constant, return it as appropriate.
1189 Note that we do not guarantee that any of the three values will be the
1190 same type as IN, but they will have the same signedness and mode. */
1192 static tree
1193 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1194 tree *minus_litp, int negate_p)
1196 tree var = 0;
1198 *conp = 0;
1199 *litp = 0;
1200 *minus_litp = 0;
1202 /* Strip any conversions that don't change the machine mode or signedness. */
1203 STRIP_SIGN_NOPS (in);
1205 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1206 *litp = in;
1207 else if (TREE_CODE (in) == code
1208 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1209 /* We can associate addition and subtraction together (even
1210 though the C standard doesn't say so) for integers because
1211 the value is not affected. For reals, the value might be
1212 affected, so we can't. */
1213 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1214 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1216 tree op0 = TREE_OPERAND (in, 0);
1217 tree op1 = TREE_OPERAND (in, 1);
1218 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1219 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1221 /* First see if either of the operands is a literal, then a constant. */
1222 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1223 *litp = op0, op0 = 0;
1224 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1225 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1227 if (op0 != 0 && TREE_CONSTANT (op0))
1228 *conp = op0, op0 = 0;
1229 else if (op1 != 0 && TREE_CONSTANT (op1))
1230 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1232 /* If we haven't dealt with either operand, this is not a case we can
1233 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1234 if (op0 != 0 && op1 != 0)
1235 var = in;
1236 else if (op0 != 0)
1237 var = op0;
1238 else
1239 var = op1, neg_var_p = neg1_p;
1241 /* Now do any needed negations. */
1242 if (neg_litp_p)
1243 *minus_litp = *litp, *litp = 0;
1244 if (neg_conp_p)
1245 *conp = negate_expr (*conp);
1246 if (neg_var_p)
1247 var = negate_expr (var);
1249 else if (TREE_CONSTANT (in))
1250 *conp = in;
1251 else
1252 var = in;
1254 if (negate_p)
1256 if (*litp)
1257 *minus_litp = *litp, *litp = 0;
1258 else if (*minus_litp)
1259 *litp = *minus_litp, *minus_litp = 0;
1260 *conp = negate_expr (*conp);
1261 var = negate_expr (var);
1264 return var;
1267 /* Re-associate trees split by the above function. T1 and T2 are either
1268 expressions to associate or null. Return the new expression, if any. If
1269 we build an operation, do it in TYPE and with CODE. */
1271 static tree
1272 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1274 if (t1 == 0)
1275 return t2;
1276 else if (t2 == 0)
1277 return t1;
1279 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1280 try to fold this since we will have infinite recursion. But do
1281 deal with any NEGATE_EXPRs. */
1282 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1283 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1285 if (code == PLUS_EXPR)
1287 if (TREE_CODE (t1) == NEGATE_EXPR)
1288 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1289 fold_convert (type, TREE_OPERAND (t1, 0)));
1290 else if (TREE_CODE (t2) == NEGATE_EXPR)
1291 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1292 fold_convert (type, TREE_OPERAND (t2, 0)));
1293 else if (integer_zerop (t2))
1294 return fold_convert (type, t1);
1296 else if (code == MINUS_EXPR)
1298 if (integer_zerop (t2))
1299 return fold_convert (type, t1);
1302 return build2 (code, type, fold_convert (type, t1),
1303 fold_convert (type, t2));
1306 return fold_build2 (code, type, fold_convert (type, t1),
1307 fold_convert (type, t2));
1310 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1311 to produce a new constant.
1313 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1315 tree
1316 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1318 unsigned HOST_WIDE_INT int1l, int2l;
1319 HOST_WIDE_INT int1h, int2h;
1320 unsigned HOST_WIDE_INT low;
1321 HOST_WIDE_INT hi;
1322 unsigned HOST_WIDE_INT garbagel;
1323 HOST_WIDE_INT garbageh;
1324 tree t;
1325 tree type = TREE_TYPE (arg1);
1326 int uns = TYPE_UNSIGNED (type);
1327 int is_sizetype
1328 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1329 int overflow = 0;
1331 int1l = TREE_INT_CST_LOW (arg1);
1332 int1h = TREE_INT_CST_HIGH (arg1);
1333 int2l = TREE_INT_CST_LOW (arg2);
1334 int2h = TREE_INT_CST_HIGH (arg2);
1336 switch (code)
1338 case BIT_IOR_EXPR:
1339 low = int1l | int2l, hi = int1h | int2h;
1340 break;
1342 case BIT_XOR_EXPR:
1343 low = int1l ^ int2l, hi = int1h ^ int2h;
1344 break;
1346 case BIT_AND_EXPR:
1347 low = int1l & int2l, hi = int1h & int2h;
1348 break;
1350 case RSHIFT_EXPR:
1351 int2l = -int2l;
1352 case LSHIFT_EXPR:
1353 /* It's unclear from the C standard whether shifts can overflow.
1354 The following code ignores overflow; perhaps a C standard
1355 interpretation ruling is needed. */
1356 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1357 &low, &hi, !uns);
1358 break;
1360 case RROTATE_EXPR:
1361 int2l = - int2l;
1362 case LROTATE_EXPR:
1363 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1364 &low, &hi);
1365 break;
1367 case PLUS_EXPR:
1368 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1369 break;
1371 case MINUS_EXPR:
1372 neg_double (int2l, int2h, &low, &hi);
1373 add_double (int1l, int1h, low, hi, &low, &hi);
1374 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1375 break;
1377 case MULT_EXPR:
1378 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1379 break;
1381 case TRUNC_DIV_EXPR:
1382 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1383 case EXACT_DIV_EXPR:
1384 /* This is a shortcut for a common special case. */
1385 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1386 && ! TREE_CONSTANT_OVERFLOW (arg1)
1387 && ! TREE_CONSTANT_OVERFLOW (arg2)
1388 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1390 if (code == CEIL_DIV_EXPR)
1391 int1l += int2l - 1;
1393 low = int1l / int2l, hi = 0;
1394 break;
1397 /* ... fall through ... */
1399 case ROUND_DIV_EXPR:
1400 if (int2h == 0 && int2l == 1)
1402 low = int1l, hi = int1h;
1403 break;
1405 if (int1l == int2l && int1h == int2h
1406 && ! (int1l == 0 && int1h == 0))
1408 low = 1, hi = 0;
1409 break;
1411 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1412 &low, &hi, &garbagel, &garbageh);
1413 break;
1415 case TRUNC_MOD_EXPR:
1416 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1417 /* This is a shortcut for a common special case. */
1418 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1419 && ! TREE_CONSTANT_OVERFLOW (arg1)
1420 && ! TREE_CONSTANT_OVERFLOW (arg2)
1421 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1423 if (code == CEIL_MOD_EXPR)
1424 int1l += int2l - 1;
1425 low = int1l % int2l, hi = 0;
1426 break;
1429 /* ... fall through ... */
1431 case ROUND_MOD_EXPR:
1432 overflow = div_and_round_double (code, uns,
1433 int1l, int1h, int2l, int2h,
1434 &garbagel, &garbageh, &low, &hi);
1435 break;
1437 case MIN_EXPR:
1438 case MAX_EXPR:
1439 if (uns)
1440 low = (((unsigned HOST_WIDE_INT) int1h
1441 < (unsigned HOST_WIDE_INT) int2h)
1442 || (((unsigned HOST_WIDE_INT) int1h
1443 == (unsigned HOST_WIDE_INT) int2h)
1444 && int1l < int2l));
1445 else
1446 low = (int1h < int2h
1447 || (int1h == int2h && int1l < int2l));
1449 if (low == (code == MIN_EXPR))
1450 low = int1l, hi = int1h;
1451 else
1452 low = int2l, hi = int2h;
1453 break;
1455 default:
1456 gcc_unreachable ();
1459 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1461 if (notrunc)
1463 /* Propagate overflow flags ourselves. */
1464 if (((!uns || is_sizetype) && overflow)
1465 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1467 t = copy_node (t);
1468 TREE_OVERFLOW (t) = 1;
1469 TREE_CONSTANT_OVERFLOW (t) = 1;
1471 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1473 t = copy_node (t);
1474 TREE_CONSTANT_OVERFLOW (t) = 1;
1477 else
1478 t = force_fit_type (t, 1,
1479 ((!uns || is_sizetype) && overflow)
1480 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1481 TREE_CONSTANT_OVERFLOW (arg1)
1482 | TREE_CONSTANT_OVERFLOW (arg2));
1484 return t;
1487 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1488 constant. We assume ARG1 and ARG2 have the same data type, or at least
1489 are the same kind of constant and the same machine mode.
1491 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1493 static tree
1494 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1496 STRIP_NOPS (arg1);
1497 STRIP_NOPS (arg2);
1499 if (TREE_CODE (arg1) == INTEGER_CST)
1500 return int_const_binop (code, arg1, arg2, notrunc);
1502 if (TREE_CODE (arg1) == REAL_CST)
1504 enum machine_mode mode;
1505 REAL_VALUE_TYPE d1;
1506 REAL_VALUE_TYPE d2;
1507 REAL_VALUE_TYPE value;
1508 REAL_VALUE_TYPE result;
1509 bool inexact;
1510 tree t, type;
1512 d1 = TREE_REAL_CST (arg1);
1513 d2 = TREE_REAL_CST (arg2);
1515 type = TREE_TYPE (arg1);
1516 mode = TYPE_MODE (type);
1518 /* Don't perform operation if we honor signaling NaNs and
1519 either operand is a NaN. */
1520 if (HONOR_SNANS (mode)
1521 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1522 return NULL_TREE;
1524 /* Don't perform operation if it would raise a division
1525 by zero exception. */
1526 if (code == RDIV_EXPR
1527 && REAL_VALUES_EQUAL (d2, dconst0)
1528 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1529 return NULL_TREE;
1531 /* If either operand is a NaN, just return it. Otherwise, set up
1532 for floating-point trap; we return an overflow. */
1533 if (REAL_VALUE_ISNAN (d1))
1534 return arg1;
1535 else if (REAL_VALUE_ISNAN (d2))
1536 return arg2;
1538 inexact = real_arithmetic (&value, code, &d1, &d2);
1539 real_convert (&result, mode, &value);
1541 /* Don't constant fold this floating point operation if the
1542 result may dependent upon the run-time rounding mode and
1543 flag_rounding_math is set, or if GCC's software emulation
1544 is unable to accurately represent the result. */
1546 if ((flag_rounding_math
1547 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1548 && !flag_unsafe_math_optimizations))
1549 && (inexact || !real_identical (&result, &value)))
1550 return NULL_TREE;
1552 t = build_real (type, result);
1554 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1555 TREE_CONSTANT_OVERFLOW (t)
1556 = TREE_OVERFLOW (t)
1557 | TREE_CONSTANT_OVERFLOW (arg1)
1558 | TREE_CONSTANT_OVERFLOW (arg2);
1559 return t;
1561 if (TREE_CODE (arg1) == COMPLEX_CST)
1563 tree type = TREE_TYPE (arg1);
1564 tree r1 = TREE_REALPART (arg1);
1565 tree i1 = TREE_IMAGPART (arg1);
1566 tree r2 = TREE_REALPART (arg2);
1567 tree i2 = TREE_IMAGPART (arg2);
1568 tree t;
1570 switch (code)
1572 case PLUS_EXPR:
1573 t = build_complex (type,
1574 const_binop (PLUS_EXPR, r1, r2, notrunc),
1575 const_binop (PLUS_EXPR, i1, i2, notrunc));
1576 break;
1578 case MINUS_EXPR:
1579 t = build_complex (type,
1580 const_binop (MINUS_EXPR, r1, r2, notrunc),
1581 const_binop (MINUS_EXPR, i1, i2, notrunc));
1582 break;
1584 case MULT_EXPR:
1585 t = build_complex (type,
1586 const_binop (MINUS_EXPR,
1587 const_binop (MULT_EXPR,
1588 r1, r2, notrunc),
1589 const_binop (MULT_EXPR,
1590 i1, i2, notrunc),
1591 notrunc),
1592 const_binop (PLUS_EXPR,
1593 const_binop (MULT_EXPR,
1594 r1, i2, notrunc),
1595 const_binop (MULT_EXPR,
1596 i1, r2, notrunc),
1597 notrunc));
1598 break;
1600 case RDIV_EXPR:
1602 tree t1, t2, real, imag;
1603 tree magsquared
1604 = const_binop (PLUS_EXPR,
1605 const_binop (MULT_EXPR, r2, r2, notrunc),
1606 const_binop (MULT_EXPR, i2, i2, notrunc),
1607 notrunc);
1609 t1 = const_binop (PLUS_EXPR,
1610 const_binop (MULT_EXPR, r1, r2, notrunc),
1611 const_binop (MULT_EXPR, i1, i2, notrunc),
1612 notrunc);
1613 t2 = const_binop (MINUS_EXPR,
1614 const_binop (MULT_EXPR, i1, r2, notrunc),
1615 const_binop (MULT_EXPR, r1, i2, notrunc),
1616 notrunc);
1618 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1620 real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
1621 imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
1623 else
1625 real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
1626 imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
1627 if (!real || !imag)
1628 return NULL_TREE;
1631 t = build_complex (type, real, imag);
1633 break;
1635 default:
1636 gcc_unreachable ();
1638 return t;
1640 return 0;
1643 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1644 indicates which particular sizetype to create. */
1646 tree
1647 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1649 return build_int_cst (sizetype_tab[(int) kind], number);
1652 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1653 is a tree code. The type of the result is taken from the operands.
1654 Both must be the same type integer type and it must be a size type.
1655 If the operands are constant, so is the result. */
1657 tree
1658 size_binop (enum tree_code code, tree arg0, tree arg1)
1660 tree type = TREE_TYPE (arg0);
1662 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1663 && type == TREE_TYPE (arg1));
1665 /* Handle the special case of two integer constants faster. */
1666 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1668 /* And some specific cases even faster than that. */
1669 if (code == PLUS_EXPR && integer_zerop (arg0))
1670 return arg1;
1671 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1672 && integer_zerop (arg1))
1673 return arg0;
1674 else if (code == MULT_EXPR && integer_onep (arg0))
1675 return arg1;
1677 /* Handle general case of two integer constants. */
1678 return int_const_binop (code, arg0, arg1, 0);
1681 if (arg0 == error_mark_node || arg1 == error_mark_node)
1682 return error_mark_node;
1684 return fold_build2 (code, type, arg0, arg1);
1687 /* Given two values, either both of sizetype or both of bitsizetype,
1688 compute the difference between the two values. Return the value
1689 in signed type corresponding to the type of the operands. */
1691 tree
1692 size_diffop (tree arg0, tree arg1)
1694 tree type = TREE_TYPE (arg0);
1695 tree ctype;
1697 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1698 && type == TREE_TYPE (arg1));
1700 /* If the type is already signed, just do the simple thing. */
1701 if (!TYPE_UNSIGNED (type))
1702 return size_binop (MINUS_EXPR, arg0, arg1);
1704 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1706 /* If either operand is not a constant, do the conversions to the signed
1707 type and subtract. The hardware will do the right thing with any
1708 overflow in the subtraction. */
1709 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1710 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1711 fold_convert (ctype, arg1));
1713 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1714 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1715 overflow) and negate (which can't either). Special-case a result
1716 of zero while we're here. */
1717 if (tree_int_cst_equal (arg0, arg1))
1718 return fold_convert (ctype, integer_zero_node);
1719 else if (tree_int_cst_lt (arg1, arg0))
1720 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1721 else
1722 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1723 fold_convert (ctype, size_binop (MINUS_EXPR,
1724 arg1, arg0)));
1727 /* A subroutine of fold_convert_const handling conversions of an
1728 INTEGER_CST to another integer type. */
1730 static tree
1731 fold_convert_const_int_from_int (tree type, tree arg1)
1733 tree t;
1735 /* Given an integer constant, make new constant with new type,
1736 appropriately sign-extended or truncated. */
1737 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1738 TREE_INT_CST_HIGH (arg1));
1740 t = force_fit_type (t,
1741 /* Don't set the overflow when
1742 converting a pointer */
1743 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1744 (TREE_INT_CST_HIGH (arg1) < 0
1745 && (TYPE_UNSIGNED (type)
1746 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1747 | TREE_OVERFLOW (arg1),
1748 TREE_CONSTANT_OVERFLOW (arg1));
1750 return t;
1753 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1754 to an integer type. */
1756 static tree
1757 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1759 int overflow = 0;
1760 tree t;
1762 /* The following code implements the floating point to integer
1763 conversion rules required by the Java Language Specification,
1764 that IEEE NaNs are mapped to zero and values that overflow
1765 the target precision saturate, i.e. values greater than
1766 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1767 are mapped to INT_MIN. These semantics are allowed by the
1768 C and C++ standards that simply state that the behavior of
1769 FP-to-integer conversion is unspecified upon overflow. */
1771 HOST_WIDE_INT high, low;
1772 REAL_VALUE_TYPE r;
1773 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1775 switch (code)
1777 case FIX_TRUNC_EXPR:
1778 real_trunc (&r, VOIDmode, &x);
1779 break;
1781 case FIX_CEIL_EXPR:
1782 real_ceil (&r, VOIDmode, &x);
1783 break;
1785 case FIX_FLOOR_EXPR:
1786 real_floor (&r, VOIDmode, &x);
1787 break;
1789 case FIX_ROUND_EXPR:
1790 real_round (&r, VOIDmode, &x);
1791 break;
1793 default:
1794 gcc_unreachable ();
1797 /* If R is NaN, return zero and show we have an overflow. */
1798 if (REAL_VALUE_ISNAN (r))
1800 overflow = 1;
1801 high = 0;
1802 low = 0;
1805 /* See if R is less than the lower bound or greater than the
1806 upper bound. */
1808 if (! overflow)
1810 tree lt = TYPE_MIN_VALUE (type);
1811 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1812 if (REAL_VALUES_LESS (r, l))
1814 overflow = 1;
1815 high = TREE_INT_CST_HIGH (lt);
1816 low = TREE_INT_CST_LOW (lt);
1820 if (! overflow)
1822 tree ut = TYPE_MAX_VALUE (type);
1823 if (ut)
1825 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1826 if (REAL_VALUES_LESS (u, r))
1828 overflow = 1;
1829 high = TREE_INT_CST_HIGH (ut);
1830 low = TREE_INT_CST_LOW (ut);
1835 if (! overflow)
1836 REAL_VALUE_TO_INT (&low, &high, r);
1838 t = build_int_cst_wide (type, low, high);
1840 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1841 TREE_CONSTANT_OVERFLOW (arg1));
1842 return t;
1845 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1846 to another floating point type. */
1848 static tree
1849 fold_convert_const_real_from_real (tree type, tree arg1)
1851 REAL_VALUE_TYPE value;
1852 tree t;
1854 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1855 t = build_real (type, value);
1857 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1858 TREE_CONSTANT_OVERFLOW (t)
1859 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1860 return t;
1863 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1864 type TYPE. If no simplification can be done return NULL_TREE. */
1866 static tree
1867 fold_convert_const (enum tree_code code, tree type, tree arg1)
1869 if (TREE_TYPE (arg1) == type)
1870 return arg1;
1872 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1874 if (TREE_CODE (arg1) == INTEGER_CST)
1875 return fold_convert_const_int_from_int (type, arg1);
1876 else if (TREE_CODE (arg1) == REAL_CST)
1877 return fold_convert_const_int_from_real (code, type, arg1);
1879 else if (TREE_CODE (type) == REAL_TYPE)
1881 if (TREE_CODE (arg1) == INTEGER_CST)
1882 return build_real_from_int_cst (type, arg1);
1883 if (TREE_CODE (arg1) == REAL_CST)
1884 return fold_convert_const_real_from_real (type, arg1);
1886 return NULL_TREE;
1889 /* Construct a vector of zero elements of vector type TYPE. */
1891 static tree
1892 build_zero_vector (tree type)
1894 tree elem, list;
1895 int i, units;
1897 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1898 units = TYPE_VECTOR_SUBPARTS (type);
1900 list = NULL_TREE;
1901 for (i = 0; i < units; i++)
1902 list = tree_cons (NULL_TREE, elem, list);
1903 return build_vector (type, list);
1906 /* Convert expression ARG to type TYPE. Used by the middle-end for
1907 simple conversions in preference to calling the front-end's convert. */
1909 tree
1910 fold_convert (tree type, tree arg)
1912 tree orig = TREE_TYPE (arg);
1913 tree tem;
1915 if (type == orig)
1916 return arg;
1918 if (TREE_CODE (arg) == ERROR_MARK
1919 || TREE_CODE (type) == ERROR_MARK
1920 || TREE_CODE (orig) == ERROR_MARK)
1921 return error_mark_node;
1923 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1924 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1925 TYPE_MAIN_VARIANT (orig)))
1926 return fold_build1 (NOP_EXPR, type, arg);
1928 switch (TREE_CODE (type))
1930 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1931 case POINTER_TYPE: case REFERENCE_TYPE:
1932 case OFFSET_TYPE:
1933 if (TREE_CODE (arg) == INTEGER_CST)
1935 tem = fold_convert_const (NOP_EXPR, type, arg);
1936 if (tem != NULL_TREE)
1937 return tem;
1939 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1940 || TREE_CODE (orig) == OFFSET_TYPE)
1941 return fold_build1 (NOP_EXPR, type, arg);
1942 if (TREE_CODE (orig) == COMPLEX_TYPE)
1944 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1945 return fold_convert (type, tem);
1947 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1948 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1949 return fold_build1 (NOP_EXPR, type, arg);
1951 case REAL_TYPE:
1952 if (TREE_CODE (arg) == INTEGER_CST)
1954 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1955 if (tem != NULL_TREE)
1956 return tem;
1958 else if (TREE_CODE (arg) == REAL_CST)
1960 tem = fold_convert_const (NOP_EXPR, type, arg);
1961 if (tem != NULL_TREE)
1962 return tem;
1965 switch (TREE_CODE (orig))
1967 case INTEGER_TYPE: case CHAR_TYPE:
1968 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1969 case POINTER_TYPE: case REFERENCE_TYPE:
1970 return fold_build1 (FLOAT_EXPR, type, arg);
1972 case REAL_TYPE:
1973 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1974 type, arg);
1976 case COMPLEX_TYPE:
1977 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1978 return fold_convert (type, tem);
1980 default:
1981 gcc_unreachable ();
1984 case COMPLEX_TYPE:
1985 switch (TREE_CODE (orig))
1987 case INTEGER_TYPE: case CHAR_TYPE:
1988 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1989 case POINTER_TYPE: case REFERENCE_TYPE:
1990 case REAL_TYPE:
1991 return build2 (COMPLEX_EXPR, type,
1992 fold_convert (TREE_TYPE (type), arg),
1993 fold_convert (TREE_TYPE (type), integer_zero_node));
1994 case COMPLEX_TYPE:
1996 tree rpart, ipart;
1998 if (TREE_CODE (arg) == COMPLEX_EXPR)
2000 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2001 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2002 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2005 arg = save_expr (arg);
2006 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2007 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2008 rpart = fold_convert (TREE_TYPE (type), rpart);
2009 ipart = fold_convert (TREE_TYPE (type), ipart);
2010 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2013 default:
2014 gcc_unreachable ();
2017 case VECTOR_TYPE:
2018 if (integer_zerop (arg))
2019 return build_zero_vector (type);
2020 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2021 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2022 || TREE_CODE (orig) == VECTOR_TYPE);
2023 return fold_build1 (NOP_EXPR, type, arg);
2025 case VOID_TYPE:
2026 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2028 default:
2029 gcc_unreachable ();
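/* [Editorial illustration, not part of this file.] The COMPLEX_TYPE arm
   above builds (x, 0) when converting from a scalar and converts an
   existing complex value part-wise. A minimal C99 sketch of the same
   semantics, assuming a hosted environment: */
#if 0
#include <complex.h>
#include <stdio.h>

int
main (void)
{
  int x = 7;
  /* Scalar -> complex: real part is the converted scalar, imaginary
     part is zero, mirroring the COMPLEX_EXPR built above.  */
  double _Complex c = (double _Complex) x;
  /* Complex -> complex: each part is converted independently, as the
     REALPART_EXPR/IMAGPART_EXPR recursion above does.  */
  float _Complex f = (float _Complex) c;
  printf ("%g%+gi\n", creal (f), cimag (f));	/* prints 7+0i */
  return 0;
}
#endif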
2033 /* Return false if EXPR can be assumed not to be an lvalue, true
2034 otherwise. */
2036 static bool
2037 maybe_lvalue_p (tree x)
2039 /* We only need to wrap lvalue tree codes. */
2040 switch (TREE_CODE (x))
2042 case VAR_DECL:
2043 case PARM_DECL:
2044 case RESULT_DECL:
2045 case LABEL_DECL:
2046 case FUNCTION_DECL:
2047 case SSA_NAME:
2049 case COMPONENT_REF:
2050 case INDIRECT_REF:
2051 case ALIGN_INDIRECT_REF:
2052 case MISALIGNED_INDIRECT_REF:
2053 case ARRAY_REF:
2054 case ARRAY_RANGE_REF:
2055 case BIT_FIELD_REF:
2056 case OBJ_TYPE_REF:
2058 case REALPART_EXPR:
2059 case IMAGPART_EXPR:
2060 case PREINCREMENT_EXPR:
2061 case PREDECREMENT_EXPR:
2062 case SAVE_EXPR:
2063 case TRY_CATCH_EXPR:
2064 case WITH_CLEANUP_EXPR:
2065 case COMPOUND_EXPR:
2066 case MODIFY_EXPR:
2067 case TARGET_EXPR:
2068 case COND_EXPR:
2069 case BIND_EXPR:
2070 case MIN_EXPR:
2071 case MAX_EXPR:
2072 break;
2074 default:
2075 /* Assume the worst for front-end tree codes. */
2076 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2077 break;
2078 return false;
2081 return true;
2084 /* Return an expr equal to X but certainly not valid as an lvalue. */
2086 tree
2087 non_lvalue (tree x)
2089 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2090 us. */
2091 if (in_gimple_form)
2092 return x;
2094 if (! maybe_lvalue_p (x))
2095 return x;
2096 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2099 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2100 Zero means allow extended lvalues. */
2102 int pedantic_lvalues;
2104 /* When pedantic, return an expr equal to X but certainly not valid as a
2105 pedantic lvalue. Otherwise, return X. */
2107 static tree
2108 pedantic_non_lvalue (tree x)
2110 if (pedantic_lvalues)
2111 return non_lvalue (x);
2112 else
2113 return x;
2116 /* Given a tree comparison code, return the code that is the logical inverse
2117 of the given code. It is not safe to do this for floating-point
2118 comparisons, except for NE_EXPR and EQ_EXPR, so we receive the HONOR_NANS
2119 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2121 enum tree_code
2122 invert_tree_comparison (enum tree_code code, bool honor_nans)
2124 if (honor_nans && flag_trapping_math)
2125 return ERROR_MARK;
2127 switch (code)
2129 case EQ_EXPR:
2130 return NE_EXPR;
2131 case NE_EXPR:
2132 return EQ_EXPR;
2133 case GT_EXPR:
2134 return honor_nans ? UNLE_EXPR : LE_EXPR;
2135 case GE_EXPR:
2136 return honor_nans ? UNLT_EXPR : LT_EXPR;
2137 case LT_EXPR:
2138 return honor_nans ? UNGE_EXPR : GE_EXPR;
2139 case LE_EXPR:
2140 return honor_nans ? UNGT_EXPR : GT_EXPR;
2141 case LTGT_EXPR:
2142 return UNEQ_EXPR;
2143 case UNEQ_EXPR:
2144 return LTGT_EXPR;
2145 case UNGT_EXPR:
2146 return LE_EXPR;
2147 case UNGE_EXPR:
2148 return LT_EXPR;
2149 case UNLT_EXPR:
2150 return GE_EXPR;
2151 case UNLE_EXPR:
2152 return GT_EXPR;
2153 case ORDERED_EXPR:
2154 return UNORDERED_EXPR;
2155 case UNORDERED_EXPR:
2156 return ORDERED_EXPR;
2157 default:
2158 gcc_unreachable ();
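/* [Editorial sketch, not part of this file.] Why HONOR_NANS forces the
   UN* codes above: with a NaN operand both ordered comparisons are
   false, so LT_EXPR may only be inverted to GE_EXPR when NaNs cannot
   occur; the safe inverse is UNGE_EXPR ("unordered or >="). */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double a = NAN, b = 1.0;
  /* Both ordered comparisons are false on a NaN operand, so inverting
     `<' to `>=' would change the result...  */
  assert (!(a < b) && !(a >= b));
  /* ...whereas "unordered or >=" (UNGE) is the exact inverse of `<'.  */
  assert (!(a < b) == (isgreaterequal (a, b) || isunordered (a, b)));
  return 0;
}
#endif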
2162 /* Similar, but return the comparison that results if the operands are
2163 swapped. This is safe for floating-point. */
2165 enum tree_code
2166 swap_tree_comparison (enum tree_code code)
2168 switch (code)
2170 case EQ_EXPR:
2171 case NE_EXPR:
2172 case ORDERED_EXPR:
2173 case UNORDERED_EXPR:
2174 case LTGT_EXPR:
2175 case UNEQ_EXPR:
2176 return code;
2177 case GT_EXPR:
2178 return LT_EXPR;
2179 case GE_EXPR:
2180 return LE_EXPR;
2181 case LT_EXPR:
2182 return GT_EXPR;
2183 case LE_EXPR:
2184 return GE_EXPR;
2185 case UNGT_EXPR:
2186 return UNLT_EXPR;
2187 case UNGE_EXPR:
2188 return UNLE_EXPR;
2189 case UNLT_EXPR:
2190 return UNGT_EXPR;
2191 case UNLE_EXPR:
2192 return UNGE_EXPR;
2193 default:
2194 gcc_unreachable ();
2199 /* Convert a comparison tree code from an enum tree_code representation
2200 into a compcode bit-based encoding. This function is the inverse of
2201 compcode_to_comparison. */
2203 static enum comparison_code
2204 comparison_to_compcode (enum tree_code code)
2206 switch (code)
2208 case LT_EXPR:
2209 return COMPCODE_LT;
2210 case EQ_EXPR:
2211 return COMPCODE_EQ;
2212 case LE_EXPR:
2213 return COMPCODE_LE;
2214 case GT_EXPR:
2215 return COMPCODE_GT;
2216 case NE_EXPR:
2217 return COMPCODE_NE;
2218 case GE_EXPR:
2219 return COMPCODE_GE;
2220 case ORDERED_EXPR:
2221 return COMPCODE_ORD;
2222 case UNORDERED_EXPR:
2223 return COMPCODE_UNORD;
2224 case UNLT_EXPR:
2225 return COMPCODE_UNLT;
2226 case UNEQ_EXPR:
2227 return COMPCODE_UNEQ;
2228 case UNLE_EXPR:
2229 return COMPCODE_UNLE;
2230 case UNGT_EXPR:
2231 return COMPCODE_UNGT;
2232 case LTGT_EXPR:
2233 return COMPCODE_LTGT;
2234 case UNGE_EXPR:
2235 return COMPCODE_UNGE;
2236 default:
2237 gcc_unreachable ();
2241 /* Convert a compcode bit-based encoding of a comparison operator back
2242 to GCC's enum tree_code representation. This function is the
2243 inverse of comparison_to_compcode. */
2245 static enum tree_code
2246 compcode_to_comparison (enum comparison_code code)
2248 switch (code)
2250 case COMPCODE_LT:
2251 return LT_EXPR;
2252 case COMPCODE_EQ:
2253 return EQ_EXPR;
2254 case COMPCODE_LE:
2255 return LE_EXPR;
2256 case COMPCODE_GT:
2257 return GT_EXPR;
2258 case COMPCODE_NE:
2259 return NE_EXPR;
2260 case COMPCODE_GE:
2261 return GE_EXPR;
2262 case COMPCODE_ORD:
2263 return ORDERED_EXPR;
2264 case COMPCODE_UNORD:
2265 return UNORDERED_EXPR;
2266 case COMPCODE_UNLT:
2267 return UNLT_EXPR;
2268 case COMPCODE_UNEQ:
2269 return UNEQ_EXPR;
2270 case COMPCODE_UNLE:
2271 return UNLE_EXPR;
2272 case COMPCODE_UNGT:
2273 return UNGT_EXPR;
2274 case COMPCODE_LTGT:
2275 return LTGT_EXPR;
2276 case COMPCODE_UNGE:
2277 return UNGE_EXPR;
2278 default:
2279 gcc_unreachable ();
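/* [Editorial sketch, not part of this file.] The COMPCODE_* values
   handled by the two functions above form a 4-bit truth table over
   {LT, EQ, GT, UNORD}, so the logical AND/OR of two comparisons is
   just the bitwise AND/OR of their codes: */
#if 0
#include <assert.h>

static void
compcode_selfcheck (void)
{
  /* LT|EQ == LE: (a < b || a == b) folds to a <= b.  */
  assert ((COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE);
  /* LE&GE == EQ: (a <= b && a >= b) folds to a == b.  */
  assert ((COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ);
  /* LT|GT == LTGT; adding the unordered bit gives NE.  */
  assert ((COMPCODE_LT | COMPCODE_GT) == COMPCODE_LTGT);
  assert ((COMPCODE_LTGT | COMPCODE_UNORD) == COMPCODE_NE);
}
#endif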
2283 /* Return a tree for the comparison which is the combination of
2284 doing the AND or OR (depending on CODE) of the two operations LCODE
2285 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2286 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2287 if this makes the transformation invalid. */
2289 tree
2290 combine_comparisons (enum tree_code code, enum tree_code lcode,
2291 enum tree_code rcode, tree truth_type,
2292 tree ll_arg, tree lr_arg)
2294 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2295 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2296 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2297 enum comparison_code compcode;
2299 switch (code)
2301 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2302 compcode = lcompcode & rcompcode;
2303 break;
2305 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2306 compcode = lcompcode | rcompcode;
2307 break;
2309 default:
2310 return NULL_TREE;
2313 if (!honor_nans)
2315 /* Eliminate unordered comparisons, as well as LTGT and ORD
2316 which are not used unless the mode has NaNs. */
2317 compcode &= ~COMPCODE_UNORD;
2318 if (compcode == COMPCODE_LTGT)
2319 compcode = COMPCODE_NE;
2320 else if (compcode == COMPCODE_ORD)
2321 compcode = COMPCODE_TRUE;
2323 else if (flag_trapping_math)
2325 /* Check that the original operation and the optimized ones will trap
2326 under the same condition. */
2327 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2328 && (lcompcode != COMPCODE_EQ)
2329 && (lcompcode != COMPCODE_ORD);
2330 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2331 && (rcompcode != COMPCODE_EQ)
2332 && (rcompcode != COMPCODE_ORD);
2333 bool trap = (compcode & COMPCODE_UNORD) == 0
2334 && (compcode != COMPCODE_EQ)
2335 && (compcode != COMPCODE_ORD);
2337 /* In a short-circuited boolean expression the LHS might be
2338 such that the RHS, if evaluated, will never trap. For
2339 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2340 if neither x nor y is NaN. (This is a mixed blessing: for
2341 example, the expression above will never trap, hence
2342 optimizing it to x < y would be invalid). */
2343 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2344 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2345 rtrap = false;
2347 /* If the comparison was short-circuited, and only the RHS
2348 trapped, we may now generate a spurious trap. */
2349 if (rtrap && !ltrap
2350 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2351 return NULL_TREE;
2353 /* If we changed the conditions that cause a trap, we lose. */
2354 if ((ltrap || rtrap) != trap)
2355 return NULL_TREE;
2358 if (compcode == COMPCODE_TRUE)
2359 return constant_boolean_node (true, truth_type);
2360 else if (compcode == COMPCODE_FALSE)
2361 return constant_boolean_node (false, truth_type);
2362 else
2363 return fold_build2 (compcode_to_comparison (compcode),
2364 truth_type, ll_arg, lr_arg);
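/* [Editorial illustration; X, Y and BOOL_TYPE below are hypothetical
   trees, not names from this file.] For example, combining the two
   halves of (X < Y) || (X > Y): */
#if 0
tree t = combine_comparisons (TRUTH_ORIF_EXPR, LT_EXPR, GT_EXPR,
			      BOOL_TYPE, X, Y);
/* Without NaNs, T is X != Y; with NaNs honored it is the LTGT form,
   which still traps on unordered operands exactly as the original
   did, so the trap conditions checked above are preserved.  */
#endif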
2367 /* Return nonzero if CODE is a tree code that represents a truth value. */
2369 static int
2370 truth_value_p (enum tree_code code)
2372 return (TREE_CODE_CLASS (code) == tcc_comparison
2373 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2374 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2375 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2378 /* Return nonzero if two operands (typically of the same tree node)
2379 are necessarily equal. If either argument has side-effects this
2380 function returns zero. FLAGS modifies behavior as follows:
2382 If OEP_ONLY_CONST is set, only return nonzero for constants.
2383 This function tests whether the operands are indistinguishable;
2384 it does not test whether they are equal using C's == operation.
2385 The distinction is important for IEEE floating point, because
2386 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2387 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2389 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2390 even though it may hold multiple values during a function.
2391 This is because a GCC tree node guarantees that nothing else is
2392 executed between the evaluation of its "operands" (which may often
2393 be evaluated in arbitrary order). Hence if the operands themselves
2394 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2395 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2396 unset means assuming isochronic (or instantaneous) tree equivalence.
2397 Unless comparing arbitrary expression trees, such as from different
2398 statements, this flag can usually be left unset.
2400 If OEP_PURE_SAME is set, then pure functions with identical arguments
2401 are considered the same. It is used when the caller has other ways
2402 to ensure that global memory is unchanged in between. */
2405 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2407 /* If either is ERROR_MARK, they aren't equal. */
2408 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2409 return 0;
2411 /* If both types don't have the same signedness, then we can't consider
2412 them equal. We must check this before the STRIP_NOPS calls
2413 because they may change the signedness of the arguments. */
2414 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2415 return 0;
2417 STRIP_NOPS (arg0);
2418 STRIP_NOPS (arg1);
2420 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2421 /* This is needed for conversions and for COMPONENT_REF.
2422 Might as well play it safe and always test this. */
2423 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2424 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2425 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2426 return 0;
2428 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2429 We don't care about side effects in that case because the SAVE_EXPR
2430 takes care of that for us. In all other cases, two expressions are
2431 equal if they have no side effects. If we have two identical
2432 expressions with side effects that should be treated the same due
2433 to the only side effects being identical SAVE_EXPR's, that will
2434 be detected in the recursive calls below. */
2435 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2436 && (TREE_CODE (arg0) == SAVE_EXPR
2437 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2438 return 1;
2440 /* Next handle constant cases, those for which we can return 1 even
2441 if ONLY_CONST is set. */
2442 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2443 switch (TREE_CODE (arg0))
2445 case INTEGER_CST:
2446 return (! TREE_CONSTANT_OVERFLOW (arg0)
2447 && ! TREE_CONSTANT_OVERFLOW (arg1)
2448 && tree_int_cst_equal (arg0, arg1));
2450 case REAL_CST:
2451 return (! TREE_CONSTANT_OVERFLOW (arg0)
2452 && ! TREE_CONSTANT_OVERFLOW (arg1)
2453 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2454 TREE_REAL_CST (arg1)));
2456 case VECTOR_CST:
2458 tree v1, v2;
2460 if (TREE_CONSTANT_OVERFLOW (arg0)
2461 || TREE_CONSTANT_OVERFLOW (arg1))
2462 return 0;
2464 v1 = TREE_VECTOR_CST_ELTS (arg0);
2465 v2 = TREE_VECTOR_CST_ELTS (arg1);
2466 while (v1 && v2)
2468 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2469 flags))
2470 return 0;
2471 v1 = TREE_CHAIN (v1);
2472 v2 = TREE_CHAIN (v2);
2475 return v1 == v2;
2478 case COMPLEX_CST:
2479 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2480 flags)
2481 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2482 flags));
2484 case STRING_CST:
2485 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2486 && ! memcmp (TREE_STRING_POINTER (arg0),
2487 TREE_STRING_POINTER (arg1),
2488 TREE_STRING_LENGTH (arg0)));
2490 case ADDR_EXPR:
2491 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2493 default:
2494 break;
2497 if (flags & OEP_ONLY_CONST)
2498 return 0;
2500 /* Define macros to test an operand from arg0 and arg1 for equality and a
2501 variant that allows null and views null as being different from any
2502 non-null value. In the latter case, if either is null, then both
2503 must be; otherwise, do the normal comparison. */
2504 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2505 TREE_OPERAND (arg1, N), flags)
2507 #define OP_SAME_WITH_NULL(N) \
2508 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2509 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2511 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2513 case tcc_unary:
2514 /* Two conversions are equal only if signedness and modes match. */
2515 switch (TREE_CODE (arg0))
2517 case NOP_EXPR:
2518 case CONVERT_EXPR:
2519 case FIX_CEIL_EXPR:
2520 case FIX_TRUNC_EXPR:
2521 case FIX_FLOOR_EXPR:
2522 case FIX_ROUND_EXPR:
2523 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2524 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2525 return 0;
2526 break;
2527 default:
2528 break;
2531 return OP_SAME (0);
2534 case tcc_comparison:
2535 case tcc_binary:
2536 if (OP_SAME (0) && OP_SAME (1))
2537 return 1;
2539 /* For commutative ops, allow the other order. */
2540 return (commutative_tree_code (TREE_CODE (arg0))
2541 && operand_equal_p (TREE_OPERAND (arg0, 0),
2542 TREE_OPERAND (arg1, 1), flags)
2543 && operand_equal_p (TREE_OPERAND (arg0, 1),
2544 TREE_OPERAND (arg1, 0), flags));
2546 case tcc_reference:
2547 /* If either of the pointer (or reference) expressions we are
2548 dereferencing contain a side effect, these cannot be equal. */
2549 if (TREE_SIDE_EFFECTS (arg0)
2550 || TREE_SIDE_EFFECTS (arg1))
2551 return 0;
2553 switch (TREE_CODE (arg0))
2555 case INDIRECT_REF:
2556 case ALIGN_INDIRECT_REF:
2557 case MISALIGNED_INDIRECT_REF:
2558 case REALPART_EXPR:
2559 case IMAGPART_EXPR:
2560 return OP_SAME (0);
2562 case ARRAY_REF:
2563 case ARRAY_RANGE_REF:
2564 /* Operands 2 and 3 may be null. */
2565 return (OP_SAME (0)
2566 && OP_SAME (1)
2567 && OP_SAME_WITH_NULL (2)
2568 && OP_SAME_WITH_NULL (3));
2570 case COMPONENT_REF:
2571 /* Handle operand 2 the same as for ARRAY_REF. */
2572 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2574 case BIT_FIELD_REF:
2575 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2577 default:
2578 return 0;
2581 case tcc_expression:
2582 switch (TREE_CODE (arg0))
2584 case ADDR_EXPR:
2585 case TRUTH_NOT_EXPR:
2586 return OP_SAME (0);
2588 case TRUTH_ANDIF_EXPR:
2589 case TRUTH_ORIF_EXPR:
2590 return OP_SAME (0) && OP_SAME (1);
2592 case TRUTH_AND_EXPR:
2593 case TRUTH_OR_EXPR:
2594 case TRUTH_XOR_EXPR:
2595 if (OP_SAME (0) && OP_SAME (1))
2596 return 1;
2598 /* Otherwise take into account this is a commutative operation. */
2599 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2600 TREE_OPERAND (arg1, 1), flags)
2601 && operand_equal_p (TREE_OPERAND (arg0, 1),
2602 TREE_OPERAND (arg1, 0), flags));
2604 case CALL_EXPR:
2605 /* If the CALL_EXPRs call different functions, then they
2606 clearly cannot be equal. */
2607 if (!OP_SAME (0))
2608 return 0;
2611 unsigned int cef = call_expr_flags (arg0);
2612 if (flags & OEP_PURE_SAME)
2613 cef &= ECF_CONST | ECF_PURE;
2614 else
2615 cef &= ECF_CONST;
2616 if (!cef)
2617 return 0;
2620 /* Now see if all the arguments are the same. operand_equal_p
2621 does not handle TREE_LIST, so we walk the operands here
2622 feeding them to operand_equal_p. */
2623 arg0 = TREE_OPERAND (arg0, 1);
2624 arg1 = TREE_OPERAND (arg1, 1);
2625 while (arg0 && arg1)
2627 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2628 flags))
2629 return 0;
2631 arg0 = TREE_CHAIN (arg0);
2632 arg1 = TREE_CHAIN (arg1);
2635 /* If we get here and both argument lists are exhausted
2636 then the CALL_EXPRs are equal. */
2637 return ! (arg0 || arg1);
2639 default:
2640 return 0;
2643 case tcc_declaration:
2644 /* Consider __builtin_sqrt equal to sqrt. */
2645 return (TREE_CODE (arg0) == FUNCTION_DECL
2646 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2647 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2648 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2650 default:
2651 return 0;
2654 #undef OP_SAME
2655 #undef OP_SAME_WITH_NULL
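/* [Editorial sketch, not part of this file.] The REAL_CST case above
   uses REAL_VALUES_IDENTICAL rather than numeric equality because ==
   cannot distinguish -0.0 from 0.0 (and is false on identical NaNs): */
#if 0
#include <assert.h>
#include <math.h>
#include <string.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0;
  /* Numerically equal...  */
  assert (pz == nz);
  /* ...yet distinguishable, so they must not be treated as the same
     operand when folding.  */
  assert (signbit (pz) != signbit (nz));
  assert (memcmp (&pz, &nz, sizeof pz) != 0);
  return 0;
}
#endif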
2658 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2659 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2661 When in doubt, return 0. */
2663 static int
2664 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2666 int unsignedp1, unsignedpo;
2667 tree primarg0, primarg1, primother;
2668 unsigned int correct_width;
2670 if (operand_equal_p (arg0, arg1, 0))
2671 return 1;
2673 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2674 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2675 return 0;
2677 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2678 and see if the inner values are the same. This removes any
2679 signedness comparison, which doesn't matter here. */
2680 primarg0 = arg0, primarg1 = arg1;
2681 STRIP_NOPS (primarg0);
2682 STRIP_NOPS (primarg1);
2683 if (operand_equal_p (primarg0, primarg1, 0))
2684 return 1;
2686 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2687 actual comparison operand, ARG0.
2689 First throw away any conversions to wider types
2690 already present in the operands. */
2692 primarg1 = get_narrower (arg1, &unsignedp1);
2693 primother = get_narrower (other, &unsignedpo);
2695 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2696 if (unsignedp1 == unsignedpo
2697 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2698 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2700 tree type = TREE_TYPE (arg0);
2702 /* Make sure shorter operand is extended the right way
2703 to match the longer operand. */
2704 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2705 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2707 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2708 return 1;
2711 return 0;
2714 /* See if ARG is an expression that is either a comparison or is performing
2715 arithmetic on comparisons. The comparisons must only be comparing
2716 two different values, which will be stored in *CVAL1 and *CVAL2; if
2717 they are nonzero it means that some operands have already been found.
2718 No variables may be used anywhere else in the expression except in the
2719 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2720 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2722 If this is true, return 1. Otherwise, return zero. */
2724 static int
2725 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2727 enum tree_code code = TREE_CODE (arg);
2728 enum tree_code_class class = TREE_CODE_CLASS (code);
2730 /* We can handle some of the tcc_expression cases here. */
2731 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2732 class = tcc_unary;
2733 else if (class == tcc_expression
2734 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2735 || code == COMPOUND_EXPR))
2736 class = tcc_binary;
2738 else if (class == tcc_expression && code == SAVE_EXPR
2739 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2741 /* If we've already found a CVAL1 or CVAL2, this expression is
2742 too complex to handle. */
2743 if (*cval1 || *cval2)
2744 return 0;
2746 class = tcc_unary;
2747 *save_p = 1;
2750 switch (class)
2752 case tcc_unary:
2753 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2755 case tcc_binary:
2756 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2757 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2758 cval1, cval2, save_p));
2760 case tcc_constant:
2761 return 1;
2763 case tcc_expression:
2764 if (code == COND_EXPR)
2765 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2766 cval1, cval2, save_p)
2767 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2768 cval1, cval2, save_p)
2769 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2770 cval1, cval2, save_p));
2771 return 0;
2773 case tcc_comparison:
2774 /* First see if we can handle the first operand, then the second. For
2775 the second operand, we know *CVAL1 can't be zero. It must be that
2776 one side of the comparison is each of the values; test for the
2777 case where this isn't true by failing if the two operands
2778 are the same. */
2780 if (operand_equal_p (TREE_OPERAND (arg, 0),
2781 TREE_OPERAND (arg, 1), 0))
2782 return 0;
2784 if (*cval1 == 0)
2785 *cval1 = TREE_OPERAND (arg, 0);
2786 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2788 else if (*cval2 == 0)
2789 *cval2 = TREE_OPERAND (arg, 0);
2790 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2792 else
2793 return 0;
2795 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2797 else if (*cval2 == 0)
2798 *cval2 = TREE_OPERAND (arg, 1);
2799 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2801 else
2802 return 0;
2804 return 1;
2806 default:
2807 return 0;
2811 /* ARG is a tree that is known to contain just arithmetic operations and
2812 comparisons. Evaluate the operations in the tree substituting NEW0 for
2813 any occurrence of OLD0 as an operand of a comparison and likewise for
2814 NEW1 and OLD1. */
2816 static tree
2817 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2819 tree type = TREE_TYPE (arg);
2820 enum tree_code code = TREE_CODE (arg);
2821 enum tree_code_class class = TREE_CODE_CLASS (code);
2823 /* We can handle some of the tcc_expression cases here. */
2824 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2825 class = tcc_unary;
2826 else if (class == tcc_expression
2827 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2828 class = tcc_binary;
2830 switch (class)
2832 case tcc_unary:
2833 return fold_build1 (code, type,
2834 eval_subst (TREE_OPERAND (arg, 0),
2835 old0, new0, old1, new1));
2837 case tcc_binary:
2838 return fold_build2 (code, type,
2839 eval_subst (TREE_OPERAND (arg, 0),
2840 old0, new0, old1, new1),
2841 eval_subst (TREE_OPERAND (arg, 1),
2842 old0, new0, old1, new1));
2844 case tcc_expression:
2845 switch (code)
2847 case SAVE_EXPR:
2848 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2850 case COMPOUND_EXPR:
2851 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2853 case COND_EXPR:
2854 return fold_build3 (code, type,
2855 eval_subst (TREE_OPERAND (arg, 0),
2856 old0, new0, old1, new1),
2857 eval_subst (TREE_OPERAND (arg, 1),
2858 old0, new0, old1, new1),
2859 eval_subst (TREE_OPERAND (arg, 2),
2860 old0, new0, old1, new1));
2861 default:
2862 break;
2864 /* Fall through - ??? */
2866 case tcc_comparison:
2868 tree arg0 = TREE_OPERAND (arg, 0);
2869 tree arg1 = TREE_OPERAND (arg, 1);
2871 /* We need to check both for exact equality and tree equality. The
2872 former will be true if the operand has a side-effect. In that
2873 case, we know the operand occurred exactly once. */
2875 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2876 arg0 = new0;
2877 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2878 arg0 = new1;
2880 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2881 arg1 = new0;
2882 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2883 arg1 = new1;
2885 return fold_build2 (code, type, arg0, arg1);
2888 default:
2889 return arg;
2893 /* Return a tree for the case when the result of an expression is RESULT
2894 converted to TYPE and OMITTED was previously an operand of the expression
2895 but is now not needed (e.g., we folded OMITTED * 0).
2897 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2898 the conversion of RESULT to TYPE. */
2900 tree
2901 omit_one_operand (tree type, tree result, tree omitted)
2903 tree t = fold_convert (type, result);
2905 if (TREE_SIDE_EFFECTS (omitted))
2906 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2908 return non_lvalue (t);
2911 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2913 static tree
2914 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2916 tree t = fold_convert (type, result);
2918 if (TREE_SIDE_EFFECTS (omitted))
2919 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2921 return pedantic_non_lvalue (t);
2924 /* Return a tree for the case when the result of an expression is RESULT
2925 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2926 of the expression but are now not needed.
2928 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2929 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2930 evaluated before OMITTED2. Otherwise, if neither has side effects,
2931 just do the conversion of RESULT to TYPE. */
2933 tree
2934 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2936 tree t = fold_convert (type, result);
2938 if (TREE_SIDE_EFFECTS (omitted2))
2939 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2940 if (TREE_SIDE_EFFECTS (omitted1))
2941 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2943 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
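/* [Editorial illustration, not part of this file.] These helpers are
   what keep a fold such as x * 0 -> 0 from discarding side effects:
   the omitted operand survives on the left of a COMPOUND_EXPR. */
#if 0
/* Folding f () * 0 must still call f, so the folded result behaves
   like the C expression (f (), 0) rather than plain 0.  */
extern int f (void);
int use (void) { return f () * 0; }
#endif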
2947 /* Return a simplified tree node for the truth-negation of ARG. This
2948 never alters ARG itself. We assume that ARG is an operation that
2949 returns a truth value (0 or 1).
2951 FIXME: one would think we would fold the result, but it causes
2952 problems with the dominator optimizer. */
2953 tree
2954 invert_truthvalue (tree arg)
2956 tree type = TREE_TYPE (arg);
2957 enum tree_code code = TREE_CODE (arg);
2959 if (code == ERROR_MARK)
2960 return arg;
2962 /* If this is a comparison, we can simply invert it, except for
2963 floating-point non-equality comparisons, in which case we just
2964 enclose a TRUTH_NOT_EXPR around what we have. */
2966 if (TREE_CODE_CLASS (code) == tcc_comparison)
2968 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2969 if (FLOAT_TYPE_P (op_type)
2970 && flag_trapping_math
2971 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2972 && code != NE_EXPR && code != EQ_EXPR)
2973 return build1 (TRUTH_NOT_EXPR, type, arg);
2974 else
2976 code = invert_tree_comparison (code,
2977 HONOR_NANS (TYPE_MODE (op_type)));
2978 if (code == ERROR_MARK)
2979 return build1 (TRUTH_NOT_EXPR, type, arg);
2980 else
2981 return build2 (code, type,
2982 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2986 switch (code)
2988 case INTEGER_CST:
2989 return constant_boolean_node (integer_zerop (arg), type);
2991 case TRUTH_AND_EXPR:
2992 return build2 (TRUTH_OR_EXPR, type,
2993 invert_truthvalue (TREE_OPERAND (arg, 0)),
2994 invert_truthvalue (TREE_OPERAND (arg, 1)));
2996 case TRUTH_OR_EXPR:
2997 return build2 (TRUTH_AND_EXPR, type,
2998 invert_truthvalue (TREE_OPERAND (arg, 0)),
2999 invert_truthvalue (TREE_OPERAND (arg, 1)));
3001 case TRUTH_XOR_EXPR:
3002 /* Here we can invert either operand. We invert the first operand
3003 unless the second operand is a TRUTH_NOT_EXPR in which case our
3004 result is the XOR of the first operand with the inside of the
3005 negation of the second operand. */
3007 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3008 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3009 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3010 else
3011 return build2 (TRUTH_XOR_EXPR, type,
3012 invert_truthvalue (TREE_OPERAND (arg, 0)),
3013 TREE_OPERAND (arg, 1));
3015 case TRUTH_ANDIF_EXPR:
3016 return build2 (TRUTH_ORIF_EXPR, type,
3017 invert_truthvalue (TREE_OPERAND (arg, 0)),
3018 invert_truthvalue (TREE_OPERAND (arg, 1)));
3020 case TRUTH_ORIF_EXPR:
3021 return build2 (TRUTH_ANDIF_EXPR, type,
3022 invert_truthvalue (TREE_OPERAND (arg, 0)),
3023 invert_truthvalue (TREE_OPERAND (arg, 1)));
3025 case TRUTH_NOT_EXPR:
3026 return TREE_OPERAND (arg, 0);
3028 case COND_EXPR:
3029 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3030 invert_truthvalue (TREE_OPERAND (arg, 1)),
3031 invert_truthvalue (TREE_OPERAND (arg, 2)));
3033 case COMPOUND_EXPR:
3034 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3035 invert_truthvalue (TREE_OPERAND (arg, 1)));
3037 case NON_LVALUE_EXPR:
3038 return invert_truthvalue (TREE_OPERAND (arg, 0));
3040 case NOP_EXPR:
3041 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3042 break;
3044 case CONVERT_EXPR:
3045 case FLOAT_EXPR:
3046 return build1 (TREE_CODE (arg), type,
3047 invert_truthvalue (TREE_OPERAND (arg, 0)));
3049 case BIT_AND_EXPR:
3050 if (!integer_onep (TREE_OPERAND (arg, 1)))
3051 break;
3052 return build2 (EQ_EXPR, type, arg,
3053 fold_convert (type, integer_zero_node));
3055 case SAVE_EXPR:
3056 return build1 (TRUTH_NOT_EXPR, type, arg);
3058 case CLEANUP_POINT_EXPR:
3059 return build1 (CLEANUP_POINT_EXPR, type,
3060 invert_truthvalue (TREE_OPERAND (arg, 0)));
3062 default:
3063 break;
3065 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3066 return build1 (TRUTH_NOT_EXPR, type, arg);
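/* [Editorial sketch, not part of this file.] The TRUTH_XOR_EXPR case
   above relies on the identity !(a ^ b) == (!a) ^ b for truth values,
   so only one operand needs inverting. Exhaustive check: */
#if 0
#include <assert.h>

int
main (void)
{
  int a, b;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      assert ((!(a ^ b)) == ((!a) ^ b));
  return 0;
}
#endif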
3069 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3070 operands are another bit-wise operation with a common input. If so,
3071 distribute the bit operations to save an operation and possibly two if
3072 constants are involved. For example, convert
3073 (A | B) & (A | C) into A | (B & C)
3074 Further simplification will occur if B and C are constants.
3076 If this optimization cannot be done, 0 will be returned. */
3078 static tree
3079 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3081 tree common;
3082 tree left, right;
3084 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3085 || TREE_CODE (arg0) == code
3086 || (TREE_CODE (arg0) != BIT_AND_EXPR
3087 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3088 return 0;
3090 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3092 common = TREE_OPERAND (arg0, 0);
3093 left = TREE_OPERAND (arg0, 1);
3094 right = TREE_OPERAND (arg1, 1);
3096 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3098 common = TREE_OPERAND (arg0, 0);
3099 left = TREE_OPERAND (arg0, 1);
3100 right = TREE_OPERAND (arg1, 0);
3102 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3104 common = TREE_OPERAND (arg0, 1);
3105 left = TREE_OPERAND (arg0, 0);
3106 right = TREE_OPERAND (arg1, 1);
3108 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3110 common = TREE_OPERAND (arg0, 1);
3111 left = TREE_OPERAND (arg0, 0);
3112 right = TREE_OPERAND (arg1, 0);
3114 else
3115 return 0;
3117 return fold_build2 (TREE_CODE (arg0), type, common,
3118 fold_build2 (code, type, left, right));
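/* [Editorial sketch, not part of this file.] The transformation above
   is the bitwise distribution law; it holds bit-for-bit and can be
   checked exhaustively on a small word: */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned a, b, c;
  for (a = 0; a < 16; a++)
    for (b = 0; b < 16; b++)
      for (c = 0; c < 16; c++)
	{
	  assert (((a | b) & (a | c)) == (a | (b & c)));
	  assert (((a & b) | (a & c)) == (a & (b | c)));
	}
  return 0;
}
#endif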
3121 /* Knowing that ARG0 and ARG1 are each either a MULT_EXPR or an RDIV_EXPR,
3122 simplify a binary operation with code CODE. This optimization is unsafe. */
3123 static tree
3124 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3126 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3127 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3129 /* (A / C) +- (B / C) -> (A +- B) / C. */
3130 if (mul0 == mul1
3131 && operand_equal_p (TREE_OPERAND (arg0, 1),
3132 TREE_OPERAND (arg1, 1), 0))
3133 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3134 fold_build2 (code, type,
3135 TREE_OPERAND (arg0, 0),
3136 TREE_OPERAND (arg1, 0)),
3137 TREE_OPERAND (arg0, 1));
3139 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3140 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3141 TREE_OPERAND (arg1, 0), 0)
3142 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3143 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3145 REAL_VALUE_TYPE r0, r1;
3146 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3147 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3148 if (!mul0)
3149 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3150 if (!mul1)
3151 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3152 real_arithmetic (&r0, code, &r0, &r1);
3153 return fold_build2 (MULT_EXPR, type,
3154 TREE_OPERAND (arg0, 0),
3155 build_real (type, r0));
3158 return NULL_TREE;
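/* [Editorial illustration, not part of this file.] Both transforms
   above change rounding: a/c + b/c rounds each quotient before the
   add, while (a + b)/c rounds the sum first, so the results can
   differ in the last bit. That is why the optimization is "unsafe"
   and only valid under relaxed floating-point semantics. */
#if 0
/* The two shapes being traded; usually equal, but not guaranteed.  */
double before (double a, double b, double c) { return a / c + b / c; }
double after  (double a, double b, double c) { return (a + b) / c; }
#endif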
3161 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3162 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3164 static tree
3165 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3166 int unsignedp)
3168 tree result;
3170 if (bitpos == 0)
3172 tree size = TYPE_SIZE (TREE_TYPE (inner));
3173 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3174 || POINTER_TYPE_P (TREE_TYPE (inner)))
3175 && host_integerp (size, 0)
3176 && tree_low_cst (size, 0) == bitsize)
3177 return fold_convert (type, inner);
3180 result = build3 (BIT_FIELD_REF, type, inner,
3181 size_int (bitsize), bitsize_int (bitpos));
3183 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3185 return result;
3188 /* Optimize a bit-field compare.
3190 There are two cases: First is a compare against a constant and the
3191 second is a comparison of two items where the fields are at the same
3192 bit position relative to the start of a chunk (byte, halfword, word)
3193 large enough to contain it. In these cases we can avoid the shift
3194 implicit in bitfield extractions.
3196 For constants, we emit a compare of the shifted constant with the
3197 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3198 compared. For two fields at the same position, we do the ANDs with the
3199 similar mask and compare the result of the ANDs.
3201 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3202 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3203 are the left and right operands of the comparison, respectively.
3205 If the optimization described above can be done, we return the resulting
3206 tree. Otherwise we return zero. */
3208 static tree
3209 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3210 tree lhs, tree rhs)
3212 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3213 tree type = TREE_TYPE (lhs);
3214 tree signed_type, unsigned_type;
3215 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3216 enum machine_mode lmode, rmode, nmode;
3217 int lunsignedp, runsignedp;
3218 int lvolatilep = 0, rvolatilep = 0;
3219 tree linner, rinner = NULL_TREE;
3220 tree mask;
3221 tree offset;
3223 /* Get all the information about the extractions being done. If the bit size
3224 is the same as the size of the underlying object, we aren't doing an
3225 extraction at all and so can do nothing. We also don't want to
3226 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3227 then will no longer be able to replace it. */
3228 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3229 &lunsignedp, &lvolatilep, false);
3230 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3231 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3232 return 0;
3234 if (!const_p)
3236 /* If this is not a constant, we can only do something if bit positions,
3237 sizes, and signedness are the same. */
3238 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3239 &runsignedp, &rvolatilep, false);
3241 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3242 || lunsignedp != runsignedp || offset != 0
3243 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3244 return 0;
3247 /* See if we can find a mode to refer to this field. We should be able to,
3248 but fail if we can't. */
3249 nmode = get_best_mode (lbitsize, lbitpos,
3250 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3251 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3252 TYPE_ALIGN (TREE_TYPE (rinner))),
3253 word_mode, lvolatilep || rvolatilep);
3254 if (nmode == VOIDmode)
3255 return 0;
3257 /* Set signed and unsigned types of the precision of this mode for the
3258 shifts below. */
3259 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3260 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3262 /* Compute the bit position and size for the new reference and our offset
3263 within it. If the new reference is the same size as the original, we
3264 won't optimize anything, so return zero. */
3265 nbitsize = GET_MODE_BITSIZE (nmode);
3266 nbitpos = lbitpos & ~ (nbitsize - 1);
3267 lbitpos -= nbitpos;
3268 if (nbitsize == lbitsize)
3269 return 0;
3271 if (BYTES_BIG_ENDIAN)
3272 lbitpos = nbitsize - lbitsize - lbitpos;
3274 /* Make the mask to be used against the extracted field. */
3275 mask = build_int_cst (unsigned_type, -1);
3276 mask = force_fit_type (mask, 0, false, false);
3277 mask = fold_convert (unsigned_type, mask);
3278 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3279 mask = const_binop (RSHIFT_EXPR, mask,
3280 size_int (nbitsize - lbitsize - lbitpos), 0);
3282 if (! const_p)
3283 /* If not comparing with constant, just rework the comparison
3284 and return. */
3285 return build2 (code, compare_type,
3286 build2 (BIT_AND_EXPR, unsigned_type,
3287 make_bit_field_ref (linner, unsigned_type,
3288 nbitsize, nbitpos, 1),
3289 mask),
3290 build2 (BIT_AND_EXPR, unsigned_type,
3291 make_bit_field_ref (rinner, unsigned_type,
3292 nbitsize, nbitpos, 1),
3293 mask));
3295 /* Otherwise, we are handling the constant case. See if the constant is too
3296 big for the field. Warn and return a tree for 0 (false) if so. We do
3297 this not only for its own sake, but to avoid having to test for this
3298 error case below. If we didn't, we might generate wrong code.
3300 For unsigned fields, the constant shifted right by the field length should
3301 be all zero. For signed fields, the high-order bits should agree with
3302 the sign bit. */
3304 if (lunsignedp)
3306 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3307 fold_convert (unsigned_type, rhs),
3308 size_int (lbitsize), 0)))
3310 warning (0, "comparison is always %d due to width of bit-field",
3311 code == NE_EXPR);
3312 return constant_boolean_node (code == NE_EXPR, compare_type);
3315 else
3317 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3318 size_int (lbitsize - 1), 0);
3319 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3321 warning (0, "comparison is always %d due to width of bit-field",
3322 code == NE_EXPR);
3323 return constant_boolean_node (code == NE_EXPR, compare_type);
3327 /* Single-bit compares should always be against zero. */
3328 if (lbitsize == 1 && ! integer_zerop (rhs))
3330 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3331 rhs = fold_convert (type, integer_zero_node);
3334 /* Make a new bitfield reference, shift the constant over the
3335 appropriate number of bits and mask it with the computed mask
3336 (in case this was a signed field). If we changed it, make a new one. */
3337 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3338 if (lvolatilep)
3340 TREE_SIDE_EFFECTS (lhs) = 1;
3341 TREE_THIS_VOLATILE (lhs) = 1;
3344 rhs = fold (const_binop (BIT_AND_EXPR,
3345 const_binop (LSHIFT_EXPR,
3346 fold_convert (unsigned_type, rhs),
3347 size_int (lbitpos), 0),
3348 mask, 0));
3350 return build2 (code, compare_type,
3351 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3352 rhs);
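/* [Editorial illustration, not part of this file; the struct layout
   and little-endian bit numbering below are assumptions.] The intent
   of the fold above, in C terms: compare the containing word under a
   mask instead of extracting the field with shifts. */
#if 0
struct s { unsigned pad : 5; unsigned f : 3; unsigned rest : 24; };

/* Naive form: extract (shift + mask), then compare.  */
int cmp_naive (const struct s *p) { return p->f == 3; }

/* Folded form: mask the word in place and compare against the
   constant pre-shifted into position; no shift of the load needed.  */
int cmp_folded (const unsigned *word)
{ return (*word & (0x7u << 5)) == (3u << 5); }
#endif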
3355 /* Subroutine for fold_truthop: decode a field reference.
3357 If EXP is a comparison reference, we return the innermost reference.
3359 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3360 set to the starting bit number.
3362 If the innermost field can be completely contained in a mode-sized
3363 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3365 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3366 otherwise it is not changed.
3368 *PUNSIGNEDP is set to the signedness of the field.
3370 *PMASK is set to the mask used. This is either contained in a
3371 BIT_AND_EXPR or derived from the width of the field.
3373 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3375 Return 0 if this is not a component reference or is one that we can't
3376 do anything with. */
3378 static tree
3379 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3380 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3381 int *punsignedp, int *pvolatilep,
3382 tree *pmask, tree *pand_mask)
3384 tree outer_type = 0;
3385 tree and_mask = 0;
3386 tree mask, inner, offset;
3387 tree unsigned_type;
3388 unsigned int precision;
3390 /* All the optimizations using this function assume integer fields.
3391 There are problems with FP fields since the type_for_size call
3392 below can fail for, e.g., XFmode. */
3393 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3394 return 0;
3396 /* We are interested in the bare arrangement of bits, so strip everything
3397 that doesn't affect the machine mode. However, record the type of the
3398 outermost expression if it may matter below. */
3399 if (TREE_CODE (exp) == NOP_EXPR
3400 || TREE_CODE (exp) == CONVERT_EXPR
3401 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3402 outer_type = TREE_TYPE (exp);
3403 STRIP_NOPS (exp);
3405 if (TREE_CODE (exp) == BIT_AND_EXPR)
3407 and_mask = TREE_OPERAND (exp, 1);
3408 exp = TREE_OPERAND (exp, 0);
3409 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3410 if (TREE_CODE (and_mask) != INTEGER_CST)
3411 return 0;
3414 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3415 punsignedp, pvolatilep, false);
3416 if ((inner == exp && and_mask == 0)
3417 || *pbitsize < 0 || offset != 0
3418 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3419 return 0;
3421 /* If the number of bits in the reference is the same as the bitsize of
3422 the outer type, then the outer type gives the signedness. Otherwise
3423 (in case of a small bitfield) the signedness is unchanged. */
3424 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3425 *punsignedp = TYPE_UNSIGNED (outer_type);
3427 /* Compute the mask to access the bitfield. */
3428 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3429 precision = TYPE_PRECISION (unsigned_type);
3431 mask = build_int_cst (unsigned_type, -1);
3432 mask = force_fit_type (mask, 0, false, false);
3434 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3435 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3437 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3438 if (and_mask != 0)
3439 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3440 fold_convert (unsigned_type, and_mask), mask);
3442 *pmask = mask;
3443 *pand_mask = and_mask;
3444 return inner;
3447 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3448 bit positions. */
3450 static int
3451 all_ones_mask_p (tree mask, int size)
3453 tree type = TREE_TYPE (mask);
3454 unsigned int precision = TYPE_PRECISION (type);
3455 tree tmask;
3457 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3458 tmask = force_fit_type (tmask, 0, false, false);
3460 return
3461 tree_int_cst_equal (mask,
3462 const_binop (RSHIFT_EXPR,
3463 const_binop (LSHIFT_EXPR, tmask,
3464 size_int (precision - size),
3466 size_int (precision - size), 0));
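/* [Editorial sketch, not part of this file.] The LSHIFT/RSHIFT pair
   above builds the canonical low-order mask of SIZE ones, i.e. the
   value (1 << size) - 1, without ever shifting by a full word: */
#if 0
#include <assert.h>

int
main (void)
{
  const unsigned precision = 32;
  unsigned size;
  for (size = 1; size < precision; size++)
    {
      /* Shift an all-ones word left, then logically right again.  */
      unsigned mask = (~0u << (precision - size)) >> (precision - size);
      assert (mask == (1u << size) - 1);
    }
  return 0;
}
#endif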
3469 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3470 represents the sign bit of EXP's type. If EXP represents a sign
3471 or zero extension, also test VAL against the unextended type.
3472 The return value is the (sub)expression whose sign bit is VAL,
3473 or NULL_TREE otherwise. */
3475 static tree
3476 sign_bit_p (tree exp, tree val)
3478 unsigned HOST_WIDE_INT mask_lo, lo;
3479 HOST_WIDE_INT mask_hi, hi;
3480 int width;
3481 tree t;
3483 /* Tree EXP must have an integral type. */
3484 t = TREE_TYPE (exp);
3485 if (! INTEGRAL_TYPE_P (t))
3486 return NULL_TREE;
3488 /* Tree VAL must be an integer constant. */
3489 if (TREE_CODE (val) != INTEGER_CST
3490 || TREE_CONSTANT_OVERFLOW (val))
3491 return NULL_TREE;
3493 width = TYPE_PRECISION (t);
3494 if (width > HOST_BITS_PER_WIDE_INT)
3496 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3497 lo = 0;
3499 mask_hi = ((unsigned HOST_WIDE_INT) -1
3500 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3501 mask_lo = -1;
3503 else
3505 hi = 0;
3506 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3508 mask_hi = 0;
3509 mask_lo = ((unsigned HOST_WIDE_INT) -1
3510 >> (HOST_BITS_PER_WIDE_INT - width));
3513 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3514 treat VAL as if it were unsigned. */
3515 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3516 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3517 return exp;
3519 /* Handle extension from a narrower type. */
3520 if (TREE_CODE (exp) == NOP_EXPR
3521 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3522 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3524 return NULL_TREE;
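/* [Editorial sketch, not part of this file.] For a WIDTH-bit type the
   sign-bit constant tested above is just 1 << (width - 1); the hi/lo
   pair exists only because the constant can span two host words.
   Masking VAL to WIDTH bits first makes a sign-extended constant
   compare equal too (two's complement assumed): */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int width = 16;
  uint64_t sign = (uint64_t) 1 << (width - 1);
  int64_t val = (int16_t) 0x8000;	/* -32768: high bits all set */
  uint64_t mask = ~(uint64_t) 0 >> (64 - width);
  assert (((uint64_t) val & mask) == sign);
  return 0;
}
#endif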
3527 /* Subroutine for fold_truthop: determine if an operand is simple enough
3528 to be evaluated unconditionally. */
3530 static int
3531 simple_operand_p (tree exp)
3533 /* Strip any conversions that don't change the machine mode. */
3534 STRIP_NOPS (exp);
3536 return (CONSTANT_CLASS_P (exp)
3537 || TREE_CODE (exp) == SSA_NAME
3538 || (DECL_P (exp)
3539 && ! TREE_ADDRESSABLE (exp)
3540 && ! TREE_THIS_VOLATILE (exp)
3541 && ! DECL_NONLOCAL (exp)
3542 /* Don't regard global variables as simple. They may be
3543 allocated in ways unknown to the compiler (shared memory,
3544 #pragma weak, etc). */
3545 && ! TREE_PUBLIC (exp)
3546 && ! DECL_EXTERNAL (exp)
3547 /* Loading a static variable is unduly expensive, but global
3548 registers aren't expensive. */
3549 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3552 /* The following functions are subroutines to fold_range_test and allow it to
3553 try to change a logical combination of comparisons into a range test.
3555 For example, both
3556 X == 2 || X == 3 || X == 4 || X == 5
3557 and
3558 X >= 2 && X <= 5
3559 are converted to
3560 (unsigned) (X - 2) <= 3
3562 We describe each set of comparisons as being either inside or outside
3563 a range, using a variable named like IN_P, and then describe the
3564 range with a lower and upper bound. If one of the bounds is omitted,
3565 it represents either the highest or lowest value of the type.
3567 In the comments below, we represent a range by two numbers in brackets
3568 preceded by a "+" to designate being inside that range, or a "-" to
3569 designate being outside that range, so the condition can be inverted by
3570 flipping the prefix. An omitted bound is represented by a "-". For
3571 example, "- [-, 10]" means being outside the range starting at the lowest
3572 possible value and ending at 10, in other words, being greater than 10.
3573 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3574 always false.
3576 We set up things so that the missing bounds are handled in a consistent
3577 manner so neither a missing bound nor "true" and "false" need to be
3578 handled using a special case. */
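/* [Editorial sketch, not part of this file.] The unsigned-subtract
   trick works because wrap-around maps every value below the low
   bound to something huge. Brute-force check of the example above: */
#if 0
#include <assert.h>

int
main (void)
{
  int x;
  for (x = -1000; x <= 1000; x++)
    {
      int in1 = (x == 2 || x == 3 || x == 4 || x == 5);
      int in2 = (x >= 2 && x <= 5);
      int in3 = ((unsigned) (x - 2) <= 3);
      assert (in1 == in2 && in2 == in3);
    }
  return 0;
}
#endif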
3580 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3581 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3582 and UPPER1_P are nonzero if the respective argument is an upper bound
3583 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3584 must be specified for a comparison. ARG1 will be converted to ARG0's
3585 type if both are specified. */
3587 static tree
3588 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3589 tree arg1, int upper1_p)
3591 tree tem;
3592 int result;
3593 int sgn0, sgn1;
3595 /* If neither arg represents infinity, do the normal operation.
3596 Else, if not a comparison, return infinity. Else handle the special
3597 comparison rules. Note that most of the cases below won't occur, but
3598 are handled for consistency. */
3600 if (arg0 != 0 && arg1 != 0)
3602 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3603 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3604 STRIP_NOPS (tem);
3605 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3608 if (TREE_CODE_CLASS (code) != tcc_comparison)
3609 return 0;
3611 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3612 for neither. In real maths, we cannot assume open ended ranges are
3613 the same. But, this is computer arithmetic, where numbers are finite.
3614 We can therefore stand in for any missing bound with a value Z
3615 greater than any representable number. This permits
3616 us to treat unbounded ranges as equal. */
3617 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3618 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3619 switch (code)
3621 case EQ_EXPR:
3622 result = sgn0 == sgn1;
3623 break;
3624 case NE_EXPR:
3625 result = sgn0 != sgn1;
3626 break;
3627 case LT_EXPR:
3628 result = sgn0 < sgn1;
3629 break;
3630 case LE_EXPR:
3631 result = sgn0 <= sgn1;
3632 break;
3633 case GT_EXPR:
3634 result = sgn0 > sgn1;
3635 break;
3636 case GE_EXPR:
3637 result = sgn0 >= sgn1;
3638 break;
3639 default:
3640 gcc_unreachable ();
3643 return constant_boolean_node (result, type);
3646 /* Given EXP, a logical expression, set the range it is testing into
3647 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3648 actually being tested. *PLOW and *PHIGH will be made of the same type
3649 as the returned expression. If EXP is not a comparison, we will most
3650 likely not be returning a useful value and range. */
3652 static tree
3653 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3655 enum tree_code code;
3656 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3657 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3658 int in_p, n_in_p;
3659 tree low, high, n_low, n_high;
3661 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3662 and see if we can refine the range. Some of the cases below may not
3663 happen, but it doesn't seem worth worrying about this. We "continue"
3664 the outer loop when we've changed something; otherwise we "break"
3665 the switch, which will "break" the while. */
3667 in_p = 0;
3668 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3670 while (1)
3672 code = TREE_CODE (exp);
3673 exp_type = TREE_TYPE (exp);
3675 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3677 if (TREE_CODE_LENGTH (code) > 0)
3678 arg0 = TREE_OPERAND (exp, 0);
3679 if (TREE_CODE_CLASS (code) == tcc_comparison
3680 || TREE_CODE_CLASS (code) == tcc_unary
3681 || TREE_CODE_CLASS (code) == tcc_binary)
3682 arg0_type = TREE_TYPE (arg0);
3683 if (TREE_CODE_CLASS (code) == tcc_binary
3684 || TREE_CODE_CLASS (code) == tcc_comparison
3685 || (TREE_CODE_CLASS (code) == tcc_expression
3686 && TREE_CODE_LENGTH (code) > 1))
3687 arg1 = TREE_OPERAND (exp, 1);
3690 switch (code)
3692 case TRUTH_NOT_EXPR:
3693 in_p = ! in_p, exp = arg0;
3694 continue;
3696 case EQ_EXPR: case NE_EXPR:
3697 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3698 /* We can only do something if the range is testing for zero
3699 and if the second operand is an integer constant. Note that
3700 saying something is "in" the range we make is done by
3701 complementing IN_P, since it is set in the initial case of
3702 being not equal to zero; "out" is leaving it alone. */
3703 if (low == 0 || high == 0
3704 || ! integer_zerop (low) || ! integer_zerop (high)
3705 || TREE_CODE (arg1) != INTEGER_CST)
3706 break;
3708 switch (code)
3710 case NE_EXPR: /* - [c, c] */
3711 low = high = arg1;
3712 break;
3713 case EQ_EXPR: /* + [c, c] */
3714 in_p = ! in_p, low = high = arg1;
3715 break;
3716 case GT_EXPR: /* - [-, c] */
3717 low = 0, high = arg1;
3718 break;
3719 case GE_EXPR: /* + [c, -] */
3720 in_p = ! in_p, low = arg1, high = 0;
3721 break;
3722 case LT_EXPR: /* - [c, -] */
3723 low = arg1, high = 0;
3724 break;
3725 case LE_EXPR: /* + [-, c] */
3726 in_p = ! in_p, low = 0, high = arg1;
3727 break;
3728 default:
3729 gcc_unreachable ();
3732 /* If this is an unsigned comparison, we also know that EXP is
3733 greater than or equal to zero. We base the range tests we make
3734 on that fact, so we record it here so we can parse existing
3735 range tests. We test arg0_type since often the return type
3736 of, e.g. EQ_EXPR, is boolean. */
3737 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3739 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3740 in_p, low, high, 1,
3741 fold_convert (arg0_type, integer_zero_node),
3742 NULL_TREE))
3743 break;
3745 in_p = n_in_p, low = n_low, high = n_high;
3747 /* If the high bound is missing, but we have a nonzero low
3748 bound, reverse the range so it goes from zero to the low bound
3749 minus 1. */
3750 if (high == 0 && low && ! integer_zerop (low))
3752 in_p = ! in_p;
3753 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3754 integer_one_node, 0);
3755 low = fold_convert (arg0_type, integer_zero_node);
3759 exp = arg0;
3760 continue;
3762 case NEGATE_EXPR:
3763 /* (-x) IN [a,b] -> x in [-b, -a] */
3764 n_low = range_binop (MINUS_EXPR, exp_type,
3765 fold_convert (exp_type, integer_zero_node),
3766 0, high, 1);
3767 n_high = range_binop (MINUS_EXPR, exp_type,
3768 fold_convert (exp_type, integer_zero_node),
3769 0, low, 0);
3770 low = n_low, high = n_high;
3771 exp = arg0;
3772 continue;
3774 case BIT_NOT_EXPR:
3775 /* ~ X -> -X - 1 */
3776 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3777 fold_convert (exp_type, integer_one_node));
3778 continue;
3780 case PLUS_EXPR: case MINUS_EXPR:
3781 if (TREE_CODE (arg1) != INTEGER_CST)
3782 break;
3784 /* If EXP is signed, any overflow in the computation is undefined,
3785 so we don't worry about it so long as our computations on
3786 the bounds don't overflow. For unsigned, overflow is defined
3787 and this is exactly the right thing. */
3788 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3789 arg0_type, low, 0, arg1, 0);
3790 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3791 arg0_type, high, 1, arg1, 0);
3792 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3793 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3794 break;
3796 /* Check for an unsigned range which has wrapped around the maximum
3797 value thus making n_high < n_low, and normalize it. */
3798 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3800 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3801 integer_one_node, 0);
3802 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3803 integer_one_node, 0);
3805 /* If the range is of the form +/- [ x+1, x ], we won't
3806 be able to normalize it. But then, it represents the
3807 whole range or the empty set, so make it
3808 +/- [ -, - ]. */
3809 if (tree_int_cst_equal (n_low, low)
3810 && tree_int_cst_equal (n_high, high))
3811 low = high = 0;
3812 else
3813 in_p = ! in_p;
3815 else
3816 low = n_low, high = n_high;
3818 exp = arg0;
3819 continue;
3821 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3822 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3823 break;
3825 if (! INTEGRAL_TYPE_P (arg0_type)
3826 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3827 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3828 break;
3830 n_low = low, n_high = high;
3832 if (n_low != 0)
3833 n_low = fold_convert (arg0_type, n_low);
3835 if (n_high != 0)
3836 n_high = fold_convert (arg0_type, n_high);
3839 /* If we're converting arg0 from an unsigned type, to exp,
3840 a signed type, we will be doing the comparison as unsigned.
3841 The tests above have already verified that LOW and HIGH
3842 are both positive.
3844 So we have to ensure that we will handle large unsigned
3845 values the same way that the current signed bounds treat
3846 negative values. */
3848 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3850 tree high_positive;
3851 tree equiv_type = lang_hooks.types.type_for_mode
3852 (TYPE_MODE (arg0_type), 1);
3854 /* A range without an upper bound is, naturally, unbounded.
3855 Since convert would have cropped a very large value, use
3856 the max value for the destination type. */
3857 high_positive
3858 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3859 : TYPE_MAX_VALUE (arg0_type);
3861 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3862 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3863 fold_convert (arg0_type,
3864 high_positive),
3865 fold_convert (arg0_type,
3866 integer_one_node));
3868 /* If the low bound is specified, "and" the range with the
3869 range for which the original unsigned value will be
3870 positive. */
3871 if (low != 0)
3873 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3874 1, n_low, n_high, 1,
3875 fold_convert (arg0_type,
3876 integer_zero_node),
3877 high_positive))
3878 break;
3880 in_p = (n_in_p == in_p);
3882 else
3884 /* Otherwise, "or" the range with the range of the input
3885 that will be interpreted as negative. */
3886 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3887 0, n_low, n_high, 1,
3888 fold_convert (arg0_type,
3889 integer_zero_node),
3890 high_positive))
3891 break;
3893 in_p = (in_p != n_in_p);
3897 exp = arg0;
3898 low = n_low, high = n_high;
3899 continue;
3901 default:
3902 break;
3905 break;
3908 /* If EXP is a constant, we can evaluate whether this is true or false. */
3909 if (TREE_CODE (exp) == INTEGER_CST)
3911 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3912 exp, 0, low, 0))
3913 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3914 exp, 1, high, 1)));
3915 low = high = 0;
3916 exp = 0;
3919 *pin_p = in_p, *plow = low, *phigh = high;
3920 return exp;
3923 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3924 type, TYPE, return an expression to test if EXP is in (or out of, depending
3925 on IN_P) the range. Return 0 if the test couldn't be created. */
3927 static tree
3928 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3930 tree etype = TREE_TYPE (exp);
3931 tree value;
3933 if (! in_p)
3935 value = build_range_check (type, exp, 1, low, high);
3936 if (value != 0)
3937 return invert_truthvalue (value);
3939 return 0;
3942 if (low == 0 && high == 0)
3943 return fold_convert (type, integer_one_node);
3945 if (low == 0)
3946 return fold_build2 (LE_EXPR, type, exp, high);
3948 if (high == 0)
3949 return fold_build2 (GE_EXPR, type, exp, low);
3951 if (operand_equal_p (low, high, 0))
3952 return fold_build2 (EQ_EXPR, type, exp, low);
3954 if (integer_zerop (low))
3956 if (! TYPE_UNSIGNED (etype))
3958 etype = lang_hooks.types.unsigned_type (etype);
3959 high = fold_convert (etype, high);
3960 exp = fold_convert (etype, exp);
3962 return build_range_check (type, exp, 1, 0, high);
3965 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3966 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3968 unsigned HOST_WIDE_INT lo;
3969 HOST_WIDE_INT hi;
3970 int prec;
3972 prec = TYPE_PRECISION (etype);
3973 if (prec <= HOST_BITS_PER_WIDE_INT)
3975 hi = 0;
3976 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3978 else
3980 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3981 lo = (unsigned HOST_WIDE_INT) -1;
3984 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3986 if (TYPE_UNSIGNED (etype))
3988 etype = lang_hooks.types.signed_type (etype);
3989 exp = fold_convert (etype, exp);
3991 return fold_build2 (GT_EXPR, type, exp,
3992 fold_convert (etype, integer_zero_node));
3996 value = const_binop (MINUS_EXPR, high, low, 0);
3997 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3999 tree utype, minv, maxv;
4001 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4002 for the type in question, as we rely on this here. */
4003 switch (TREE_CODE (etype))
4005 case INTEGER_TYPE:
4006 case ENUMERAL_TYPE:
4007 case CHAR_TYPE:
4008 utype = lang_hooks.types.unsigned_type (etype);
4009 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4010 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4011 integer_one_node, 1);
4012 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4013 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4014 minv, 1, maxv, 1)))
4016 etype = utype;
4017 high = fold_convert (etype, high);
4018 low = fold_convert (etype, low);
4019 exp = fold_convert (etype, exp);
4020 value = const_binop (MINUS_EXPR, high, low, 0);
4022 break;
4023 default:
4024 break;
4028 if (value != 0 && ! TREE_OVERFLOW (value))
4029 return build_range_check (type,
4030 fold_build2 (MINUS_EXPR, etype, exp, low),
4031 1, fold_convert (etype, integer_zero_node),
4032 value);
4034 return 0;
4037 /* Given two ranges, see if we can merge them into one. Return 1 if we
4038 can, 0 if we can't. Set the output range into the specified parameters. */
4040 static int
4041 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4042 tree high0, int in1_p, tree low1, tree high1)
4044 int no_overlap;
4045 int subset;
4046 int temp;
4047 tree tem;
4048 int in_p;
4049 tree low, high;
4050 int lowequal = ((low0 == 0 && low1 == 0)
4051 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4052 low0, 0, low1, 0)));
4053 int highequal = ((high0 == 0 && high1 == 0)
4054 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4055 high0, 1, high1, 1)));
4057 /* Make range 0 be the range that starts first, or ends last if they
4058 start at the same value. Swap them if it isn't. */
4059 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4060 low0, 0, low1, 0))
4061 || (lowequal
4062 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4063 high1, 1, high0, 1))))
4065 temp = in0_p, in0_p = in1_p, in1_p = temp;
4066 tem = low0, low0 = low1, low1 = tem;
4067 tem = high0, high0 = high1, high1 = tem;
4070 /* Now flag two cases, whether the ranges are disjoint or whether the
4071 second range is totally subsumed in the first. Note that the tests
4072 below are simplified by the ones above. */
4073 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4074 high0, 1, low1, 0));
4075 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4076 high1, 1, high0, 1));
4078 /* We now have four cases, depending on whether we are including or
4079 excluding the two ranges. */
4080 if (in0_p && in1_p)
4082 /* If they don't overlap, the result is false. If the second range
4083 is a subset it is the result. Otherwise, the range is from the start
4084 of the second to the end of the first. */
4085 if (no_overlap)
4086 in_p = 0, low = high = 0;
4087 else if (subset)
4088 in_p = 1, low = low1, high = high1;
4089 else
4090 in_p = 1, low = low1, high = high0;
4093 else if (in0_p && ! in1_p)
4095 /* If they don't overlap, the result is the first range. If they are
4096 equal, the result is false. If the second range is a subset of the
4097 first, and the ranges begin at the same place, we go from just after
4098 the end of the first range to the end of the second. If the second
4099 range is not a subset of the first, or if it is a subset and both
4100 ranges end at the same place, the range starts at the start of the
4101 first range and ends just before the second range.
4102 Otherwise, we can't describe this as a single range. */
4103 if (no_overlap)
4104 in_p = 1, low = low0, high = high0;
4105 else if (lowequal && highequal)
4106 in_p = 0, low = high = 0;
4107 else if (subset && lowequal)
4109 in_p = 1, high = high0;
4110 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4111 integer_one_node, 0);
4113 else if (! subset || highequal)
4115 in_p = 1, low = low0;
4116 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4117 integer_one_node, 0);
4119 else
4120 return 0;
4123 else if (! in0_p && in1_p)
4125 /* If they don't overlap, the result is the second range. If the second
4126 is a subset of the first, the result is false. Otherwise,
4127 the range starts just after the first range and ends at the
4128 end of the second. */
4129 if (no_overlap)
4130 in_p = 1, low = low1, high = high1;
4131 else if (subset || highequal)
4132 in_p = 0, low = high = 0;
4133 else
4135 in_p = 1, high = high1;
4136 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4137 integer_one_node, 0);
4141 else
4143 /* The case where we are excluding both ranges. Here the complex case
4144 is if they don't overlap. In that case, the only time we have a
4145 range is if they are adjacent. If the second is a subset of the
4146 first, the result is the first. Otherwise, the range to exclude
4147 starts at the beginning of the first range and ends at the end of the
4148 second. */
4149 if (no_overlap)
4151 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4152 range_binop (PLUS_EXPR, NULL_TREE,
4153 high0, 1,
4154 integer_one_node, 1),
4155 1, low1, 0)))
4156 in_p = 0, low = low0, high = high1;
4157 else
4159 /* Canonicalize - [min, x] into - [-, x]. */
4160 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4161 switch (TREE_CODE (TREE_TYPE (low0)))
4163 case ENUMERAL_TYPE:
4164 if (TYPE_PRECISION (TREE_TYPE (low0))
4165 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4166 break;
4167 /* FALLTHROUGH */
4168 case INTEGER_TYPE:
4169 case CHAR_TYPE:
4170 if (tree_int_cst_equal (low0,
4171 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4172 low0 = 0;
4173 break;
4174 case POINTER_TYPE:
4175 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4176 && integer_zerop (low0))
4177 low0 = 0;
4178 break;
4179 default:
4180 break;
4183 /* Canonicalize - [x, max] into - [x, -]. */
4184 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4185 switch (TREE_CODE (TREE_TYPE (high1)))
4187 case ENUMERAL_TYPE:
4188 if (TYPE_PRECISION (TREE_TYPE (high1))
4189 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4190 break;
4191 /* FALLTHROUGH */
4192 case INTEGER_TYPE:
4193 case CHAR_TYPE:
4194 if (tree_int_cst_equal (high1,
4195 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4196 high1 = 0;
4197 break;
4198 case POINTER_TYPE:
4199 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4200 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4201 high1, 1,
4202 integer_one_node, 1)))
4203 high1 = 0;
4204 break;
4205 default:
4206 break;
4209 /* The ranges might be also adjacent between the maximum and
4210 minimum values of the given type. For
4211 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4212 return + [x + 1, y - 1]. */
4213 if (low0 == 0 && high1 == 0)
4215 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4216 integer_one_node, 1);
4217 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4218 integer_one_node, 0);
4219 if (low == 0 || high == 0)
4220 return 0;
4222 in_p = 1;
4224 else
4225 return 0;
4228 else if (subset)
4229 in_p = 0, low = low0, high = high0;
4230 else
4231 in_p = 0, low = low0, high = high1;
4234 *pin_p = in_p, *plow = low, *phigh = high;
4235 return 1;
4239 /* Subroutine of fold, looking inside expressions of the form
4240 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4241 of the COND_EXPR. This function is being used also to optimize
4242 A op B ? C : A, by reversing the comparison first.
4244 Return a folded expression whose code is not a COND_EXPR
4245 anymore, or NULL_TREE if no folding opportunity is found. */
4247 static tree
4248 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4250 enum tree_code comp_code = TREE_CODE (arg0);
4251 tree arg00 = TREE_OPERAND (arg0, 0);
4252 tree arg01 = TREE_OPERAND (arg0, 1);
4253 tree arg1_type = TREE_TYPE (arg1);
4254 tree tem;
4256 STRIP_NOPS (arg1);
4257 STRIP_NOPS (arg2);
4259 /* If we have A op 0 ? A : -A, consider applying the following
4260 transformations:
4262 A == 0? A : -A same as -A
4263 A != 0? A : -A same as A
4264 A >= 0? A : -A same as abs (A)
4265 A > 0? A : -A same as abs (A)
4266 A <= 0? A : -A same as -abs (A)
4267 A < 0? A : -A same as -abs (A)
4269 None of these transformations work for modes with signed
4270 zeros. If A is +/-0, the first two transformations will
4271 change the sign of the result (from +0 to -0, or vice
4272 versa). The last four will fix the sign of the result,
4273 even though the original expressions could be positive or
4274 negative, depending on the sign of A.
4276 Note that all these transformations are correct if A is
4277 NaN, since the two alternatives (A and -A) are also NaNs. */
4278 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4279 ? real_zerop (arg01)
4280 : integer_zerop (arg01))
4281 && ((TREE_CODE (arg2) == NEGATE_EXPR
4282 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4283 /* In the case that A is of the form X-Y, '-A' (arg2) may
4284 have already been folded to Y-X, check for that. */
4285 || (TREE_CODE (arg1) == MINUS_EXPR
4286 && TREE_CODE (arg2) == MINUS_EXPR
4287 && operand_equal_p (TREE_OPERAND (arg1, 0),
4288 TREE_OPERAND (arg2, 1), 0)
4289 && operand_equal_p (TREE_OPERAND (arg1, 1),
4290 TREE_OPERAND (arg2, 0), 0))))
4291 switch (comp_code)
4293 case EQ_EXPR:
4294 case UNEQ_EXPR:
4295 tem = fold_convert (arg1_type, arg1);
4296 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4297 case NE_EXPR:
4298 case LTGT_EXPR:
4299 return pedantic_non_lvalue (fold_convert (type, arg1));
4300 case UNGE_EXPR:
4301 case UNGT_EXPR:
4302 if (flag_trapping_math)
4303 break;
4304 /* Fall through. */
4305 case GE_EXPR:
4306 case GT_EXPR:
4307 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4308 arg1 = fold_convert (lang_hooks.types.signed_type
4309 (TREE_TYPE (arg1)), arg1);
4310 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4311 return pedantic_non_lvalue (fold_convert (type, tem));
4312 case UNLE_EXPR:
4313 case UNLT_EXPR:
4314 if (flag_trapping_math)
4315 break;
4316 case LE_EXPR:
4317 case LT_EXPR:
4318 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4319 arg1 = fold_convert (lang_hooks.types.signed_type
4320 (TREE_TYPE (arg1)), arg1);
4321 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4322 return negate_expr (fold_convert (type, tem));
4323 default:
4324 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4325 break;
4328 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4329 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4330 both transformations are correct when A is NaN: A != 0
4331 is then true, and A == 0 is false. */
4333 if (integer_zerop (arg01) && integer_zerop (arg2))
4335 if (comp_code == NE_EXPR)
4336 return pedantic_non_lvalue (fold_convert (type, arg1));
4337 else if (comp_code == EQ_EXPR)
4338 return fold_convert (type, integer_zero_node);
4341 /* Try some transformations of A op B ? A : B.
4343 A == B? A : B same as B
4344 A != B? A : B same as A
4345 A >= B? A : B same as max (A, B)
4346 A > B? A : B same as max (B, A)
4347 A <= B? A : B same as min (A, B)
4348 A < B? A : B same as min (B, A)
4350 As above, these transformations don't work in the presence
4351 of signed zeros. For example, if A and B are zeros of
4352 opposite sign, the first two transformations will change
4353 the sign of the result. In the last four, the original
4354 expressions give different results for (A=+0, B=-0) and
4355 (A=-0, B=+0), but the transformed expressions do not.
4357 The first two transformations are correct if either A or B
4358 is a NaN. In the first transformation, the condition will
4359 be false, and B will indeed be chosen. In the case of the
4360 second transformation, the condition A != B will be true,
4361 and A will be chosen.
4363 The conversions to max() and min() are not correct if B is
4364 a number and A is not. The conditions in the original
4365 expressions will be false, so all four give B. The min()
4366 and max() versions would give a NaN instead. */
4367 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4368 /* Avoid these transformations if the COND_EXPR may be used
4369 as an lvalue in the C++ front-end. PR c++/19199. */
4370 && (in_gimple_form
4371 || strcmp (lang_hooks.name, "GNU C++") != 0
4372 || ! maybe_lvalue_p (arg1)
4373 || ! maybe_lvalue_p (arg2)))
4375 tree comp_op0 = arg00;
4376 tree comp_op1 = arg01;
4377 tree comp_type = TREE_TYPE (comp_op0);
4379 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4380 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4382 comp_type = type;
4383 comp_op0 = arg1;
4384 comp_op1 = arg2;
4387 switch (comp_code)
4389 case EQ_EXPR:
4390 return pedantic_non_lvalue (fold_convert (type, arg2));
4391 case NE_EXPR:
4392 return pedantic_non_lvalue (fold_convert (type, arg1));
4393 case LE_EXPR:
4394 case LT_EXPR:
4395 case UNLE_EXPR:
4396 case UNLT_EXPR:
4397 /* In C++ a ?: expression can be an lvalue, so put the
4398 operand which will be used if they are equal first
4399 so that we can convert this back to the
4400 corresponding COND_EXPR. */
4401 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4403 comp_op0 = fold_convert (comp_type, comp_op0);
4404 comp_op1 = fold_convert (comp_type, comp_op1);
4405 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4406 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4407 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4408 return pedantic_non_lvalue (fold_convert (type, tem));
4410 break;
4411 case GE_EXPR:
4412 case GT_EXPR:
4413 case UNGE_EXPR:
4414 case UNGT_EXPR:
4415 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4417 comp_op0 = fold_convert (comp_type, comp_op0);
4418 comp_op1 = fold_convert (comp_type, comp_op1);
4419 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4420 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4421 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4422 return pedantic_non_lvalue (fold_convert (type, tem));
4424 break;
4425 case UNEQ_EXPR:
4426 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4427 return pedantic_non_lvalue (fold_convert (type, arg2));
4428 break;
4429 case LTGT_EXPR:
4430 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4431 return pedantic_non_lvalue (fold_convert (type, arg1));
4432 break;
4433 default:
4434 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4435 break;
4439 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4440 we might still be able to simplify this. For example,
4441 if C1 is one less or one more than C2, this might have started
4442 out as a MIN or MAX and been transformed by this function.
4443 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4445 if (INTEGRAL_TYPE_P (type)
4446 && TREE_CODE (arg01) == INTEGER_CST
4447 && TREE_CODE (arg2) == INTEGER_CST)
4448 switch (comp_code)
4450 case EQ_EXPR:
4451 /* We can replace A with C1 in this case. */
4452 arg1 = fold_convert (type, arg01);
4453 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4455 case LT_EXPR:
4456 /* If C1 is C2 + 1, this is min(A, C2). */
4457 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4458 OEP_ONLY_CONST)
4459 && operand_equal_p (arg01,
4460 const_binop (PLUS_EXPR, arg2,
4461 integer_one_node, 0),
4462 OEP_ONLY_CONST))
4463 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4464 type, arg1, arg2));
4465 break;
4467 case LE_EXPR:
4468 /* If C1 is C2 - 1, this is min(A, C2). */
4469 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4470 OEP_ONLY_CONST)
4471 && operand_equal_p (arg01,
4472 const_binop (MINUS_EXPR, arg2,
4473 integer_one_node, 0),
4474 OEP_ONLY_CONST))
4475 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4476 type, arg1, arg2));
4477 break;
4479 case GT_EXPR:
4480 /* If C1 is C2 - 1, this is max(A, C2). */
4481 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4482 OEP_ONLY_CONST)
4483 && operand_equal_p (arg01,
4484 const_binop (MINUS_EXPR, arg2,
4485 integer_one_node, 0),
4486 OEP_ONLY_CONST))
4487 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4488 type, arg1, arg2));
4489 break;
4491 case GE_EXPR:
4492 /* If C1 is C2 + 1, this is max(A, C2). */
4493 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4494 OEP_ONLY_CONST)
4495 && operand_equal_p (arg01,
4496 const_binop (PLUS_EXPR, arg2,
4497 integer_one_node, 0),
4498 OEP_ONLY_CONST))
4499 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4500 type, arg1, arg2));
4501 break;
4502 case NE_EXPR:
4503 break;
4504 default:
4505 gcc_unreachable ();
4508 return NULL_TREE;
4513 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4514 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4515 #endif
4517 /* EXP is some logical combination of boolean tests. See if we can
4518 merge it into some range test. Return the new tree if so. */
4520 static tree
4521 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4523 int or_op = (code == TRUTH_ORIF_EXPR
4524 || code == TRUTH_OR_EXPR);
4525 int in0_p, in1_p, in_p;
4526 tree low0, low1, low, high0, high1, high;
4527 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4528 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4529 tree tem;
4531 /* If this is an OR operation, invert both sides; we will invert
4532 again at the end. */
4533 if (or_op)
4534 in0_p = ! in0_p, in1_p = ! in1_p;
4536 /* If both expressions are the same, if we can merge the ranges, and we
4537 can build the range test, return it or it inverted. If one of the
4538 ranges is always true or always false, consider it to be the same
4539 expression as the other. */
4540 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4541 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4542 in1_p, low1, high1)
4543 && 0 != (tem = (build_range_check (type,
4544 lhs != 0 ? lhs
4545 : rhs != 0 ? rhs : integer_zero_node,
4546 in_p, low, high))))
4547 return or_op ? invert_truthvalue (tem) : tem;
4549 /* On machines where the branch cost is expensive, if this is a
4550 short-circuited branch and the underlying object on both sides
4551 is the same, make a non-short-circuit operation. */
4552 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4553 && lhs != 0 && rhs != 0
4554 && (code == TRUTH_ANDIF_EXPR
4555 || code == TRUTH_ORIF_EXPR)
4556 && operand_equal_p (lhs, rhs, 0))
4558 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4559 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4560 which cases we can't do this. */
4561 if (simple_operand_p (lhs))
4562 return build2 (code == TRUTH_ANDIF_EXPR
4563 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4564 type, op0, op1);
4566 else if (lang_hooks.decls.global_bindings_p () == 0
4567 && ! CONTAINS_PLACEHOLDER_P (lhs))
4569 tree common = save_expr (lhs);
4571 if (0 != (lhs = build_range_check (type, common,
4572 or_op ? ! in0_p : in0_p,
4573 low0, high0))
4574 && (0 != (rhs = build_range_check (type, common,
4575 or_op ? ! in1_p : in1_p,
4576 low1, high1))))
4577 return build2 (code == TRUTH_ANDIF_EXPR
4578 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4579 type, lhs, rhs);
4583 return 0;
4586 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4587 bit value. Arrange things so the extra bits will be set to zero if and
4588 only if C is signed-extended to its full width. If MASK is nonzero,
4589 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4591 static tree
4592 unextend (tree c, int p, int unsignedp, tree mask)
4594 tree type = TREE_TYPE (c);
4595 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4596 tree temp;
4598 if (p == modesize || unsignedp)
4599 return c;
4601 /* We work by getting just the sign bit into the low-order bit, then
4602 into the high-order bit, then sign-extend. We then XOR that value
4603 with C. */
4604 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4605 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4607 /* We must use a signed type in order to get an arithmetic right shift.
4608 However, we must also avoid introducing accidental overflows, so that
4609 a subsequent call to integer_zerop will work. Hence we must
4610 do the type conversion here. At this point, the constant is either
4611 zero or one, and the conversion to a signed type can never overflow.
4612 We could get an overflow if this conversion is done anywhere else. */
4613 if (TYPE_UNSIGNED (type))
4614 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4616 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4617 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4618 if (mask != 0)
4619 temp = const_binop (BIT_AND_EXPR, temp,
4620 fold_convert (TREE_TYPE (c), mask), 0);
4621 /* If necessary, convert the type back to match the type of C. */
4622 if (TYPE_UNSIGNED (type))
4623 temp = fold_convert (type, temp);
4625 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4628 /* Find ways of folding logical expressions of LHS and RHS:
4629 Try to merge two comparisons to the same innermost item.
4630 Look for range tests like "ch >= '0' && ch <= '9'".
4631 Look for combinations of simple terms on machines with expensive branches
4632 and evaluate the RHS unconditionally.
4634 For example, if we have p->a == 2 && p->b == 4 and we can make an
4635 object large enough to span both A and B, we can do this with a comparison
4636 against the object ANDed with the a mask.
4638 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4639 operations to do this with one comparison.
4641 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4642 function and the one above.
4644 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4645 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4647 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4648 two operands.
4650 We return the simplified tree or 0 if no optimization is possible. */
4652 static tree
4653 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4655 /* If this is the "or" of two comparisons, we can do something if
4656 the comparisons are NE_EXPR. If this is the "and", we can do something
4657 if the comparisons are EQ_EXPR. I.e.,
4658 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4660 WANTED_CODE is this operation code. For single bit fields, we can
4661 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4662 comparison for one-bit fields. */
4664 enum tree_code wanted_code;
4665 enum tree_code lcode, rcode;
4666 tree ll_arg, lr_arg, rl_arg, rr_arg;
4667 tree ll_inner, lr_inner, rl_inner, rr_inner;
4668 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4669 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4670 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4671 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4672 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4673 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4674 enum machine_mode lnmode, rnmode;
4675 tree ll_mask, lr_mask, rl_mask, rr_mask;
4676 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4677 tree l_const, r_const;
4678 tree lntype, rntype, result;
4679 int first_bit, end_bit;
4680 int volatilep;
4682 /* Start by getting the comparison codes. Fail if anything is volatile.
4683 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4684 it were surrounded with a NE_EXPR. */
4686 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4687 return 0;
4689 lcode = TREE_CODE (lhs);
4690 rcode = TREE_CODE (rhs);
4692 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4694 lhs = build2 (NE_EXPR, truth_type, lhs,
4695 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4696 lcode = NE_EXPR;
4699 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4701 rhs = build2 (NE_EXPR, truth_type, rhs,
4702 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4703 rcode = NE_EXPR;
4706 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4707 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4708 return 0;
4710 ll_arg = TREE_OPERAND (lhs, 0);
4711 lr_arg = TREE_OPERAND (lhs, 1);
4712 rl_arg = TREE_OPERAND (rhs, 0);
4713 rr_arg = TREE_OPERAND (rhs, 1);
4715 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4716 if (simple_operand_p (ll_arg)
4717 && simple_operand_p (lr_arg))
4719 tree result;
4720 if (operand_equal_p (ll_arg, rl_arg, 0)
4721 && operand_equal_p (lr_arg, rr_arg, 0))
4723 result = combine_comparisons (code, lcode, rcode,
4724 truth_type, ll_arg, lr_arg);
4725 if (result)
4726 return result;
4728 else if (operand_equal_p (ll_arg, rr_arg, 0)
4729 && operand_equal_p (lr_arg, rl_arg, 0))
4731 result = combine_comparisons (code, lcode,
4732 swap_tree_comparison (rcode),
4733 truth_type, ll_arg, lr_arg);
4734 if (result)
4735 return result;
4739 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4740 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4742 /* If the RHS can be evaluated unconditionally and its operands are
4743 simple, it wins to evaluate the RHS unconditionally on machines
4744 with expensive branches. In this case, this isn't a comparison
4745 that can be merged. Avoid doing this if the RHS is a floating-point
4746 comparison since those can trap. */
4748 if (BRANCH_COST >= 2
4749 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4750 && simple_operand_p (rl_arg)
4751 && simple_operand_p (rr_arg))
4753 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4754 if (code == TRUTH_OR_EXPR
4755 && lcode == NE_EXPR && integer_zerop (lr_arg)
4756 && rcode == NE_EXPR && integer_zerop (rr_arg)
4757 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4758 return build2 (NE_EXPR, truth_type,
4759 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4760 ll_arg, rl_arg),
4761 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4763 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4764 if (code == TRUTH_AND_EXPR
4765 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4766 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4767 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4768 return build2 (EQ_EXPR, truth_type,
4769 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4770 ll_arg, rl_arg),
4771 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4773 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4774 return build2 (code, truth_type, lhs, rhs);
4777 /* See if the comparisons can be merged. Then get all the parameters for
4778 each side. */
4780 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4781 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4782 return 0;
4784 volatilep = 0;
4785 ll_inner = decode_field_reference (ll_arg,
4786 &ll_bitsize, &ll_bitpos, &ll_mode,
4787 &ll_unsignedp, &volatilep, &ll_mask,
4788 &ll_and_mask);
4789 lr_inner = decode_field_reference (lr_arg,
4790 &lr_bitsize, &lr_bitpos, &lr_mode,
4791 &lr_unsignedp, &volatilep, &lr_mask,
4792 &lr_and_mask);
4793 rl_inner = decode_field_reference (rl_arg,
4794 &rl_bitsize, &rl_bitpos, &rl_mode,
4795 &rl_unsignedp, &volatilep, &rl_mask,
4796 &rl_and_mask);
4797 rr_inner = decode_field_reference (rr_arg,
4798 &rr_bitsize, &rr_bitpos, &rr_mode,
4799 &rr_unsignedp, &volatilep, &rr_mask,
4800 &rr_and_mask);
4802 /* It must be true that the inner operation on the lhs of each
4803 comparison must be the same if we are to be able to do anything.
4804 Then see if we have constants. If not, the same must be true for
4805 the rhs's. */
4806 if (volatilep || ll_inner == 0 || rl_inner == 0
4807 || ! operand_equal_p (ll_inner, rl_inner, 0))
4808 return 0;
4810 if (TREE_CODE (lr_arg) == INTEGER_CST
4811 && TREE_CODE (rr_arg) == INTEGER_CST)
4812 l_const = lr_arg, r_const = rr_arg;
4813 else if (lr_inner == 0 || rr_inner == 0
4814 || ! operand_equal_p (lr_inner, rr_inner, 0))
4815 return 0;
4816 else
4817 l_const = r_const = 0;
4819 /* If either comparison code is not correct for our logical operation,
4820 fail. However, we can convert a one-bit comparison against zero into
4821 the opposite comparison against that bit being set in the field. */
4823 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4824 if (lcode != wanted_code)
4826 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4828 /* Make the left operand unsigned, since we are only interested
4829 in the value of one bit. Otherwise we are doing the wrong
4830 thing below. */
4831 ll_unsignedp = 1;
4832 l_const = ll_mask;
4834 else
4835 return 0;
4838 /* This is analogous to the code for l_const above. */
4839 if (rcode != wanted_code)
4841 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4843 rl_unsignedp = 1;
4844 r_const = rl_mask;
4846 else
4847 return 0;
4850 /* After this point all optimizations will generate bit-field
4851 references, which we might not want. */
4852 if (! lang_hooks.can_use_bit_fields_p ())
4853 return 0;
4855 /* See if we can find a mode that contains both fields being compared on
4856 the left. If we can't, fail. Otherwise, update all constants and masks
4857 to be relative to a field of that size. */
4858 first_bit = MIN (ll_bitpos, rl_bitpos);
4859 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4860 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4861 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4862 volatilep);
4863 if (lnmode == VOIDmode)
4864 return 0;
4866 lnbitsize = GET_MODE_BITSIZE (lnmode);
4867 lnbitpos = first_bit & ~ (lnbitsize - 1);
4868 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4869 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4871 if (BYTES_BIG_ENDIAN)
4873 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4874 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4877 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4878 size_int (xll_bitpos), 0);
4879 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4880 size_int (xrl_bitpos), 0);
4882 if (l_const)
4884 l_const = fold_convert (lntype, l_const);
4885 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4886 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4887 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4888 fold_build1 (BIT_NOT_EXPR,
4889 lntype, ll_mask),
4890 0)))
4892 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4894 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4897 if (r_const)
4899 r_const = fold_convert (lntype, r_const);
4900 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4901 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4902 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4903 fold_build1 (BIT_NOT_EXPR,
4904 lntype, rl_mask),
4905 0)))
4907 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4909 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4913 /* If the right sides are not constant, do the same for it. Also,
4914 disallow this optimization if a size or signedness mismatch occurs
4915 between the left and right sides. */
4916 if (l_const == 0)
4918 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4919 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4920 /* Make sure the two fields on the right
4921 correspond to the left without being swapped. */
4922 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4923 return 0;
4925 first_bit = MIN (lr_bitpos, rr_bitpos);
4926 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4927 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4928 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4929 volatilep);
4930 if (rnmode == VOIDmode)
4931 return 0;
4933 rnbitsize = GET_MODE_BITSIZE (rnmode);
4934 rnbitpos = first_bit & ~ (rnbitsize - 1);
4935 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4936 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4938 if (BYTES_BIG_ENDIAN)
4940 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4941 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4944 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4945 size_int (xlr_bitpos), 0);
4946 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4947 size_int (xrr_bitpos), 0);
4949 /* Make a mask that corresponds to both fields being compared.
4950 Do this for both items being compared. If the operands are the
4951 same size and the bits being compared are in the same position
4952 then we can do this by masking both and comparing the masked
4953 results. */
4954 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4955 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4956 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4958 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4959 ll_unsignedp || rl_unsignedp);
4960 if (! all_ones_mask_p (ll_mask, lnbitsize))
4961 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4963 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4964 lr_unsignedp || rr_unsignedp);
4965 if (! all_ones_mask_p (lr_mask, rnbitsize))
4966 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4968 return build2 (wanted_code, truth_type, lhs, rhs);
4971 /* There is still another way we can do something: If both pairs of
4972 fields being compared are adjacent, we may be able to make a wider
4973 field containing them both.
4975 Note that we still must mask the lhs/rhs expressions. Furthermore,
4976 the mask must be shifted to account for the shift done by
4977 make_bit_field_ref. */
4978 if ((ll_bitsize + ll_bitpos == rl_bitpos
4979 && lr_bitsize + lr_bitpos == rr_bitpos)
4980 || (ll_bitpos == rl_bitpos + rl_bitsize
4981 && lr_bitpos == rr_bitpos + rr_bitsize))
4983 tree type;
4985 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4986 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4987 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4988 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4990 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4991 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4992 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4993 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4995 /* Convert to the smaller type before masking out unwanted bits. */
4996 type = lntype;
4997 if (lntype != rntype)
4999 if (lnbitsize > rnbitsize)
5001 lhs = fold_convert (rntype, lhs);
5002 ll_mask = fold_convert (rntype, ll_mask);
5003 type = rntype;
5005 else if (lnbitsize < rnbitsize)
5007 rhs = fold_convert (lntype, rhs);
5008 lr_mask = fold_convert (lntype, lr_mask);
5009 type = lntype;
5013 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5014 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5016 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5017 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5019 return build2 (wanted_code, truth_type, lhs, rhs);
5022 return 0;
5025 /* Handle the case of comparisons with constants. If there is something in
5026 common between the masks, those bits of the constants must be the same.
5027 If not, the condition is always false. Test for this to avoid generating
5028 incorrect code below. */
5029 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5030 if (! integer_zerop (result)
5031 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5032 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5034 if (wanted_code == NE_EXPR)
5036 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5037 return constant_boolean_node (true, truth_type);
5039 else
5041 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5042 return constant_boolean_node (false, truth_type);
5046 /* Construct the expression we will return. First get the component
5047 reference we will make. Unless the mask is all ones the width of
5048 that field, perform the mask operation. Then compare with the
5049 merged constant. */
5050 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5051 ll_unsignedp || rl_unsignedp);
5053 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5054 if (! all_ones_mask_p (ll_mask, lnbitsize))
5055 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5057 return build2 (wanted_code, truth_type, result,
5058 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5061 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5062 constant. */
5064 static tree
5065 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5067 tree arg0 = op0;
5068 enum tree_code op_code;
5069 tree comp_const = op1;
5070 tree minmax_const;
5071 int consts_equal, consts_lt;
5072 tree inner;
5074 STRIP_SIGN_NOPS (arg0);
5076 op_code = TREE_CODE (arg0);
5077 minmax_const = TREE_OPERAND (arg0, 1);
5078 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5079 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5080 inner = TREE_OPERAND (arg0, 0);
5082 /* If something does not permit us to optimize, return the original tree. */
5083 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5084 || TREE_CODE (comp_const) != INTEGER_CST
5085 || TREE_CONSTANT_OVERFLOW (comp_const)
5086 || TREE_CODE (minmax_const) != INTEGER_CST
5087 || TREE_CONSTANT_OVERFLOW (minmax_const))
5088 return NULL_TREE;
5090 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5091 and GT_EXPR, doing the rest with recursive calls using logical
5092 simplifications. */
5093 switch (code)
5095 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5097 /* FIXME: We should be able to invert code without building a
5098 scratch tree node, but doing so would require us to
5099 duplicate a part of invert_truthvalue here. */
5100 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5101 tem = optimize_minmax_comparison (TREE_CODE (tem),
5102 TREE_TYPE (tem),
5103 TREE_OPERAND (tem, 0),
5104 TREE_OPERAND (tem, 1));
5105 return invert_truthvalue (tem);
5108 case GE_EXPR:
5109 return
5110 fold_build2 (TRUTH_ORIF_EXPR, type,
5111 optimize_minmax_comparison
5112 (EQ_EXPR, type, arg0, comp_const),
5113 optimize_minmax_comparison
5114 (GT_EXPR, type, arg0, comp_const));
5116 case EQ_EXPR:
5117 if (op_code == MAX_EXPR && consts_equal)
5118 /* MAX (X, 0) == 0 -> X <= 0 */
5119 return fold_build2 (LE_EXPR, type, inner, comp_const);
5121 else if (op_code == MAX_EXPR && consts_lt)
5122 /* MAX (X, 0) == 5 -> X == 5 */
5123 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5125 else if (op_code == MAX_EXPR)
5126 /* MAX (X, 0) == -1 -> false */
5127 return omit_one_operand (type, integer_zero_node, inner);
5129 else if (consts_equal)
5130 /* MIN (X, 0) == 0 -> X >= 0 */
5131 return fold_build2 (GE_EXPR, type, inner, comp_const);
5133 else if (consts_lt)
5134 /* MIN (X, 0) == 5 -> false */
5135 return omit_one_operand (type, integer_zero_node, inner);
5137 else
5138 /* MIN (X, 0) == -1 -> X == -1 */
5139 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5141 case GT_EXPR:
5142 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5143 /* MAX (X, 0) > 0 -> X > 0
5144 MAX (X, 0) > 5 -> X > 5 */
5145 return fold_build2 (GT_EXPR, type, inner, comp_const);
5147 else if (op_code == MAX_EXPR)
5148 /* MAX (X, 0) > -1 -> true */
5149 return omit_one_operand (type, integer_one_node, inner);
5151 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5152 /* MIN (X, 0) > 0 -> false
5153 MIN (X, 0) > 5 -> false */
5154 return omit_one_operand (type, integer_zero_node, inner);
5156 else
5157 /* MIN (X, 0) > -1 -> X > -1 */
5158 return fold_build2 (GT_EXPR, type, inner, comp_const);
5160 default:
5161 return NULL_TREE;
5165 /* T is an integer expression that is being multiplied, divided, or taken a
5166 modulus (CODE says which and what kind of divide or modulus) by a
5167 constant C. See if we can eliminate that operation by folding it with
5168 other operations already in T. WIDE_TYPE, if non-null, is a type that
5169 should be used for the computation if wider than our type.
5171 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5172 (X * 2) + (Y * 4). We must, however, be assured that either the original
5173 expression would not overflow or that overflow is undefined for the type
5174 in the language in question.
5176 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5177 the machine has a multiply-accumulate insn or that this is part of an
5178 addressing calculation.
5180 If we return a non-null expression, it is an equivalent form of the
5181 original computation, but need not be in the original type. */
5183 static tree
5184 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5186 /* To avoid exponential search depth, refuse to allow recursion past
5187 three levels. Beyond that (1) it's highly unlikely that we'll find
5188 something interesting and (2) we've probably processed it before
5189 when we built the inner expression. */
5191 static int depth;
5192 tree ret;
5194 if (depth > 3)
5195 return NULL;
5197 depth++;
5198 ret = extract_muldiv_1 (t, c, code, wide_type);
5199 depth--;
5201 return ret;
5204 static tree
5205 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5207 tree type = TREE_TYPE (t);
5208 enum tree_code tcode = TREE_CODE (t);
5209 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5210 > GET_MODE_SIZE (TYPE_MODE (type)))
5211 ? wide_type : type);
5212 tree t1, t2;
5213 int same_p = tcode == code;
5214 tree op0 = NULL_TREE, op1 = NULL_TREE;
5216 /* Don't deal with constants of zero here; they confuse the code below. */
5217 if (integer_zerop (c))
5218 return NULL_TREE;
5220 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5221 op0 = TREE_OPERAND (t, 0);
5223 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5224 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5226 /* Note that we need not handle conditional operations here since fold
5227 already handles those cases. So just do arithmetic here. */
5228 switch (tcode)
5230 case INTEGER_CST:
5231 /* For a constant, we can always simplify if we are a multiply
5232 or (for divide and modulus) if it is a multiple of our constant. */
5233 if (code == MULT_EXPR
5234 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5235 return const_binop (code, fold_convert (ctype, t),
5236 fold_convert (ctype, c), 0);
5237 break;
5239 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5240 /* If op0 is an expression ... */
5241 if ((COMPARISON_CLASS_P (op0)
5242 || UNARY_CLASS_P (op0)
5243 || BINARY_CLASS_P (op0)
5244 || EXPRESSION_CLASS_P (op0))
5245 /* ... and is unsigned, and its type is smaller than ctype,
5246 then we cannot pass through as widening. */
5247 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5248 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5249 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5250 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5251 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5252 /* ... or this is a truncation (t is narrower than op0),
5253 then we cannot pass through this narrowing. */
5254 || (GET_MODE_SIZE (TYPE_MODE (type))
5255 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5256 /* ... or signedness changes for division or modulus,
5257 then we cannot pass through this conversion. */
5258 || (code != MULT_EXPR
5259 && (TYPE_UNSIGNED (ctype)
5260 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5261 break;
5263 /* Pass the constant down and see if we can make a simplification. If
5264 we can, replace this expression with the inner simplification for
5265 possible later conversion to our or some other type. */
5266 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5267 && TREE_CODE (t2) == INTEGER_CST
5268 && ! TREE_CONSTANT_OVERFLOW (t2)
5269 && (0 != (t1 = extract_muldiv (op0, t2, code,
5270 code == MULT_EXPR
5271 ? ctype : NULL_TREE))))
5272 return t1;
5273 break;
5275 case ABS_EXPR:
5276 /* If widening the type changes it from signed to unsigned, then we
5277 must avoid building ABS_EXPR itself as unsigned. */
5278 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5280 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5281 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5283 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5284 return fold_convert (ctype, t1);
5286 break;
5288 /* FALLTHROUGH */
5289 case NEGATE_EXPR:
5290 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5291 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5292 break;
5294 case MIN_EXPR: case MAX_EXPR:
5295 /* If widening the type changes the signedness, then we can't perform
5296 this optimization as that changes the result. */
5297 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5298 break;
5300 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5301 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5302 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5304 if (tree_int_cst_sgn (c) < 0)
5305 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5307 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5308 fold_convert (ctype, t2));
5310 break;
5312 case LSHIFT_EXPR: case RSHIFT_EXPR:
5313 /* If the second operand is constant, this is a multiplication
5314 or floor division, by a power of two, so we can treat it that
5315 way unless the multiplier or divisor overflows. Signed
5316 left-shift overflow is implementation-defined rather than
5317 undefined in C90, so do not convert signed left shift into
5318 multiplication. */
5319 if (TREE_CODE (op1) == INTEGER_CST
5320 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5321 /* const_binop may not detect overflow correctly,
5322 so check for it explicitly here. */
5323 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5324 && TREE_INT_CST_HIGH (op1) == 0
5325 && 0 != (t1 = fold_convert (ctype,
5326 const_binop (LSHIFT_EXPR,
5327 size_one_node,
5328 op1, 0)))
5329 && ! TREE_OVERFLOW (t1))
5330 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5331 ? MULT_EXPR : FLOOR_DIV_EXPR,
5332 ctype, fold_convert (ctype, op0), t1),
5333 c, code, wide_type);
5334 break;
5336 case PLUS_EXPR: case MINUS_EXPR:
5337 /* See if we can eliminate the operation on both sides. If we can, we
5338 can return a new PLUS or MINUS. If we can't, the only remaining
5339 cases where we can do anything are if the second operand is a
5340 constant. */
5341 t1 = extract_muldiv (op0, c, code, wide_type);
5342 t2 = extract_muldiv (op1, c, code, wide_type);
5343 if (t1 != 0 && t2 != 0
5344 && (code == MULT_EXPR
5345 /* If not multiplication, we can only do this if both operands
5346 are divisible by c. */
5347 || (multiple_of_p (ctype, op0, c)
5348 && multiple_of_p (ctype, op1, c))))
5349 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5350 fold_convert (ctype, t2));
5352 /* If this was a subtraction, negate OP1 and set it to be an addition.
5353 This simplifies the logic below. */
5354 if (tcode == MINUS_EXPR)
5355 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5357 if (TREE_CODE (op1) != INTEGER_CST)
5358 break;
5360 /* If either OP1 or C are negative, this optimization is not safe for
5361 some of the division and remainder types while for others we need
5362 to change the code. */
5363 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5365 if (code == CEIL_DIV_EXPR)
5366 code = FLOOR_DIV_EXPR;
5367 else if (code == FLOOR_DIV_EXPR)
5368 code = CEIL_DIV_EXPR;
5369 else if (code != MULT_EXPR
5370 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5371 break;
5374 /* If it's a multiply or a division/modulus operation of a multiple
5375 of our constant, do the operation and verify it doesn't overflow. */
5376 if (code == MULT_EXPR
5377 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5379 op1 = const_binop (code, fold_convert (ctype, op1),
5380 fold_convert (ctype, c), 0);
5381 /* We allow the constant to overflow with wrapping semantics. */
5382 if (op1 == 0
5383 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5384 break;
5386 else
5387 break;
5389 /* If we have an unsigned type is not a sizetype, we cannot widen
5390 the operation since it will change the result if the original
5391 computation overflowed. */
5392 if (TYPE_UNSIGNED (ctype)
5393 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5394 && ctype != type)
5395 break;
5397 /* If we were able to eliminate our operation from the first side,
5398 apply our operation to the second side and reform the PLUS. */
5399 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5400 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5402 /* The last case is if we are a multiply. In that case, we can
5403 apply the distributive law to commute the multiply and addition
5404 if the multiplication of the constants doesn't overflow. */
5405 if (code == MULT_EXPR)
5406 return fold_build2 (tcode, ctype,
5407 fold_build2 (code, ctype,
5408 fold_convert (ctype, op0),
5409 fold_convert (ctype, c)),
5410 op1);
5412 break;
5414 case MULT_EXPR:
5415 /* We have a special case here if we are doing something like
5416 (C * 8) % 4 since we know that's zero. */
5417 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5418 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5419 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5420 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5421 return omit_one_operand (type, integer_zero_node, op0);
5423 /* ... fall through ... */
5425 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5426 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5427 /* If we can extract our operation from the LHS, do so and return a
5428 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5429 do something only if the second operand is a constant. */
5430 if (same_p
5431 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5432 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5433 fold_convert (ctype, op1));
5434 else if (tcode == MULT_EXPR && code == MULT_EXPR
5435 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5436 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5437 fold_convert (ctype, t1));
5438 else if (TREE_CODE (op1) != INTEGER_CST)
5439 return 0;
5441 /* If these are the same operation types, we can associate them
5442 assuming no overflow. */
5443 if (tcode == code
5444 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5445 fold_convert (ctype, c), 0))
5446 && ! TREE_OVERFLOW (t1))
5447 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5449 /* If these operations "cancel" each other, we have the main
5450 optimizations of this pass, which occur when either constant is a
5451 multiple of the other, in which case we replace this with either an
5452 operation or CODE or TCODE.
5454 If we have an unsigned type that is not a sizetype, we cannot do
5455 this since it will change the result if the original computation
5456 overflowed. */
5457 if ((! TYPE_UNSIGNED (ctype)
5458 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5459 && ! flag_wrapv
5460 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5461 || (tcode == MULT_EXPR
5462 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5463 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5465 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5466 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5467 fold_convert (ctype,
5468 const_binop (TRUNC_DIV_EXPR,
5469 op1, c, 0)));
5470 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5471 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5472 fold_convert (ctype,
5473 const_binop (TRUNC_DIV_EXPR,
5474 c, op1, 0)));
5476 break;
5478 default:
5479 break;
5482 return 0;
5485 /* Return a node which has the indicated constant VALUE (either 0 or
5486 1), and is of the indicated TYPE. */
5488 tree
5489 constant_boolean_node (int value, tree type)
5491 if (type == integer_type_node)
5492 return value ? integer_one_node : integer_zero_node;
5493 else if (type == boolean_type_node)
5494 return value ? boolean_true_node : boolean_false_node;
5495 else
5496 return build_int_cst (type, value);
5500 /* Return true if expr looks like an ARRAY_REF and set base and
5501 offset to the appropriate trees. If there is no offset,
5502 offset is set to NULL_TREE. Base will be canonicalized to
5503 something you can get the element type from using
5504 TREE_TYPE (TREE_TYPE (base)). */
5506 static bool
5507 extract_array_ref (tree expr, tree *base, tree *offset)
5509 /* One canonical form is a PLUS_EXPR with the first
5510 argument being an ADDR_EXPR with a possible NOP_EXPR
5511 attached. */
5512 if (TREE_CODE (expr) == PLUS_EXPR)
5514 tree op0 = TREE_OPERAND (expr, 0);
5515 tree inner_base, dummy1;
5516 /* Strip NOP_EXPRs here because the C frontends and/or
5517 folders present us (int *)&x.a + 4B possibly. */
5518 STRIP_NOPS (op0);
5519 if (extract_array_ref (op0, &inner_base, &dummy1))
5521 *base = inner_base;
5522 if (dummy1 == NULL_TREE)
5523 *offset = TREE_OPERAND (expr, 1);
5524 else
5525 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5526 dummy1, TREE_OPERAND (expr, 1));
5527 return true;
5530 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5531 which we transform into an ADDR_EXPR with appropriate
5532 offset. For other arguments to the ADDR_EXPR we assume
5533 zero offset and as such do not care about the ADDR_EXPR
5534 type and strip possible nops from it. */
5535 else if (TREE_CODE (expr) == ADDR_EXPR)
5537 tree op0 = TREE_OPERAND (expr, 0);
5538 if (TREE_CODE (op0) == ARRAY_REF)
5540 *base = TREE_OPERAND (op0, 0);
5541 *offset = TREE_OPERAND (op0, 1);
5543 else
5545 /* Handle array-to-pointer decay as &a. */
5546 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5547 *base = TREE_OPERAND (expr, 0);
5548 else
5549 *base = expr;
5550 *offset = NULL_TREE;
5552 return true;
5554 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5555 else if (SSA_VAR_P (expr)
5556 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5558 *base = expr;
5559 *offset = NULL_TREE;
5560 return true;
5563 return false;
5567 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5568 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5569 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5570 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5571 COND is the first argument to CODE; otherwise (as in the example
5572 given here), it is the second argument. TYPE is the type of the
5573 original expression. Return NULL_TREE if no simplification is
5574 possible. */
5576 static tree
5577 fold_binary_op_with_conditional_arg (enum tree_code code,
5578 tree type, tree op0, tree op1,
5579 tree cond, tree arg, int cond_first_p)
5581 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5582 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5583 tree test, true_value, false_value;
5584 tree lhs = NULL_TREE;
5585 tree rhs = NULL_TREE;
5587 /* This transformation is only worthwhile if we don't have to wrap
5588 arg in a SAVE_EXPR, and the operation can be simplified on at least
5589 one of the branches once its pushed inside the COND_EXPR. */
5590 if (!TREE_CONSTANT (arg))
5591 return NULL_TREE;
5593 if (TREE_CODE (cond) == COND_EXPR)
5595 test = TREE_OPERAND (cond, 0);
5596 true_value = TREE_OPERAND (cond, 1);
5597 false_value = TREE_OPERAND (cond, 2);
5598 /* If this operand throws an expression, then it does not make
5599 sense to try to perform a logical or arithmetic operation
5600 involving it. */
5601 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5602 lhs = true_value;
5603 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5604 rhs = false_value;
5606 else
5608 tree testtype = TREE_TYPE (cond);
5609 test = cond;
5610 true_value = constant_boolean_node (true, testtype);
5611 false_value = constant_boolean_node (false, testtype);
5614 arg = fold_convert (arg_type, arg);
5615 if (lhs == 0)
5617 true_value = fold_convert (cond_type, true_value);
5618 if (cond_first_p)
5619 lhs = fold_build2 (code, type, true_value, arg);
5620 else
5621 lhs = fold_build2 (code, type, arg, true_value);
5623 if (rhs == 0)
5625 false_value = fold_convert (cond_type, false_value);
5626 if (cond_first_p)
5627 rhs = fold_build2 (code, type, false_value, arg);
5628 else
5629 rhs = fold_build2 (code, type, arg, false_value);
5632 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5633 return fold_convert (type, test);
5637 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5639 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5640 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5641 ADDEND is the same as X.
5643 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5644 and finite. The problematic cases are when X is zero, and its mode
5645 has signed zeros. In the case of rounding towards -infinity,
5646 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5647 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5649 static bool
5650 fold_real_zero_addition_p (tree type, tree addend, int negate)
5652 if (!real_zerop (addend))
5653 return false;
5655 /* Don't allow the fold with -fsignaling-nans. */
5656 if (HONOR_SNANS (TYPE_MODE (type)))
5657 return false;
5659 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5660 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5661 return true;
5663 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5664 if (TREE_CODE (addend) == REAL_CST
5665 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5666 negate = !negate;
5668 /* The mode has signed zeros, and we have to honor their sign.
5669 In this situation, there is only one case we can return true for.
5670 X - 0 is the same as X unless rounding towards -infinity is
5671 supported. */
5672 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5675 /* Subroutine of fold() that checks comparisons of built-in math
5676 functions against real constants.
5678 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5679 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5680 is the type of the result and ARG0 and ARG1 are the operands of the
5681 comparison. ARG1 must be a TREE_REAL_CST.
5683 The function returns the constant folded tree if a simplification
5684 can be made, and NULL_TREE otherwise. */
5686 static tree
5687 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5688 tree type, tree arg0, tree arg1)
5690 REAL_VALUE_TYPE c;
5692 if (BUILTIN_SQRT_P (fcode))
5694 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5695 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5697 c = TREE_REAL_CST (arg1);
5698 if (REAL_VALUE_NEGATIVE (c))
5700 /* sqrt(x) < y is always false, if y is negative. */
5701 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5702 return omit_one_operand (type, integer_zero_node, arg);
5704 /* sqrt(x) > y is always true, if y is negative and we
5705 don't care about NaNs, i.e. negative values of x. */
5706 if (code == NE_EXPR || !HONOR_NANS (mode))
5707 return omit_one_operand (type, integer_one_node, arg);
5709 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5710 return fold_build2 (GE_EXPR, type, arg,
5711 build_real (TREE_TYPE (arg), dconst0));
5713 else if (code == GT_EXPR || code == GE_EXPR)
5715 REAL_VALUE_TYPE c2;
5717 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5718 real_convert (&c2, mode, &c2);
5720 if (REAL_VALUE_ISINF (c2))
5722 /* sqrt(x) > y is x == +Inf, when y is very large. */
5723 if (HONOR_INFINITIES (mode))
5724 return fold_build2 (EQ_EXPR, type, arg,
5725 build_real (TREE_TYPE (arg), c2));
5727 /* sqrt(x) > y is always false, when y is very large
5728 and we don't care about infinities. */
5729 return omit_one_operand (type, integer_zero_node, arg);
5732 /* sqrt(x) > c is the same as x > c*c. */
5733 return fold_build2 (code, type, arg,
5734 build_real (TREE_TYPE (arg), c2));
5736 else if (code == LT_EXPR || code == LE_EXPR)
5738 REAL_VALUE_TYPE c2;
5740 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5741 real_convert (&c2, mode, &c2);
5743 if (REAL_VALUE_ISINF (c2))
5745 /* sqrt(x) < y is always true, when y is a very large
5746 value and we don't care about NaNs or Infinities. */
5747 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5748 return omit_one_operand (type, integer_one_node, arg);
5750 /* sqrt(x) < y is x != +Inf when y is very large and we
5751 don't care about NaNs. */
5752 if (! HONOR_NANS (mode))
5753 return fold_build2 (NE_EXPR, type, arg,
5754 build_real (TREE_TYPE (arg), c2));
5756 /* sqrt(x) < y is x >= 0 when y is very large and we
5757 don't care about Infinities. */
5758 if (! HONOR_INFINITIES (mode))
5759 return fold_build2 (GE_EXPR, type, arg,
5760 build_real (TREE_TYPE (arg), dconst0));
5762 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5763 if (lang_hooks.decls.global_bindings_p () != 0
5764 || CONTAINS_PLACEHOLDER_P (arg))
5765 return NULL_TREE;
5767 arg = save_expr (arg);
5768 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5769 fold_build2 (GE_EXPR, type, arg,
5770 build_real (TREE_TYPE (arg),
5771 dconst0)),
5772 fold_build2 (NE_EXPR, type, arg,
5773 build_real (TREE_TYPE (arg),
5774 c2)));
5777 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5778 if (! HONOR_NANS (mode))
5779 return fold_build2 (code, type, arg,
5780 build_real (TREE_TYPE (arg), c2));
5782 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5783 if (lang_hooks.decls.global_bindings_p () == 0
5784 && ! CONTAINS_PLACEHOLDER_P (arg))
5786 arg = save_expr (arg);
5787 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5788 fold_build2 (GE_EXPR, type, arg,
5789 build_real (TREE_TYPE (arg),
5790 dconst0)),
5791 fold_build2 (code, type, arg,
5792 build_real (TREE_TYPE (arg),
5793 c2)));
5798 return NULL_TREE;
5801 /* Subroutine of fold() that optimizes comparisons against Infinities,
5802 either +Inf or -Inf.
5804 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5805 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5806 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5808 The function returns the constant folded tree if a simplification
5809 can be made, and NULL_TREE otherwise. */
5811 static tree
5812 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5814 enum machine_mode mode;
5815 REAL_VALUE_TYPE max;
5816 tree temp;
5817 bool neg;
5819 mode = TYPE_MODE (TREE_TYPE (arg0));
5821 /* For negative infinity swap the sense of the comparison. */
5822 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5823 if (neg)
5824 code = swap_tree_comparison (code);
5826 switch (code)
5828 case GT_EXPR:
5829 /* x > +Inf is always false, if with ignore sNANs. */
5830 if (HONOR_SNANS (mode))
5831 return NULL_TREE;
5832 return omit_one_operand (type, integer_zero_node, arg0);
5834 case LE_EXPR:
5835 /* x <= +Inf is always true, if we don't case about NaNs. */
5836 if (! HONOR_NANS (mode))
5837 return omit_one_operand (type, integer_one_node, arg0);
5839 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5840 if (lang_hooks.decls.global_bindings_p () == 0
5841 && ! CONTAINS_PLACEHOLDER_P (arg0))
5843 arg0 = save_expr (arg0);
5844 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5846 break;
5848 case EQ_EXPR:
5849 case GE_EXPR:
5850 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5851 real_maxval (&max, neg, mode);
5852 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5853 arg0, build_real (TREE_TYPE (arg0), max));
5855 case LT_EXPR:
5856 /* x < +Inf is always equal to x <= DBL_MAX. */
5857 real_maxval (&max, neg, mode);
5858 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5859 arg0, build_real (TREE_TYPE (arg0), max));
5861 case NE_EXPR:
5862 /* x != +Inf is always equal to !(x > DBL_MAX). */
5863 real_maxval (&max, neg, mode);
5864 if (! HONOR_NANS (mode))
5865 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5866 arg0, build_real (TREE_TYPE (arg0), max));
5868 /* The transformation below creates non-gimple code and thus is
5869 not appropriate if we are in gimple form. */
5870 if (in_gimple_form)
5871 return NULL_TREE;
5873 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5874 arg0, build_real (TREE_TYPE (arg0), max));
5875 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5877 default:
5878 break;
5881 return NULL_TREE;
5884 /* Subroutine of fold() that optimizes comparisons of a division by
5885 a nonzero integer constant against an integer constant, i.e.
5886 X/C1 op C2.
5888 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5889 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5890 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5892 The function returns the constant folded tree if a simplification
5893 can be made, and NULL_TREE otherwise. */
5895 static tree
5896 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5898 tree prod, tmp, hi, lo;
5899 tree arg00 = TREE_OPERAND (arg0, 0);
5900 tree arg01 = TREE_OPERAND (arg0, 1);
5901 unsigned HOST_WIDE_INT lpart;
5902 HOST_WIDE_INT hpart;
5903 int overflow;
5905 /* We have to do this the hard way to detect unsigned overflow.
5906 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5907 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5908 TREE_INT_CST_HIGH (arg01),
5909 TREE_INT_CST_LOW (arg1),
5910 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5911 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5912 prod = force_fit_type (prod, -1, overflow, false);
5914 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5916 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5917 lo = prod;
5919 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5920 overflow = add_double (TREE_INT_CST_LOW (prod),
5921 TREE_INT_CST_HIGH (prod),
5922 TREE_INT_CST_LOW (tmp),
5923 TREE_INT_CST_HIGH (tmp),
5924 &lpart, &hpart);
5925 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5926 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5927 TREE_CONSTANT_OVERFLOW (prod));
5929 else if (tree_int_cst_sgn (arg01) >= 0)
5931 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5932 switch (tree_int_cst_sgn (arg1))
5934 case -1:
5935 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5936 hi = prod;
5937 break;
5939 case 0:
5940 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5941 hi = tmp;
5942 break;
5944 case 1:
5945 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5946 lo = prod;
5947 break;
5949 default:
5950 gcc_unreachable ();
5953 else
5955 /* A negative divisor reverses the relational operators. */
5956 code = swap_tree_comparison (code);
5958 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5959 switch (tree_int_cst_sgn (arg1))
5961 case -1:
5962 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5963 lo = prod;
5964 break;
5966 case 0:
5967 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5968 lo = tmp;
5969 break;
5971 case 1:
5972 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5973 hi = prod;
5974 break;
5976 default:
5977 gcc_unreachable ();
5981 switch (code)
5983 case EQ_EXPR:
5984 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5985 return omit_one_operand (type, integer_zero_node, arg00);
5986 if (TREE_OVERFLOW (hi))
5987 return fold_build2 (GE_EXPR, type, arg00, lo);
5988 if (TREE_OVERFLOW (lo))
5989 return fold_build2 (LE_EXPR, type, arg00, hi);
5990 return build_range_check (type, arg00, 1, lo, hi);
5992 case NE_EXPR:
5993 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5994 return omit_one_operand (type, integer_one_node, arg00);
5995 if (TREE_OVERFLOW (hi))
5996 return fold_build2 (LT_EXPR, type, arg00, lo);
5997 if (TREE_OVERFLOW (lo))
5998 return fold_build2 (GT_EXPR, type, arg00, hi);
5999 return build_range_check (type, arg00, 0, lo, hi);
6001 case LT_EXPR:
6002 if (TREE_OVERFLOW (lo))
6003 return omit_one_operand (type, integer_zero_node, arg00);
6004 return fold_build2 (LT_EXPR, type, arg00, lo);
6006 case LE_EXPR:
6007 if (TREE_OVERFLOW (hi))
6008 return omit_one_operand (type, integer_one_node, arg00);
6009 return fold_build2 (LE_EXPR, type, arg00, hi);
6011 case GT_EXPR:
6012 if (TREE_OVERFLOW (hi))
6013 return omit_one_operand (type, integer_zero_node, arg00);
6014 return fold_build2 (GT_EXPR, type, arg00, hi);
6016 case GE_EXPR:
6017 if (TREE_OVERFLOW (lo))
6018 return omit_one_operand (type, integer_one_node, arg00);
6019 return fold_build2 (GE_EXPR, type, arg00, lo);
6021 default:
6022 break;
6025 return NULL_TREE;
6029 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6030 equality/inequality test, then return a simplified form of the test
6031 using a sign testing. Otherwise return NULL. TYPE is the desired
6032 result type. */
6034 static tree
6035 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6036 tree result_type)
6038 /* If this is testing a single bit, we can optimize the test. */
6039 if ((code == NE_EXPR || code == EQ_EXPR)
6040 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6041 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6043 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6044 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6045 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6047 if (arg00 != NULL_TREE
6048 /* This is only a win if casting to a signed type is cheap,
6049 i.e. when arg00's type is not a partial mode. */
6050 && TYPE_PRECISION (TREE_TYPE (arg00))
6051 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6053 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6054 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6055 result_type, fold_convert (stype, arg00),
6056 fold_convert (stype, integer_zero_node));
6060 return NULL_TREE;
6063 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6064 equality/inequality test, then return a simplified form of
6065 the test using shifts and logical operations. Otherwise return
6066 NULL. TYPE is the desired result type. */
6068 tree
6069 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6070 tree result_type)
6072 /* If this is testing a single bit, we can optimize the test. */
6073 if ((code == NE_EXPR || code == EQ_EXPR)
6074 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6075 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6077 tree inner = TREE_OPERAND (arg0, 0);
6078 tree type = TREE_TYPE (arg0);
6079 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6080 enum machine_mode operand_mode = TYPE_MODE (type);
6081 int ops_unsigned;
6082 tree signed_type, unsigned_type, intermediate_type;
6083 tree tem;
6085 /* First, see if we can fold the single bit test into a sign-bit
6086 test. */
6087 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6088 result_type);
6089 if (tem)
6090 return tem;
6092 /* Otherwise we have (A & C) != 0 where C is a single bit,
6093 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6094 Similarly for (A & C) == 0. */
6096 /* If INNER is a right shift of a constant and it plus BITNUM does
6097 not overflow, adjust BITNUM and INNER. */
6098 if (TREE_CODE (inner) == RSHIFT_EXPR
6099 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6100 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6101 && bitnum < TYPE_PRECISION (type)
6102 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6103 bitnum - TYPE_PRECISION (type)))
6105 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6106 inner = TREE_OPERAND (inner, 0);
6109 /* If we are going to be able to omit the AND below, we must do our
6110 operations as unsigned. If we must use the AND, we have a choice.
6111 Normally unsigned is faster, but for some machines signed is. */
6112 #ifdef LOAD_EXTEND_OP
6113 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6114 && !flag_syntax_only) ? 0 : 1;
6115 #else
6116 ops_unsigned = 1;
6117 #endif
6119 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6120 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6121 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6122 inner = fold_convert (intermediate_type, inner);
6124 if (bitnum != 0)
6125 inner = build2 (RSHIFT_EXPR, intermediate_type,
6126 inner, size_int (bitnum));
6128 if (code == EQ_EXPR)
6129 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6130 inner, integer_one_node);
6132 /* Put the AND last so it can combine with more things. */
6133 inner = build2 (BIT_AND_EXPR, intermediate_type,
6134 inner, integer_one_node);
6136 /* Make sure to return the proper type. */
6137 inner = fold_convert (result_type, inner);
6139 return inner;
6141 return NULL_TREE;
6144 /* Check whether we are allowed to reorder operands arg0 and arg1,
6145 such that the evaluation of arg1 occurs before arg0. */
6147 static bool
6148 reorder_operands_p (tree arg0, tree arg1)
6150 if (! flag_evaluation_order)
6151 return true;
6152 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6153 return true;
6154 return ! TREE_SIDE_EFFECTS (arg0)
6155 && ! TREE_SIDE_EFFECTS (arg1);
6158 /* Test whether it is preferable two swap two operands, ARG0 and
6159 ARG1, for example because ARG0 is an integer constant and ARG1
6160 isn't. If REORDER is true, only recommend swapping if we can
6161 evaluate the operands in reverse order. */
6163 bool
6164 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6166 STRIP_SIGN_NOPS (arg0);
6167 STRIP_SIGN_NOPS (arg1);
6169 if (TREE_CODE (arg1) == INTEGER_CST)
6170 return 0;
6171 if (TREE_CODE (arg0) == INTEGER_CST)
6172 return 1;
6174 if (TREE_CODE (arg1) == REAL_CST)
6175 return 0;
6176 if (TREE_CODE (arg0) == REAL_CST)
6177 return 1;
6179 if (TREE_CODE (arg1) == COMPLEX_CST)
6180 return 0;
6181 if (TREE_CODE (arg0) == COMPLEX_CST)
6182 return 1;
6184 if (TREE_CONSTANT (arg1))
6185 return 0;
6186 if (TREE_CONSTANT (arg0))
6187 return 1;
6189 if (optimize_size)
6190 return 0;
6192 if (reorder && flag_evaluation_order
6193 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6194 return 0;
6196 if (DECL_P (arg1))
6197 return 0;
6198 if (DECL_P (arg0))
6199 return 1;
6201 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6202 for commutative and comparison operators. Ensuring a canonical
6203 form allows the optimizers to find additional redundancies without
6204 having to explicitly check for both orderings. */
6205 if (TREE_CODE (arg0) == SSA_NAME
6206 && TREE_CODE (arg1) == SSA_NAME
6207 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6208 return 1;
6210 return 0;
6213 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6214 ARG0 is extended to a wider type. */
6216 static tree
6217 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6219 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6220 tree arg1_unw;
6221 tree shorter_type, outer_type;
6222 tree min, max;
6223 bool above, below;
6225 if (arg0_unw == arg0)
6226 return NULL_TREE;
6227 shorter_type = TREE_TYPE (arg0_unw);
6229 #ifdef HAVE_canonicalize_funcptr_for_compare
6230 /* Disable this optimization if we're casting a function pointer
6231 type on targets that require function pointer canonicalization. */
6232 if (HAVE_canonicalize_funcptr_for_compare
6233 && TREE_CODE (shorter_type) == POINTER_TYPE
6234 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6235 return NULL_TREE;
6236 #endif
6238 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6239 return NULL_TREE;
6241 arg1_unw = get_unwidened (arg1, shorter_type);
6242 if (!arg1_unw)
6243 return NULL_TREE;
6245 /* If possible, express the comparison in the shorter mode. */
6246 if ((code == EQ_EXPR || code == NE_EXPR
6247 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6248 && (TREE_TYPE (arg1_unw) == shorter_type
6249 || (TREE_CODE (arg1_unw) == INTEGER_CST
6250 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6251 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6252 && int_fits_type_p (arg1_unw, shorter_type))))
6253 return fold_build2 (code, type, arg0_unw,
6254 fold_convert (shorter_type, arg1_unw));
6256 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6257 return NULL_TREE;
6259 /* If we are comparing with the integer that does not fit into the range
6260 of the shorter type, the result is known. */
6261 outer_type = TREE_TYPE (arg1_unw);
6262 min = lower_bound_in_type (outer_type, shorter_type);
6263 max = upper_bound_in_type (outer_type, shorter_type);
6265 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6266 max, arg1_unw));
6267 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6268 arg1_unw, min));
6270 switch (code)
6272 case EQ_EXPR:
6273 if (above || below)
6274 return omit_one_operand (type, integer_zero_node, arg0);
6275 break;
6277 case NE_EXPR:
6278 if (above || below)
6279 return omit_one_operand (type, integer_one_node, arg0);
6280 break;
6282 case LT_EXPR:
6283 case LE_EXPR:
6284 if (above)
6285 return omit_one_operand (type, integer_one_node, arg0);
6286 else if (below)
6287 return omit_one_operand (type, integer_zero_node, arg0);
6289 case GT_EXPR:
6290 case GE_EXPR:
6291 if (above)
6292 return omit_one_operand (type, integer_zero_node, arg0);
6293 else if (below)
6294 return omit_one_operand (type, integer_one_node, arg0);
6296 default:
6297 break;
6300 return NULL_TREE;
6303 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6304 ARG0 just the signedness is changed. */
6306 static tree
6307 fold_sign_changed_comparison (enum tree_code code, tree type,
6308 tree arg0, tree arg1)
6310 tree arg0_inner, tmp;
6311 tree inner_type, outer_type;
6313 if (TREE_CODE (arg0) != NOP_EXPR
6314 && TREE_CODE (arg0) != CONVERT_EXPR)
6315 return NULL_TREE;
6317 outer_type = TREE_TYPE (arg0);
6318 arg0_inner = TREE_OPERAND (arg0, 0);
6319 inner_type = TREE_TYPE (arg0_inner);
6321 #ifdef HAVE_canonicalize_funcptr_for_compare
6322 /* Disable this optimization if we're casting a function pointer
6323 type on targets that require function pointer canonicalization. */
6324 if (HAVE_canonicalize_funcptr_for_compare
6325 && TREE_CODE (inner_type) == POINTER_TYPE
6326 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6327 return NULL_TREE;
6328 #endif
6330 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6331 return NULL_TREE;
6333 if (TREE_CODE (arg1) != INTEGER_CST
6334 && !((TREE_CODE (arg1) == NOP_EXPR
6335 || TREE_CODE (arg1) == CONVERT_EXPR)
6336 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6337 return NULL_TREE;
6339 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6340 && code != NE_EXPR
6341 && code != EQ_EXPR)
6342 return NULL_TREE;
6344 if (TREE_CODE (arg1) == INTEGER_CST)
6346 tmp = build_int_cst_wide (inner_type,
6347 TREE_INT_CST_LOW (arg1),
6348 TREE_INT_CST_HIGH (arg1));
6349 arg1 = force_fit_type (tmp, 0,
6350 TREE_OVERFLOW (arg1),
6351 TREE_CONSTANT_OVERFLOW (arg1));
6353 else
6354 arg1 = fold_convert (inner_type, arg1);
6356 return fold_build2 (code, type, arg0_inner, arg1);
6359 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6360 step of the array. Reconstructs s and delta in the case of s * delta
6361 being an integer constant (and thus already folded).
6362 ADDR is the address. MULT is the multiplicative expression.
6363 If the function succeeds, the new address expression is returned. Otherwise
6364 NULL_TREE is returned. */
6366 static tree
6367 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6369 tree s, delta, step;
6370 tree ref = TREE_OPERAND (addr, 0), pref;
6371 tree ret, pos;
6372 tree itype;
6374 /* Canonicalize op1 into a possibly non-constant delta
6375 and an INTEGER_CST s. */
6376 if (TREE_CODE (op1) == MULT_EXPR)
6378 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6380 STRIP_NOPS (arg0);
6381 STRIP_NOPS (arg1);
6383 if (TREE_CODE (arg0) == INTEGER_CST)
6385 s = arg0;
6386 delta = arg1;
6388 else if (TREE_CODE (arg1) == INTEGER_CST)
6390 s = arg1;
6391 delta = arg0;
6393 else
6394 return NULL_TREE;
6396 else if (TREE_CODE (op1) == INTEGER_CST)
6398 delta = op1;
6399 s = NULL_TREE;
6401 else
6403 /* Simulate we are delta * 1. */
6404 delta = op1;
6405 s = integer_one_node;
6408 for (;; ref = TREE_OPERAND (ref, 0))
6410 if (TREE_CODE (ref) == ARRAY_REF)
6412 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6413 if (! itype)
6414 continue;
6416 step = array_ref_element_size (ref);
6417 if (TREE_CODE (step) != INTEGER_CST)
6418 continue;
6420 if (s)
6422 if (! tree_int_cst_equal (step, s))
6423 continue;
6425 else
6427 /* Try if delta is a multiple of step. */
6428 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6429 if (! tmp)
6430 continue;
6431 delta = tmp;
6434 break;
6437 if (!handled_component_p (ref))
6438 return NULL_TREE;
6441 /* We found the suitable array reference. So copy everything up to it,
6442 and replace the index. */
6444 pref = TREE_OPERAND (addr, 0);
6445 ret = copy_node (pref);
6446 pos = ret;
6448 while (pref != ref)
6450 pref = TREE_OPERAND (pref, 0);
6451 TREE_OPERAND (pos, 0) = copy_node (pref);
6452 pos = TREE_OPERAND (pos, 0);
6455 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6456 fold_convert (itype,
6457 TREE_OPERAND (pos, 1)),
6458 fold_convert (itype, delta));
6460 return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6464 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6465 means A >= Y && A != MAX, but in this case we know that
6466 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6468 static tree
6469 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6471 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6473 if (TREE_CODE (bound) == LT_EXPR)
6474 a = TREE_OPERAND (bound, 0);
6475 else if (TREE_CODE (bound) == GT_EXPR)
6476 a = TREE_OPERAND (bound, 1);
6477 else
6478 return NULL_TREE;
6480 typea = TREE_TYPE (a);
6481 if (!INTEGRAL_TYPE_P (typea)
6482 && !POINTER_TYPE_P (typea))
6483 return NULL_TREE;
6485 if (TREE_CODE (ineq) == LT_EXPR)
6487 a1 = TREE_OPERAND (ineq, 1);
6488 y = TREE_OPERAND (ineq, 0);
6490 else if (TREE_CODE (ineq) == GT_EXPR)
6492 a1 = TREE_OPERAND (ineq, 0);
6493 y = TREE_OPERAND (ineq, 1);
6495 else
6496 return NULL_TREE;
6498 if (TREE_TYPE (a1) != typea)
6499 return NULL_TREE;
6501 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6502 if (!integer_onep (diff))
6503 return NULL_TREE;
6505 return fold_build2 (GE_EXPR, type, a, y);
6508 /* Fold a unary expression of code CODE and type TYPE with operand
6509 OP0. Return the folded expression if folding is successful.
6510 Otherwise, return NULL_TREE. */
6512 tree
6513 fold_unary (enum tree_code code, tree type, tree op0)
6515 tree tem;
6516 tree arg0;
6517 enum tree_code_class kind = TREE_CODE_CLASS (code);
6519 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6520 && TREE_CODE_LENGTH (code) == 1);
6522 arg0 = op0;
6523 if (arg0)
6525 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6527 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6528 STRIP_SIGN_NOPS (arg0);
6530 else
6532 /* Strip any conversions that don't change the mode. This
6533 is safe for every expression, except for a comparison
6534 expression because its signedness is derived from its
6535 operands.
6537 Note that this is done as an internal manipulation within
6538 the constant folder, in order to find the simplest
6539 representation of the arguments so that their form can be
6540 studied. In any cases, the appropriate type conversions
6541 should be put back in the tree that will get out of the
6542 constant folder. */
6543 STRIP_NOPS (arg0);
6547 if (TREE_CODE_CLASS (code) == tcc_unary)
6549 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6550 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6551 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6552 else if (TREE_CODE (arg0) == COND_EXPR)
6554 tree arg01 = TREE_OPERAND (arg0, 1);
6555 tree arg02 = TREE_OPERAND (arg0, 2);
6556 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6557 arg01 = fold_build1 (code, type, arg01);
6558 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6559 arg02 = fold_build1 (code, type, arg02);
6560 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6561 arg01, arg02);
6563 /* If this was a conversion, and all we did was to move into
6564 inside the COND_EXPR, bring it back out. But leave it if
6565 it is a conversion from integer to integer and the
6566 result precision is no wider than a word since such a
6567 conversion is cheap and may be optimized away by combine,
6568 while it couldn't if it were outside the COND_EXPR. Then return
6569 so we don't get into an infinite recursion loop taking the
6570 conversion out and then back in. */
6572 if ((code == NOP_EXPR || code == CONVERT_EXPR
6573 || code == NON_LVALUE_EXPR)
6574 && TREE_CODE (tem) == COND_EXPR
6575 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6576 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6577 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6578 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6579 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6580 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6581 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6582 && (INTEGRAL_TYPE_P
6583 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6584 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6585 || flag_syntax_only))
6586 tem = build1 (code, type,
6587 build3 (COND_EXPR,
6588 TREE_TYPE (TREE_OPERAND
6589 (TREE_OPERAND (tem, 1), 0)),
6590 TREE_OPERAND (tem, 0),
6591 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6592 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6593 return tem;
6595 else if (COMPARISON_CLASS_P (arg0))
6597 if (TREE_CODE (type) == BOOLEAN_TYPE)
6599 arg0 = copy_node (arg0);
6600 TREE_TYPE (arg0) = type;
6601 return arg0;
6603 else if (TREE_CODE (type) != INTEGER_TYPE)
6604 return fold_build3 (COND_EXPR, type, arg0,
6605 fold_build1 (code, type,
6606 integer_one_node),
6607 fold_build1 (code, type,
6608 integer_zero_node));
6612 switch (code)
6614 case NOP_EXPR:
6615 case FLOAT_EXPR:
6616 case CONVERT_EXPR:
6617 case FIX_TRUNC_EXPR:
6618 case FIX_CEIL_EXPR:
6619 case FIX_FLOOR_EXPR:
6620 case FIX_ROUND_EXPR:
6621 if (TREE_TYPE (op0) == type)
6622 return op0;
6624 /* Handle cases of two conversions in a row. */
6625 if (TREE_CODE (op0) == NOP_EXPR
6626 || TREE_CODE (op0) == CONVERT_EXPR)
6628 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6629 tree inter_type = TREE_TYPE (op0);
6630 int inside_int = INTEGRAL_TYPE_P (inside_type);
6631 int inside_ptr = POINTER_TYPE_P (inside_type);
6632 int inside_float = FLOAT_TYPE_P (inside_type);
6633 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6634 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6635 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6636 int inter_int = INTEGRAL_TYPE_P (inter_type);
6637 int inter_ptr = POINTER_TYPE_P (inter_type);
6638 int inter_float = FLOAT_TYPE_P (inter_type);
6639 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6640 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6641 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6642 int final_int = INTEGRAL_TYPE_P (type);
6643 int final_ptr = POINTER_TYPE_P (type);
6644 int final_float = FLOAT_TYPE_P (type);
6645 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6646 unsigned int final_prec = TYPE_PRECISION (type);
6647 int final_unsignedp = TYPE_UNSIGNED (type);
6649 /* In addition to the cases of two conversions in a row
6650 handled below, if we are converting something to its own
6651 type via an object of identical or wider precision, neither
6652 conversion is needed. */
6653 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6654 && ((inter_int && final_int) || (inter_float && final_float))
6655 && inter_prec >= final_prec)
6656 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6658 /* Likewise, if the intermediate and final types are either both
6659 float or both integer, we don't need the middle conversion if
6660 it is wider than the final type and doesn't change the signedness
6661 (for integers). Avoid this if the final type is a pointer
6662 since then we sometimes need the inner conversion. Likewise if
6663 the outer has a precision not equal to the size of its mode. */
6664 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6665 || (inter_float && inside_float)
6666 || (inter_vec && inside_vec))
6667 && inter_prec >= inside_prec
6668 && (inter_float || inter_vec
6669 || inter_unsignedp == inside_unsignedp)
6670 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6671 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6672 && ! final_ptr
6673 && (! final_vec || inter_prec == inside_prec))
6674 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6676 /* If we have a sign-extension of a zero-extended value, we can
6677 replace that by a single zero-extension. */
6678 if (inside_int && inter_int && final_int
6679 && inside_prec < inter_prec && inter_prec < final_prec
6680 && inside_unsignedp && !inter_unsignedp)
6681 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6683 /* Two conversions in a row are not needed unless:
6684 - some conversion is floating-point (overstrict for now), or
6685 - some conversion is a vector (overstrict for now), or
6686 - the intermediate type is narrower than both initial and
6687 final, or
6688 - the intermediate type and innermost type differ in signedness,
6689 and the outermost type is wider than the intermediate, or
6690 - the initial type is a pointer type and the precisions of the
6691 intermediate and final types differ, or
6692 - the final type is a pointer type and the precisions of the
6693 initial and intermediate types differ. */
6694 if (! inside_float && ! inter_float && ! final_float
6695 && ! inside_vec && ! inter_vec && ! final_vec
6696 && (inter_prec > inside_prec || inter_prec > final_prec)
6697 && ! (inside_int && inter_int
6698 && inter_unsignedp != inside_unsignedp
6699 && inter_prec < final_prec)
6700 && ((inter_unsignedp && inter_prec > inside_prec)
6701 == (final_unsignedp && final_prec > inter_prec))
6702 && ! (inside_ptr && inter_prec != final_prec)
6703 && ! (final_ptr && inside_prec != inter_prec)
6704 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6705 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6706 && ! final_ptr)
6707 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6710 if (TREE_CODE (op0) == MODIFY_EXPR
6711 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6712 /* Detect assigning a bitfield. */
6713 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6714 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6716 /* Don't leave an assignment inside a conversion
6717 unless assigning a bitfield. */
6718 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6719 /* First do the assignment, then return converted constant. */
6720 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6721 TREE_NO_WARNING (tem) = 1;
6722 TREE_USED (tem) = 1;
6723 return tem;
6726 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6727 constants (if x has signed type, the sign bit cannot be set
6728 in c). This folds extension into the BIT_AND_EXPR. */
6729 if (INTEGRAL_TYPE_P (type)
6730 && TREE_CODE (type) != BOOLEAN_TYPE
6731 && TREE_CODE (op0) == BIT_AND_EXPR
6732 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6734 tree and = op0;
6735 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6736 int change = 0;
6738 if (TYPE_UNSIGNED (TREE_TYPE (and))
6739 || (TYPE_PRECISION (type)
6740 <= TYPE_PRECISION (TREE_TYPE (and))))
6741 change = 1;
6742 else if (TYPE_PRECISION (TREE_TYPE (and1))
6743 <= HOST_BITS_PER_WIDE_INT
6744 && host_integerp (and1, 1))
6746 unsigned HOST_WIDE_INT cst;
6748 cst = tree_low_cst (and1, 1);
6749 cst &= (HOST_WIDE_INT) -1
6750 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6751 change = (cst == 0);
6752 #ifdef LOAD_EXTEND_OP
6753 if (change
6754 && !flag_syntax_only
6755 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6756 == ZERO_EXTEND))
6758 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6759 and0 = fold_convert (uns, and0);
6760 and1 = fold_convert (uns, and1);
6762 #endif
6764 if (change)
6766 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6767 TREE_INT_CST_HIGH (and1));
6768 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6769 TREE_CONSTANT_OVERFLOW (and1));
6770 return fold_build2 (BIT_AND_EXPR, type,
6771 fold_convert (type, and0), tem);
6775 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6776 T2 being pointers to types of the same size. */
6777 if (POINTER_TYPE_P (type)
6778 && BINARY_CLASS_P (arg0)
6779 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6780 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6782 tree arg00 = TREE_OPERAND (arg0, 0);
6783 tree t0 = type;
6784 tree t1 = TREE_TYPE (arg00);
6785 tree tt0 = TREE_TYPE (t0);
6786 tree tt1 = TREE_TYPE (t1);
6787 tree s0 = TYPE_SIZE (tt0);
6788 tree s1 = TYPE_SIZE (tt1);
6790 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6791 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6792 TREE_OPERAND (arg0, 1));
6795 tem = fold_convert_const (code, type, arg0);
6796 return tem ? tem : NULL_TREE;
6798 case VIEW_CONVERT_EXPR:
6799 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6800 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6801 return NULL_TREE;
6803 case NEGATE_EXPR:
6804 if (negate_expr_p (arg0))
6805 return fold_convert (type, negate_expr (arg0));
6806 /* Convert - (~A) to A + 1. */
6807 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6808 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6809 build_int_cst (type, 1));
6810 return NULL_TREE;
6812 case ABS_EXPR:
6813 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6814 return fold_abs_const (arg0, type);
6815 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6816 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
6817 /* Convert fabs((double)float) into (double)fabsf(float). */
6818 else if (TREE_CODE (arg0) == NOP_EXPR
6819 && TREE_CODE (type) == REAL_TYPE)
6821 tree targ0 = strip_float_extensions (arg0);
6822 if (targ0 != arg0)
6823 return fold_convert (type, fold_build1 (ABS_EXPR,
6824 TREE_TYPE (targ0),
6825 targ0));
6827 else if (tree_expr_nonnegative_p (arg0))
6828 return arg0;
6830 /* Strip sign ops from argument. */
6831 if (TREE_CODE (type) == REAL_TYPE)
6833 tem = fold_strip_sign_ops (arg0);
6834 if (tem)
6835 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
6837 return NULL_TREE;
6839 case CONJ_EXPR:
6840 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6841 return fold_convert (type, arg0);
6842 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6843 return build2 (COMPLEX_EXPR, type,
6844 TREE_OPERAND (arg0, 0),
6845 negate_expr (TREE_OPERAND (arg0, 1)));
6846 else if (TREE_CODE (arg0) == COMPLEX_CST)
6847 return build_complex (type, TREE_REALPART (arg0),
6848 negate_expr (TREE_IMAGPART (arg0)));
6849 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6850 return fold_build2 (TREE_CODE (arg0), type,
6851 fold_build1 (CONJ_EXPR, type,
6852 TREE_OPERAND (arg0, 0)),
6853 fold_build1 (CONJ_EXPR, type,
6854 TREE_OPERAND (arg0, 1)));
6855 else if (TREE_CODE (arg0) == CONJ_EXPR)
6856 return TREE_OPERAND (arg0, 0);
6857 return NULL_TREE;
6859 case BIT_NOT_EXPR:
6860 if (TREE_CODE (arg0) == INTEGER_CST)
6861 return fold_not_const (arg0, type);
6862 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6863 return TREE_OPERAND (arg0, 0);
6864 /* Convert ~ (-A) to A - 1. */
6865 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
6866 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
6867 build_int_cst (type, 1));
6868 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
6869 else if (INTEGRAL_TYPE_P (type)
6870 && ((TREE_CODE (arg0) == MINUS_EXPR
6871 && integer_onep (TREE_OPERAND (arg0, 1)))
6872 || (TREE_CODE (arg0) == PLUS_EXPR
6873 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
6874 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
6875 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
6876 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6877 && (tem = fold_unary (BIT_NOT_EXPR, type,
6878 fold_convert (type,
6879 TREE_OPERAND (arg0, 0)))))
6880 return fold_build2 (BIT_XOR_EXPR, type, tem,
6881 fold_convert (type, TREE_OPERAND (arg0, 1)));
6882 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6883 && (tem = fold_unary (BIT_NOT_EXPR, type,
6884 fold_convert (type,
6885 TREE_OPERAND (arg0, 1)))))
6886 return fold_build2 (BIT_XOR_EXPR, type,
6887 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
6889 return NULL_TREE;
6891 case TRUTH_NOT_EXPR:
6892 /* The argument to invert_truthvalue must have Boolean type. */
6893 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
6894 arg0 = fold_convert (boolean_type_node, arg0);
6896 /* Note that the operand of this must be an int
6897 and its values must be 0 or 1.
6898 ("true" is a fixed value perhaps depending on the language,
6899 but we don't handle values other than 1 correctly yet.) */
6900 tem = invert_truthvalue (arg0);
6901 /* Avoid infinite recursion. */
6902 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6903 return NULL_TREE;
6904 return fold_convert (type, tem);
6906 case REALPART_EXPR:
6907 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6908 return NULL_TREE;
6909 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6910 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6911 TREE_OPERAND (arg0, 1));
6912 else if (TREE_CODE (arg0) == COMPLEX_CST)
6913 return TREE_REALPART (arg0);
6914 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6915 return fold_build2 (TREE_CODE (arg0), type,
6916 fold_build1 (REALPART_EXPR, type,
6917 TREE_OPERAND (arg0, 0)),
6918 fold_build1 (REALPART_EXPR, type,
6919 TREE_OPERAND (arg0, 1)));
6920 return NULL_TREE;
6922 case IMAGPART_EXPR:
6923 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6924 return fold_convert (type, integer_zero_node);
6925 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6926 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
6927 TREE_OPERAND (arg0, 0));
6928 else if (TREE_CODE (arg0) == COMPLEX_CST)
6929 return TREE_IMAGPART (arg0);
6930 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6931 return fold_build2 (TREE_CODE (arg0), type,
6932 fold_build1 (IMAGPART_EXPR, type,
6933 TREE_OPERAND (arg0, 0)),
6934 fold_build1 (IMAGPART_EXPR, type,
6935 TREE_OPERAND (arg0, 1)));
6936 return NULL_TREE;
6938 default:
6939 return NULL_TREE;
6940 } /* switch (code) */
6943 /* Fold a binary expression of code CODE and type TYPE with operands
6944 OP0 and OP1. Return the folded expression if folding is
6945 successful. Otherwise, return NULL_TREE. */
6947 tree
6948 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
6950 tree t1 = NULL_TREE;
6951 tree tem;
6952 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
6953 enum tree_code_class kind = TREE_CODE_CLASS (code);
6955 /* WINS will be nonzero when the switch is done
6956 if all operands are constant. */
6957 int wins = 1;
6959 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6960 && TREE_CODE_LENGTH (code) == 2);
6962 arg0 = op0;
6963 arg1 = op1;
6965 if (arg0)
6967 tree subop;
6969 /* Strip any conversions that don't change the mode. This is
6970 safe for every expression, except for a comparison expression
6971 because its signedness is derived from its operands. So, in
6972 the latter case, only strip conversions that don't change the
6973 signedness.
6975 Note that this is done as an internal manipulation within the
6976 constant folder, in order to find the simplest representation
6977 of the arguments so that their form can be studied. In any
6978 cases, the appropriate type conversions should be put back in
6979 the tree that will get out of the constant folder. */
6980 if (kind == tcc_comparison)
6981 STRIP_SIGN_NOPS (arg0);
6982 else
6983 STRIP_NOPS (arg0);
6985 if (TREE_CODE (arg0) == COMPLEX_CST)
6986 subop = TREE_REALPART (arg0);
6987 else
6988 subop = arg0;
6990 if (TREE_CODE (subop) != INTEGER_CST
6991 && TREE_CODE (subop) != REAL_CST)
6992 /* Note that TREE_CONSTANT isn't enough:
6993 static var addresses are constant but we can't
6994 do arithmetic on them. */
6995 wins = 0;
6998 if (arg1)
7000 tree subop;
7002 /* Strip any conversions that don't change the mode. This is
7003 safe for every expression, except for a comparison expression
7004 because its signedness is derived from its operands. So, in
7005 the latter case, only strip conversions that don't change the
7006 signedness.
7008 Note that this is done as an internal manipulation within the
7009 constant folder, in order to find the simplest representation
7010 of the arguments so that their form can be studied. In any
7011 cases, the appropriate type conversions should be put back in
7012 the tree that will get out of the constant folder. */
7013 if (kind == tcc_comparison)
7014 STRIP_SIGN_NOPS (arg1);
7015 else
7016 STRIP_NOPS (arg1);
7018 if (TREE_CODE (arg1) == COMPLEX_CST)
7019 subop = TREE_REALPART (arg1);
7020 else
7021 subop = arg1;
7023 if (TREE_CODE (subop) != INTEGER_CST
7024 && TREE_CODE (subop) != REAL_CST)
7025 /* Note that TREE_CONSTANT isn't enough:
7026 static var addresses are constant but we can't
7027 do arithmetic on them. */
7028 wins = 0;
7031 /* If this is a commutative operation, and ARG0 is a constant, move it
7032 to ARG1 to reduce the number of tests below. */
7033 if (commutative_tree_code (code)
7034 && tree_swap_operands_p (arg0, arg1, true))
7035 return fold_build2 (code, type, op1, op0);
7037 /* Now WINS is set as described above,
7038 ARG0 is the first operand of EXPR,
7039 and ARG1 is the second operand (if it has more than one operand).
7041 First check for cases where an arithmetic operation is applied to a
7042 compound, conditional, or comparison operation. Push the arithmetic
7043 operation inside the compound or conditional to see if any folding
7044 can then be done. Convert comparison to conditional for this purpose.
7045 The also optimizes non-constant cases that used to be done in
7046 expand_expr.
7048 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7049 one of the operands is a comparison and the other is a comparison, a
7050 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
7051 code below would make the expression more complex. Change it to a
7052 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7053 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7055 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7056 || code == EQ_EXPR || code == NE_EXPR)
7057 && ((truth_value_p (TREE_CODE (arg0))
7058 && (truth_value_p (TREE_CODE (arg1))
7059 || (TREE_CODE (arg1) == BIT_AND_EXPR
7060 && integer_onep (TREE_OPERAND (arg1, 1)))))
7061 || (truth_value_p (TREE_CODE (arg1))
7062 && (truth_value_p (TREE_CODE (arg0))
7063 || (TREE_CODE (arg0) == BIT_AND_EXPR
7064 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7066 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7067 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7068 : TRUTH_XOR_EXPR,
7069 boolean_type_node,
7070 fold_convert (boolean_type_node, arg0),
7071 fold_convert (boolean_type_node, arg1));
7073 if (code == EQ_EXPR)
7074 tem = invert_truthvalue (tem);
7076 return fold_convert (type, tem);
7079 if (TREE_CODE_CLASS (code) == tcc_comparison
7080 && TREE_CODE (arg0) == COMPOUND_EXPR)
7081 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7082 fold_build2 (code, type, TREE_OPERAND (arg0, 1), arg1));
7083 else if (TREE_CODE_CLASS (code) == tcc_comparison
7084 && TREE_CODE (arg1) == COMPOUND_EXPR)
7085 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7086 fold_build2 (code, type, arg0, TREE_OPERAND (arg1, 1)));
7087 else if (TREE_CODE_CLASS (code) == tcc_binary
7088 || TREE_CODE_CLASS (code) == tcc_comparison)
7090 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7091 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7092 fold_build2 (code, type, TREE_OPERAND (arg0, 1),
7093 arg1));
7094 if (TREE_CODE (arg1) == COMPOUND_EXPR
7095 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7096 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7097 fold_build2 (code, type,
7098 arg0, TREE_OPERAND (arg1, 1)));
7100 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7102 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7103 arg0, arg1,
7104 /*cond_first_p=*/1);
7105 if (tem != NULL_TREE)
7106 return tem;
7109 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7111 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7112 arg1, arg0,
7113 /*cond_first_p=*/0);
7114 if (tem != NULL_TREE)
7115 return tem;
7119 switch (code)
7121 case PLUS_EXPR:
7122 /* A + (-B) -> A - B */
7123 if (TREE_CODE (arg1) == NEGATE_EXPR)
7124 return fold_build2 (MINUS_EXPR, type,
7125 fold_convert (type, arg0),
7126 fold_convert (type, TREE_OPERAND (arg1, 0)));
7127 /* (-A) + B -> B - A */
7128 if (TREE_CODE (arg0) == NEGATE_EXPR
7129 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7130 return fold_build2 (MINUS_EXPR, type,
7131 fold_convert (type, arg1),
7132 fold_convert (type, TREE_OPERAND (arg0, 0)));
7133 /* Convert ~A + 1 to -A. */
7134 if (INTEGRAL_TYPE_P (type)
7135 && TREE_CODE (arg0) == BIT_NOT_EXPR
7136 && integer_onep (arg1))
7137 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7139 if (! FLOAT_TYPE_P (type))
7141 if (integer_zerop (arg1))
7142 return non_lvalue (fold_convert (type, arg0));
7144 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7145 with a constant, and the two constants have no bits in common,
7146 we should treat this as a BIT_IOR_EXPR since this may produce more
7147 simplifications. */
7148 if (TREE_CODE (arg0) == BIT_AND_EXPR
7149 && TREE_CODE (arg1) == BIT_AND_EXPR
7150 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7151 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7152 && integer_zerop (const_binop (BIT_AND_EXPR,
7153 TREE_OPERAND (arg0, 1),
7154 TREE_OPERAND (arg1, 1), 0)))
7156 code = BIT_IOR_EXPR;
7157 goto bit_ior;
7160 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7161 (plus (plus (mult) (mult)) (foo)) so that we can
7162 take advantage of the factoring cases below. */
7163 if (((TREE_CODE (arg0) == PLUS_EXPR
7164 || TREE_CODE (arg0) == MINUS_EXPR)
7165 && TREE_CODE (arg1) == MULT_EXPR)
7166 || ((TREE_CODE (arg1) == PLUS_EXPR
7167 || TREE_CODE (arg1) == MINUS_EXPR)
7168 && TREE_CODE (arg0) == MULT_EXPR))
7170 tree parg0, parg1, parg, marg;
7171 enum tree_code pcode;
7173 if (TREE_CODE (arg1) == MULT_EXPR)
7174 parg = arg0, marg = arg1;
7175 else
7176 parg = arg1, marg = arg0;
7177 pcode = TREE_CODE (parg);
7178 parg0 = TREE_OPERAND (parg, 0);
7179 parg1 = TREE_OPERAND (parg, 1);
7180 STRIP_NOPS (parg0);
7181 STRIP_NOPS (parg1);
7183 if (TREE_CODE (parg0) == MULT_EXPR
7184 && TREE_CODE (parg1) != MULT_EXPR)
7185 return fold_build2 (pcode, type,
7186 fold_build2 (PLUS_EXPR, type,
7187 fold_convert (type, parg0),
7188 fold_convert (type, marg)),
7189 fold_convert (type, parg1));
7190 if (TREE_CODE (parg0) != MULT_EXPR
7191 && TREE_CODE (parg1) == MULT_EXPR)
7192 return fold_build2 (PLUS_EXPR, type,
7193 fold_convert (type, parg0),
7194 fold_build2 (pcode, type,
7195 fold_convert (type, marg),
7196 fold_convert (type,
7197 parg1)));
7200 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7202 tree arg00, arg01, arg10, arg11;
7203 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7205 /* (A * C) + (B * C) -> (A+B) * C.
7206 We are most concerned about the case where C is a constant,
7207 but other combinations show up during loop reduction. Since
7208 it is not difficult, try all four possibilities. */
7210 arg00 = TREE_OPERAND (arg0, 0);
7211 arg01 = TREE_OPERAND (arg0, 1);
7212 arg10 = TREE_OPERAND (arg1, 0);
7213 arg11 = TREE_OPERAND (arg1, 1);
7214 same = NULL_TREE;
7216 if (operand_equal_p (arg01, arg11, 0))
7217 same = arg01, alt0 = arg00, alt1 = arg10;
7218 else if (operand_equal_p (arg00, arg10, 0))
7219 same = arg00, alt0 = arg01, alt1 = arg11;
7220 else if (operand_equal_p (arg00, arg11, 0))
7221 same = arg00, alt0 = arg01, alt1 = arg10;
7222 else if (operand_equal_p (arg01, arg10, 0))
7223 same = arg01, alt0 = arg00, alt1 = arg11;
7225 /* No identical multiplicands; see if we can find a common
7226 power-of-two factor in non-power-of-two multiplies. This
7227 can help in multi-dimensional array access. */
7228 else if (TREE_CODE (arg01) == INTEGER_CST
7229 && TREE_CODE (arg11) == INTEGER_CST
7230 && TREE_INT_CST_HIGH (arg01) == 0
7231 && TREE_INT_CST_HIGH (arg11) == 0)
7233 HOST_WIDE_INT int01, int11, tmp;
7234 int01 = TREE_INT_CST_LOW (arg01);
7235 int11 = TREE_INT_CST_LOW (arg11);
7237 /* Move min of absolute values to int11. */
7238 if ((int01 >= 0 ? int01 : -int01)
7239 < (int11 >= 0 ? int11 : -int11))
7241 tmp = int01, int01 = int11, int11 = tmp;
7242 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7243 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7246 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7248 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7249 build_int_cst (NULL_TREE,
7250 int01 / int11));
7251 alt1 = arg10;
7252 same = arg11;
7256 if (same)
7257 return fold_build2 (MULT_EXPR, type,
7258 fold_build2 (PLUS_EXPR, type,
7259 fold_convert (type, alt0),
7260 fold_convert (type, alt1)),
7261 fold_convert (type, same));
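/* An illustrative sketch (added for exposition, not original code):
   the four operand_equal_p checks cover every placement of the common
   factor, e.g.

     a*c + b*c  ->  (a + b) * c

   while the power-of-two fallback handles cases such as

     i*28 + j*4  ->  (i*7 + j) * 4

   which typically arise from multi-dimensional array indexing.  */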
7264 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
7265 of the array. The loop optimizer sometimes produces this type
7266 of expression. */
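/* An illustrative sketch (added for exposition, not original code):
   with int a[N] and 4-byte ints, the loop optimizer may leave

     &a[i] + j*4  ->  &a[i + j]

   where the constant multiplier matches the array's step (element
   size).  */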
7267 if (TREE_CODE (arg0) == ADDR_EXPR)
7269 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7270 if (tem)
7271 return fold_convert (type, fold (tem));
7273 else if (TREE_CODE (arg1) == ADDR_EXPR)
7275 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7276 if (tem)
7277 return fold_convert (type, fold (tem));
7280 else
7282 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7283 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7284 return non_lvalue (fold_convert (type, arg0));
7286 /* Likewise if the operands are reversed. */
7287 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7288 return non_lvalue (fold_convert (type, arg1));
7290 /* Convert X + -C into X - C. */
7291 if (TREE_CODE (arg1) == REAL_CST
7292 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7294 tem = fold_negate_const (arg1, type);
7295 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7296 return fold_build2 (MINUS_EXPR, type,
7297 fold_convert (type, arg0),
7298 fold_convert (type, tem));
7301 if (flag_unsafe_math_optimizations
7302 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7303 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7304 && (tem = distribute_real_division (code, type, arg0, arg1)))
7305 return tem;
7307 /* Convert x+x into x*2.0. */
7308 if (operand_equal_p (arg0, arg1, 0)
7309 && SCALAR_FLOAT_TYPE_P (type))
7310 return fold_build2 (MULT_EXPR, type, arg0,
7311 build_real (type, dconst2));
7313 /* Convert x*c+x into x*(c+1). */
7314 if (flag_unsafe_math_optimizations
7315 && TREE_CODE (arg0) == MULT_EXPR
7316 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7317 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7318 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7320 REAL_VALUE_TYPE c;
7322 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7323 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7324 return fold_build2 (MULT_EXPR, type, arg1,
7325 build_real (type, c));
7328 /* Convert x+x*c into x*(c+1). */
7329 if (flag_unsafe_math_optimizations
7330 && TREE_CODE (arg1) == MULT_EXPR
7331 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7332 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7333 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7335 REAL_VALUE_TYPE c;
7337 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7338 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7339 return fold_build2 (MULT_EXPR, type, arg0,
7340 build_real (type, c));
7343 /* Convert x*c1+x*c2 into x*(c1+c2). */
7344 if (flag_unsafe_math_optimizations
7345 && TREE_CODE (arg0) == MULT_EXPR
7346 && TREE_CODE (arg1) == MULT_EXPR
7347 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7348 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7349 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7350 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7351 && operand_equal_p (TREE_OPERAND (arg0, 0),
7352 TREE_OPERAND (arg1, 0), 0))
7354 REAL_VALUE_TYPE c1, c2;
7356 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7357 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7358 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7359 return fold_build2 (MULT_EXPR, type,
7360 TREE_OPERAND (arg0, 0),
7361 build_real (type, c1));
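/* An illustrative sketch (added for exposition, not original code):
   under -funsafe-math-optimizations the three coefficient folds above
   rewrite, e.g.,

     x*2.0 + x      ->  x*3.0
     x + x*2.0      ->  x*3.0
     x*2.0 + x*3.0  ->  x*5.0

   with the new coefficient computed by real_arithmetic at compile
   time.  */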
7363 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7364 if (flag_unsafe_math_optimizations
7365 && TREE_CODE (arg1) == PLUS_EXPR
7366 && TREE_CODE (arg0) != MULT_EXPR)
7368 tree tree10 = TREE_OPERAND (arg1, 0);
7369 tree tree11 = TREE_OPERAND (arg1, 1);
7370 if (TREE_CODE (tree11) == MULT_EXPR
7371 && TREE_CODE (tree10) == MULT_EXPR)
7373 tree tree0;
7374 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7375 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7378 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
7379 if (flag_unsafe_math_optimizations
7380 && TREE_CODE (arg0) == PLUS_EXPR
7381 && TREE_CODE (arg1) != MULT_EXPR)
7383 tree tree00 = TREE_OPERAND (arg0, 0);
7384 tree tree01 = TREE_OPERAND (arg0, 1);
7385 if (TREE_CODE (tree01) == MULT_EXPR
7386 && TREE_CODE (tree00) == MULT_EXPR)
7388 tree tree0;
7389 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7390 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7395 bit_rotate:
7396 /* (A << C1) + (A >> C2), where A is unsigned and C1+C2 is the size
7397 of A, is a rotate of A by C1 bits. */
7398 /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the size
7399 of A, is a rotate of A by B bits. */
7401 enum tree_code code0, code1;
7402 code0 = TREE_CODE (arg0);
7403 code1 = TREE_CODE (arg1);
7404 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7405 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7406 && operand_equal_p (TREE_OPERAND (arg0, 0),
7407 TREE_OPERAND (arg1, 0), 0)
7408 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7410 tree tree01, tree11;
7411 enum tree_code code01, code11;
7413 tree01 = TREE_OPERAND (arg0, 1);
7414 tree11 = TREE_OPERAND (arg1, 1);
7415 STRIP_NOPS (tree01);
7416 STRIP_NOPS (tree11);
7417 code01 = TREE_CODE (tree01);
7418 code11 = TREE_CODE (tree11);
7419 if (code01 == INTEGER_CST
7420 && code11 == INTEGER_CST
7421 && TREE_INT_CST_HIGH (tree01) == 0
7422 && TREE_INT_CST_HIGH (tree11) == 0
7423 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7424 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7425 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7426 code0 == LSHIFT_EXPR ? tree01 : tree11);
7427 else if (code11 == MINUS_EXPR)
7429 tree tree110, tree111;
7430 tree110 = TREE_OPERAND (tree11, 0);
7431 tree111 = TREE_OPERAND (tree11, 1);
7432 STRIP_NOPS (tree110);
7433 STRIP_NOPS (tree111);
7434 if (TREE_CODE (tree110) == INTEGER_CST
7435 && 0 == compare_tree_int (tree110,
7436 TYPE_PRECISION
7437 (TREE_TYPE (TREE_OPERAND
7438 (arg0, 0))))
7439 && operand_equal_p (tree01, tree111, 0))
7440 return build2 ((code0 == LSHIFT_EXPR
7441 ? LROTATE_EXPR
7442 : RROTATE_EXPR),
7443 type, TREE_OPERAND (arg0, 0), tree01);
7445 else if (code01 == MINUS_EXPR)
7447 tree tree010, tree011;
7448 tree010 = TREE_OPERAND (tree01, 0);
7449 tree011 = TREE_OPERAND (tree01, 1);
7450 STRIP_NOPS (tree010);
7451 STRIP_NOPS (tree011);
7452 if (TREE_CODE (tree010) == INTEGER_CST
7453 && 0 == compare_tree_int (tree010,
7454 TYPE_PRECISION
7455 (TREE_TYPE (TREE_OPERAND
7456 (arg0, 0))))
7457 && operand_equal_p (tree11, tree011, 0))
7458 return build2 ((code0 != LSHIFT_EXPR
7459 ? LROTATE_EXPR
7460 : RROTATE_EXPR),
7461 type, TREE_OPERAND (arg0, 0), tree11);
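/* An illustrative sketch (added for exposition, not original code):
   for a 32-bit unsigned x both patterns above collapse into a single
   rotate, e.g.

     (x << 3) + (x >> 29)        ->  x lrotate 3
     (x << n) + (x >> (32 - n))  ->  x lrotate n

   which targets with a native rotate instruction can expand
   directly.  */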
7466 associate:
7467 /* In most languages, we can't reassociate floating-point operations
7468 across parentheses. Rather than remember where the parentheses were, we
7469 don't associate floats at all, unless the user has specified
7470 -funsafe-math-optimizations. */
7472 if (! wins
7473 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7475 tree var0, con0, lit0, minus_lit0;
7476 tree var1, con1, lit1, minus_lit1;
7478 /* Split both trees into variables, constants, and literals. Then
7479 associate each group together, the constants with literals,
7480 then the result with variables. This increases the chances of
7481 literals being recombined later and of generating relocatable
7482 expressions for the sum of a constant and literal. */
7483 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7484 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7485 code == MINUS_EXPR);
7487 /* Only do something if we found more than two objects. Otherwise,
7488 nothing has changed and we risk infinite recursion. */
7489 if (2 < ((var0 != 0) + (var1 != 0)
7490 + (con0 != 0) + (con1 != 0)
7491 + (lit0 != 0) + (lit1 != 0)
7492 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7494 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7495 if (code == MINUS_EXPR)
7496 code = PLUS_EXPR;
7498 var0 = associate_trees (var0, var1, code, type);
7499 con0 = associate_trees (con0, con1, code, type);
7500 lit0 = associate_trees (lit0, lit1, code, type);
7501 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7503 /* Preserve the MINUS_EXPR if the negative part of the literal is
7504 greater than the positive part. Otherwise, the multiplicative
7505 folding code (i.e. extract_muldiv) may be fooled when
7506 unsigned constants are subtracted, as in the following
7507 example: ((X*2 + 4) - 8U)/2. */
7508 if (minus_lit0 && lit0)
7510 if (TREE_CODE (lit0) == INTEGER_CST
7511 && TREE_CODE (minus_lit0) == INTEGER_CST
7512 && tree_int_cst_lt (lit0, minus_lit0))
7514 minus_lit0 = associate_trees (minus_lit0, lit0,
7515 MINUS_EXPR, type);
7516 lit0 = 0;
7518 else
7520 lit0 = associate_trees (lit0, minus_lit0,
7521 MINUS_EXPR, type);
7522 minus_lit0 = 0;
7525 if (minus_lit0)
7527 if (con0 == 0)
7528 return fold_convert (type,
7529 associate_trees (var0, minus_lit0,
7530 MINUS_EXPR, type));
7531 else
7533 con0 = associate_trees (con0, minus_lit0,
7534 MINUS_EXPR, type);
7535 return fold_convert (type,
7536 associate_trees (var0, con0,
7537 PLUS_EXPR, type));
7541 con0 = associate_trees (con0, lit0, code, type);
7542 return fold_convert (type, associate_trees (var0, con0,
7543 code, type));
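/* An illustrative sketch (added for exposition, not original code):
   splitting and regrouping lets constants and literals meet, e.g. for
   integral types

     (x + 1) + (y + 2)  ->  (x + y) + 3

   where split_tree yields variable/constant/literal parts for each
   operand and associate_trees recombines each group.  */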
7547 binary:
7548 if (wins)
7549 t1 = const_binop (code, arg0, arg1, 0);
7550 if (t1 != NULL_TREE)
7552 /* The return value should always have
7553 the same type as the original expression. */
7554 if (TREE_TYPE (t1) != type)
7555 t1 = fold_convert (type, t1);
7557 return t1;
7559 return NULL_TREE;
7561 case MINUS_EXPR:
7562 /* A - (-B) -> A + B */
7563 if (TREE_CODE (arg1) == NEGATE_EXPR)
7564 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7565 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7566 if (TREE_CODE (arg0) == NEGATE_EXPR
7567 && (FLOAT_TYPE_P (type)
7568 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7569 && negate_expr_p (arg1)
7570 && reorder_operands_p (arg0, arg1))
7571 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7572 TREE_OPERAND (arg0, 0));
7573 /* Convert -A - 1 to ~A. */
7574 if (INTEGRAL_TYPE_P (type)
7575 && TREE_CODE (arg0) == NEGATE_EXPR
7576 && integer_onep (arg1))
7577 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7579 /* Convert -1 - A to ~A. */
7580 if (INTEGRAL_TYPE_P (type)
7581 && integer_all_onesp (arg0))
7582 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7584 if (! FLOAT_TYPE_P (type))
7586 if (! wins && integer_zerop (arg0))
7587 return negate_expr (fold_convert (type, arg1));
7588 if (integer_zerop (arg1))
7589 return non_lvalue (fold_convert (type, arg0));
7591 /* Fold A - (A & B) into ~B & A. */
7592 if (!TREE_SIDE_EFFECTS (arg0)
7593 && TREE_CODE (arg1) == BIT_AND_EXPR)
7595 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7596 return fold_build2 (BIT_AND_EXPR, type,
7597 fold_build1 (BIT_NOT_EXPR, type,
7598 TREE_OPERAND (arg1, 0)),
7599 arg0);
7600 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7601 return fold_build2 (BIT_AND_EXPR, type,
7602 fold_build1 (BIT_NOT_EXPR, type,
7603 TREE_OPERAND (arg1, 1)),
7604 arg0);
7607 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7608 any power of 2 minus 1. */
7609 if (TREE_CODE (arg0) == BIT_AND_EXPR
7610 && TREE_CODE (arg1) == BIT_AND_EXPR
7611 && operand_equal_p (TREE_OPERAND (arg0, 0),
7612 TREE_OPERAND (arg1, 0), 0))
7614 tree mask0 = TREE_OPERAND (arg0, 1);
7615 tree mask1 = TREE_OPERAND (arg1, 1);
7616 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7618 if (operand_equal_p (tem, mask1, 0))
7620 tem = fold_build2 (BIT_XOR_EXPR, type,
7621 TREE_OPERAND (arg0, 0), mask1);
7622 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7627 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7628 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7629 return non_lvalue (fold_convert (type, arg0));
7631 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7632 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7633 (-ARG1 + ARG0) reduces to -ARG1. */
7634 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7635 return negate_expr (fold_convert (type, arg1));
7637 /* Fold &x - &x. This can happen from &x.foo - &x.
7638 This is unsafe for certain floats even in non-IEEE formats.
7639 In IEEE, it is unsafe because it does the wrong thing for NaNs.
7640 Also note that operand_equal_p is always false if an operand
7641 is volatile. */
7643 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7644 && operand_equal_p (arg0, arg1, 0))
7645 return fold_convert (type, integer_zero_node);
7647 /* A - B -> A + (-B) if B is easily negatable. */
7648 if (!wins && negate_expr_p (arg1)
7649 && ((FLOAT_TYPE_P (type)
7650 /* Avoid this transformation if B is a positive REAL_CST. */
7651 && (TREE_CODE (arg1) != REAL_CST
7652 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7653 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7654 return fold_build2 (PLUS_EXPR, type,
7655 fold_convert (type, arg0),
7656 fold_convert (type, negate_expr (arg1)));
7658 /* Try folding difference of addresses. */
7660 HOST_WIDE_INT diff;
7662 if ((TREE_CODE (arg0) == ADDR_EXPR
7663 || TREE_CODE (arg1) == ADDR_EXPR)
7664 && ptr_difference_const (arg0, arg1, &diff))
7665 return build_int_cst_type (type, diff);
7668 /* Fold &a[i] - &a[j] to (i-j) times the element size. */
7669 if (TREE_CODE (arg0) == ADDR_EXPR
7670 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7671 && TREE_CODE (arg1) == ADDR_EXPR
7672 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7674 tree aref0 = TREE_OPERAND (arg0, 0);
7675 tree aref1 = TREE_OPERAND (arg1, 0);
7676 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7677 TREE_OPERAND (aref1, 0), 0))
7679 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7680 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7681 tree esz = array_ref_element_size (aref0);
7682 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7683 return fold_build2 (MULT_EXPR, type, diff,
7684 fold_convert (type, esz));
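/* An illustrative sketch (added for exposition, not original code):
   for int a[N] with 4-byte elements the fold above produces the byte
   difference

     &a[i] - &a[j]  ->  (i - j) * 4

   i.e. the index difference scaled by array_ref_element_size.  */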
7689 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7690 of the array. The loop optimizer sometimes produces this type
7691 of expression. */
7692 if (TREE_CODE (arg0) == ADDR_EXPR)
7694 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7695 if (tem)
7696 return fold_convert (type, fold (tem));
7699 if (flag_unsafe_math_optimizations
7700 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7701 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7702 && (tem = distribute_real_division (code, type, arg0, arg1)))
7703 return tem;
7705 if (TREE_CODE (arg0) == MULT_EXPR
7706 && TREE_CODE (arg1) == MULT_EXPR
7707 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7709 /* (A * C) - (B * C) -> (A-B) * C. */
7710 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7711 TREE_OPERAND (arg1, 1), 0))
7712 return fold_build2 (MULT_EXPR, type,
7713 fold_build2 (MINUS_EXPR, type,
7714 TREE_OPERAND (arg0, 0),
7715 TREE_OPERAND (arg1, 0)),
7716 TREE_OPERAND (arg0, 1));
7717 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7718 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7719 TREE_OPERAND (arg1, 0), 0))
7720 return fold_build2 (MULT_EXPR, type,
7721 TREE_OPERAND (arg0, 0),
7722 fold_build2 (MINUS_EXPR, type,
7723 TREE_OPERAND (arg0, 1),
7724 TREE_OPERAND (arg1, 1)));
7727 goto associate;
7729 case MULT_EXPR:
7730 /* (-A) * (-B) -> A * B */
7731 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7732 return fold_build2 (MULT_EXPR, type,
7733 TREE_OPERAND (arg0, 0),
7734 negate_expr (arg1));
7735 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7736 return fold_build2 (MULT_EXPR, type,
7737 negate_expr (arg0),
7738 TREE_OPERAND (arg1, 0));
7740 if (! FLOAT_TYPE_P (type))
7742 if (integer_zerop (arg1))
7743 return omit_one_operand (type, arg1, arg0);
7744 if (integer_onep (arg1))
7745 return non_lvalue (fold_convert (type, arg0));
7746 /* Transform x * -1 into -x. */
7747 if (integer_all_onesp (arg1))
7748 return fold_convert (type, negate_expr (arg0));
7750 /* (a * (1 << b)) is (a << b) */
7751 if (TREE_CODE (arg1) == LSHIFT_EXPR
7752 && integer_onep (TREE_OPERAND (arg1, 0)))
7753 return fold_build2 (LSHIFT_EXPR, type, arg0,
7754 TREE_OPERAND (arg1, 1));
7755 if (TREE_CODE (arg0) == LSHIFT_EXPR
7756 && integer_onep (TREE_OPERAND (arg0, 0)))
7757 return fold_build2 (LSHIFT_EXPR, type, arg1,
7758 TREE_OPERAND (arg0, 1));
7760 if (TREE_CODE (arg1) == INTEGER_CST
7761 && 0 != (tem = extract_muldiv (op0,
7762 fold_convert (type, arg1),
7763 code, NULL_TREE)))
7764 return fold_convert (type, tem);
7767 else
7769 /* Maybe fold x * 0 to 0. The expressions aren't the same
7770 when x is NaN, since x * 0 is also NaN. Nor are they the
7771 same in modes with signed zeros, since multiplying a
7772 negative value by 0 gives -0, not +0. */
7773 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7774 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7775 && real_zerop (arg1))
7776 return omit_one_operand (type, arg1, arg0);
7777 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7778 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7779 && real_onep (arg1))
7780 return non_lvalue (fold_convert (type, arg0));
7782 /* Transform x * -1.0 into -x. */
7783 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7784 && real_minus_onep (arg1))
7785 return fold_convert (type, negate_expr (arg0));
7787 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7788 if (flag_unsafe_math_optimizations
7789 && TREE_CODE (arg0) == RDIV_EXPR
7790 && TREE_CODE (arg1) == REAL_CST
7791 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7793 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7794 arg1, 0);
7795 if (tem)
7796 return fold_build2 (RDIV_EXPR, type, tem,
7797 TREE_OPERAND (arg0, 1));
7800 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7801 if (operand_equal_p (arg0, arg1, 0))
7803 tree tem = fold_strip_sign_ops (arg0);
7804 if (tem != NULL_TREE)
7806 tem = fold_convert (type, tem);
7807 return fold_build2 (MULT_EXPR, type, tem, tem);
7811 if (flag_unsafe_math_optimizations)
7813 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7814 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7816 /* Optimizations of root(...)*root(...). */
7817 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7819 tree rootfn, arg, arglist;
7820 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7821 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7823 /* Optimize sqrt(x)*sqrt(x) as x. */
7824 if (BUILTIN_SQRT_P (fcode0)
7825 && operand_equal_p (arg00, arg10, 0)
7826 && ! HONOR_SNANS (TYPE_MODE (type)))
7827 return arg00;
7829 /* Optimize root(x)*root(y) as root(x*y). */
7830 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7831 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7832 arglist = build_tree_list (NULL_TREE, arg);
7833 return build_function_call_expr (rootfn, arglist);
7836 /* Optimize expN(x)*expN(y) as expN(x+y). */
7837 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7839 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7840 tree arg = fold_build2 (PLUS_EXPR, type,
7841 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7842 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7843 tree arglist = build_tree_list (NULL_TREE, arg);
7844 return build_function_call_expr (expfn, arglist);
7847 /* Optimizations of pow(...)*pow(...). */
7848 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7849 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7850 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7852 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7853 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7854 1)));
7855 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7856 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7857 1)));
7859 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7860 if (operand_equal_p (arg01, arg11, 0))
7862 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7863 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7864 tree arglist = tree_cons (NULL_TREE, arg,
7865 build_tree_list (NULL_TREE,
7866 arg01));
7867 return build_function_call_expr (powfn, arglist);
7870 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7871 if (operand_equal_p (arg00, arg10, 0))
7873 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7874 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7875 tree arglist = tree_cons (NULL_TREE, arg00,
7876 build_tree_list (NULL_TREE,
7877 arg));
7878 return build_function_call_expr (powfn, arglist);
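/* An illustrative sketch (added for exposition, not original code):
   both pow*pow shapes above are exponent-rule rewrites, e.g.

     pow (x, y) * pow (z, y)  ->  pow (x*z, y)
     pow (x, y) * pow (x, z)  ->  pow (x, y+z)

   and are valid only under -funsafe-math-optimizations.  */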
7882 /* Optimize tan(x)*cos(x) as sin(x). */
7883 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7884 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7885 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7886 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7887 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7888 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7889 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7890 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7892 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7894 if (sinfn != NULL_TREE)
7895 return build_function_call_expr (sinfn,
7896 TREE_OPERAND (arg0, 1));
7899 /* Optimize x*pow(x,c) as pow(x,c+1). */
7900 if (fcode1 == BUILT_IN_POW
7901 || fcode1 == BUILT_IN_POWF
7902 || fcode1 == BUILT_IN_POWL)
7904 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7905 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7906 1)));
7907 if (TREE_CODE (arg11) == REAL_CST
7908 && ! TREE_CONSTANT_OVERFLOW (arg11)
7909 && operand_equal_p (arg0, arg10, 0))
7911 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7912 REAL_VALUE_TYPE c;
7913 tree arg, arglist;
7915 c = TREE_REAL_CST (arg11);
7916 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7917 arg = build_real (type, c);
7918 arglist = build_tree_list (NULL_TREE, arg);
7919 arglist = tree_cons (NULL_TREE, arg0, arglist);
7920 return build_function_call_expr (powfn, arglist);
7924 /* Optimize pow(x,c)*x as pow(x,c+1). */
7925 if (fcode0 == BUILT_IN_POW
7926 || fcode0 == BUILT_IN_POWF
7927 || fcode0 == BUILT_IN_POWL)
7929 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7930 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7931 1)));
7932 if (TREE_CODE (arg01) == REAL_CST
7933 && ! TREE_CONSTANT_OVERFLOW (arg01)
7934 && operand_equal_p (arg1, arg00, 0))
7936 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7937 REAL_VALUE_TYPE c;
7938 tree arg, arglist;
7940 c = TREE_REAL_CST (arg01);
7941 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7942 arg = build_real (type, c);
7943 arglist = build_tree_list (NULL_TREE, arg);
7944 arglist = tree_cons (NULL_TREE, arg1, arglist);
7945 return build_function_call_expr (powfn, arglist);
7949 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7950 if (! optimize_size
7951 && operand_equal_p (arg0, arg1, 0))
7953 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7955 if (powfn)
7957 tree arg = build_real (type, dconst2);
7958 tree arglist = build_tree_list (NULL_TREE, arg);
7959 arglist = tree_cons (NULL_TREE, arg0, arglist);
7960 return build_function_call_expr (powfn, arglist);
7965 goto associate;
7967 case BIT_IOR_EXPR:
7968 bit_ior:
7969 if (integer_all_onesp (arg1))
7970 return omit_one_operand (type, arg1, arg0);
7971 if (integer_zerop (arg1))
7972 return non_lvalue (fold_convert (type, arg0));
7973 if (operand_equal_p (arg0, arg1, 0))
7974 return non_lvalue (fold_convert (type, arg0));
7976 /* ~X | X is -1. */
7977 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7978 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7980 t1 = build_int_cst (type, -1);
7981 t1 = force_fit_type (t1, 0, false, false);
7982 return omit_one_operand (type, t1, arg1);
7985 /* X | ~X is -1. */
7986 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7987 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7989 t1 = build_int_cst (type, -1);
7990 t1 = force_fit_type (t1, 0, false, false);
7991 return omit_one_operand (type, t1, arg0);
7994 t1 = distribute_bit_expr (code, type, arg0, arg1);
7995 if (t1 != NULL_TREE)
7996 return t1;
7998 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8000 This results in more efficient code for machines without a NAND
8001 instruction. Combine will canonicalize to the first form
8002 which will allow use of NAND instructions provided by the
8003 backend if they exist. */
8004 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8005 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8007 return fold_build1 (BIT_NOT_EXPR, type,
8008 build2 (BIT_AND_EXPR, type,
8009 TREE_OPERAND (arg0, 0),
8010 TREE_OPERAND (arg1, 0)));
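/* An illustrative sketch (added for exposition, not original code):
   this is De Morgan's law applied in the direction that exposes a
   NAND, e.g.

     ~a | ~b  ->  ~(a & b)

   leaving combine free to pick whichever form the target supports.  */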
8013 /* See if this can be simplified into a rotate first. If that
8014 is unsuccessful, continue in the association code. */
8015 goto bit_rotate;
8017 case BIT_XOR_EXPR:
8018 if (integer_zerop (arg1))
8019 return non_lvalue (fold_convert (type, arg0));
8020 if (integer_all_onesp (arg1))
8021 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8022 if (operand_equal_p (arg0, arg1, 0))
8023 return omit_one_operand (type, integer_zero_node, arg0);
8025 /* ~X ^ X is -1. */
8026 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8027 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8029 t1 = build_int_cst (type, -1);
8030 t1 = force_fit_type (t1, 0, false, false);
8031 return omit_one_operand (type, t1, arg1);
8034 /* X ^ ~X is -1. */
8035 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8036 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8038 t1 = build_int_cst (type, -1);
8039 t1 = force_fit_type (t1, 0, false, false);
8040 return omit_one_operand (type, t1, arg0);
8043 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8044 with a constant, and the two constants have no bits in common,
8045 we should treat this as a BIT_IOR_EXPR since this may produce more
8046 simplifications. */
8047 if (TREE_CODE (arg0) == BIT_AND_EXPR
8048 && TREE_CODE (arg1) == BIT_AND_EXPR
8049 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8050 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8051 && integer_zerop (const_binop (BIT_AND_EXPR,
8052 TREE_OPERAND (arg0, 1),
8053 TREE_OPERAND (arg1, 1), 0)))
8055 code = BIT_IOR_EXPR;
8056 goto bit_ior;
8059 /* Convert ~X ^ ~Y to X ^ Y. */
8060 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8061 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8062 return fold_build2 (code, type,
8063 fold_convert (type, TREE_OPERAND (arg0, 0)),
8064 fold_convert (type, TREE_OPERAND (arg1, 0)));
8066 /* See if this can be simplified into a rotate first. If that
8067 is unsuccessful, continue in the association code. */
8068 goto bit_rotate;
8070 case BIT_AND_EXPR:
8071 if (integer_all_onesp (arg1))
8072 return non_lvalue (fold_convert (type, arg0));
8073 if (integer_zerop (arg1))
8074 return omit_one_operand (type, arg1, arg0);
8075 if (operand_equal_p (arg0, arg1, 0))
8076 return non_lvalue (fold_convert (type, arg0));
8078 /* ~X & X is always zero. */
8079 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8080 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8081 return omit_one_operand (type, integer_zero_node, arg1);
8083 /* X & ~X is always zero. */
8084 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8085 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8086 return omit_one_operand (type, integer_zero_node, arg0);
8088 t1 = distribute_bit_expr (code, type, arg0, arg1);
8089 if (t1 != NULL_TREE)
8090 return t1;
8091 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8092 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8093 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8095 unsigned int prec
8096 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8098 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8099 && (~TREE_INT_CST_LOW (arg1)
8100 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8101 return fold_convert (type, TREE_OPERAND (arg0, 0));
8104 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8106 This results in more efficient code for machines without a NOR
8107 instruction. Combine will canonicalize to the first form
8108 which will allow use of NOR instructions provided by the
8109 backend if they exist. */
8110 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8111 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8113 return fold_build1 (BIT_NOT_EXPR, type,
8114 build2 (BIT_IOR_EXPR, type,
8115 TREE_OPERAND (arg0, 0),
8116 TREE_OPERAND (arg1, 0)));
8119 goto associate;
8121 case RDIV_EXPR:
8122 /* Don't touch a floating-point divide by zero unless the mode
8123 of the constant can represent infinity. */
8124 if (TREE_CODE (arg1) == REAL_CST
8125 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8126 && real_zerop (arg1))
8127 return NULL_TREE;
8129 /* (-A) / (-B) -> A / B */
8130 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8131 return fold_build2 (RDIV_EXPR, type,
8132 TREE_OPERAND (arg0, 0),
8133 negate_expr (arg1));
8134 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8135 return fold_build2 (RDIV_EXPR, type,
8136 negate_expr (arg0),
8137 TREE_OPERAND (arg1, 0));
8139 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8140 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8141 && real_onep (arg1))
8142 return non_lvalue (fold_convert (type, arg0));
8144 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8145 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8146 && real_minus_onep (arg1))
8147 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8149 /* If ARG1 is a constant, we can convert this to a multiply by the
8150 reciprocal. This does not have the same rounding properties,
8151 so only do this if -funsafe-math-optimizations is enabled. We can
8152 actually always safely do it if ARG1 is a power of two, but it's
8153 hard to tell whether it is in a portable manner. */
8154 if (TREE_CODE (arg1) == REAL_CST)
8156 if (flag_unsafe_math_optimizations
8157 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8158 arg1, 0)))
8159 return fold_build2 (MULT_EXPR, type, arg0, tem);
8160 /* Find the reciprocal if optimizing and the result is exact. */
8161 if (optimize)
8163 REAL_VALUE_TYPE r;
8164 r = TREE_REAL_CST (arg1);
8165 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
8167 tem = build_real (type, r);
8168 return fold_build2 (MULT_EXPR, type,
8169 fold_convert (type, arg0), tem);
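/* An illustrative sketch (added for exposition, not original code):
   division by an exactly invertible constant becomes a multiply even
   without unsafe math, e.g.

     x / 2.0  ->  x * 0.5

   since 0.5 is exactly representable; x / 3.0 is left alone unless
   -funsafe-math-optimizations is given.  */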
8173 /* Convert A/B/C to A/(B*C). */
8174 if (flag_unsafe_math_optimizations
8175 && TREE_CODE (arg0) == RDIV_EXPR)
8176 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8177 fold_build2 (MULT_EXPR, type,
8178 TREE_OPERAND (arg0, 1), arg1));
8180 /* Convert A/(B/C) to (A/B)*C. */
8181 if (flag_unsafe_math_optimizations
8182 && TREE_CODE (arg1) == RDIV_EXPR)
8183 return fold_build2 (MULT_EXPR, type,
8184 fold_build2 (RDIV_EXPR, type, arg0,
8185 TREE_OPERAND (arg1, 0)),
8186 TREE_OPERAND (arg1, 1));
8188 /* Convert C1/(X*C2) into (C1/C2)/X. */
8189 if (flag_unsafe_math_optimizations
8190 && TREE_CODE (arg1) == MULT_EXPR
8191 && TREE_CODE (arg0) == REAL_CST
8192 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8194 tree tem = const_binop (RDIV_EXPR, arg0,
8195 TREE_OPERAND (arg1, 1), 0);
8196 if (tem)
8197 return fold_build2 (RDIV_EXPR, type, tem,
8198 TREE_OPERAND (arg1, 0));
8201 if (flag_unsafe_math_optimizations)
8203 enum built_in_function fcode = builtin_mathfn_code (arg1);
8204 /* Optimize x/expN(y) into x*expN(-y). */
8205 if (BUILTIN_EXPONENT_P (fcode))
8207 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8208 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8209 tree arglist = build_tree_list (NULL_TREE,
8210 fold_convert (type, arg));
8211 arg1 = build_function_call_expr (expfn, arglist);
8212 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8215 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8216 if (fcode == BUILT_IN_POW
8217 || fcode == BUILT_IN_POWF
8218 || fcode == BUILT_IN_POWL)
8220 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8221 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8222 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8223 tree neg11 = fold_convert (type, negate_expr (arg11));
8224 tree arglist = tree_cons (NULL_TREE, arg10,
8225 build_tree_list (NULL_TREE, neg11));
8226 arg1 = build_function_call_expr (powfn, arglist);
8227 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8231 if (flag_unsafe_math_optimizations)
8233 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8234 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8236 /* Optimize sin(x)/cos(x) as tan(x). */
8237 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8238 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8239 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8240 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8241 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8243 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8245 if (tanfn != NULL_TREE)
8246 return build_function_call_expr (tanfn,
8247 TREE_OPERAND (arg0, 1));
8250 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8251 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8252 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8253 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8254 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8255 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8257 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8259 if (tanfn != NULL_TREE)
8261 tree tmp = TREE_OPERAND (arg0, 1);
8262 tmp = build_function_call_expr (tanfn, tmp);
8263 return fold_build2 (RDIV_EXPR, type,
8264 build_real (type, dconst1), tmp);
8268 /* Optimize pow(x,c)/x as pow(x,c-1). */
8269 if (fcode0 == BUILT_IN_POW
8270 || fcode0 == BUILT_IN_POWF
8271 || fcode0 == BUILT_IN_POWL)
8273 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8274 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8275 if (TREE_CODE (arg01) == REAL_CST
8276 && ! TREE_CONSTANT_OVERFLOW (arg01)
8277 && operand_equal_p (arg1, arg00, 0))
8279 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8280 REAL_VALUE_TYPE c;
8281 tree arg, arglist;
8283 c = TREE_REAL_CST (arg01);
8284 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8285 arg = build_real (type, c);
8286 arglist = build_tree_list (NULL_TREE, arg);
8287 arglist = tree_cons (NULL_TREE, arg1, arglist);
8288 return build_function_call_expr (powfn, arglist);
8292 goto binary;
8294 case TRUNC_DIV_EXPR:
8295 case ROUND_DIV_EXPR:
8296 case FLOOR_DIV_EXPR:
8297 case CEIL_DIV_EXPR:
8298 case EXACT_DIV_EXPR:
8299 if (integer_onep (arg1))
8300 return non_lvalue (fold_convert (type, arg0));
8301 if (integer_zerop (arg1))
8302 return NULL_TREE;
8303 /* X / -1 is -X. */
8304 if (!TYPE_UNSIGNED (type)
8305 && TREE_CODE (arg1) == INTEGER_CST
8306 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8307 && TREE_INT_CST_HIGH (arg1) == -1)
8308 return fold_convert (type, negate_expr (arg0));
8310 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8311 operation, EXACT_DIV_EXPR.
8313 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8314 At one time the others generated faster code; it's not clear whether
8315 they still do after the last round of changes to the DIV code in expmed.c. */
8316 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8317 && multiple_of_p (type, arg0, arg1))
8318 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8320 if (TREE_CODE (arg1) == INTEGER_CST
8321 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8322 return fold_convert (type, tem);
8324 goto binary;
8326 case CEIL_MOD_EXPR:
8327 case FLOOR_MOD_EXPR:
8328 case ROUND_MOD_EXPR:
8329 case TRUNC_MOD_EXPR:
8330 /* X % 1 is always zero, but be sure to preserve any side
8331 effects in X. */
8332 if (integer_onep (arg1))
8333 return omit_one_operand (type, integer_zero_node, arg0);
8335 /* For X % 0, return X % 0 unchanged so that we get the
8336 proper warnings and errors. */
8337 if (integer_zerop (arg1))
8338 return NULL_TREE;
8340 /* 0 % X is always zero, but be sure to preserve any side
8341 effects in X. Place this after checking for X == 0. */
8342 if (integer_zerop (arg0))
8343 return omit_one_operand (type, integer_zero_node, arg1);
8345 /* X % -1 is zero. */
8346 if (!TYPE_UNSIGNED (type)
8347 && TREE_CODE (arg1) == INTEGER_CST
8348 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8349 && TREE_INT_CST_HIGH (arg1) == -1)
8350 return omit_one_operand (type, integer_zero_node, arg0);
8352 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8353 i.e. "X % C" into "X & C2" (where C2 = C - 1), if X and C are positive. */
8354 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8355 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8356 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8358 unsigned HOST_WIDE_INT high, low;
8359 tree mask;
8360 int l;
8362 l = tree_log2 (arg1);
8363 if (l >= HOST_BITS_PER_WIDE_INT)
8365 high = ((unsigned HOST_WIDE_INT) 1
8366 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8367 low = -1;
8369 else
8371 high = 0;
8372 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8375 mask = build_int_cst_wide (type, low, high);
8376 return fold_build2 (BIT_AND_EXPR, type,
8377 fold_convert (type, arg0), mask);
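/* An illustrative sketch (added for exposition, not original code):
   when X is known non-negative, a power-of-two modulus becomes a
   mask, e.g.

     (unsigned) x % 8  ->  x & 7

   with the mask built from the low tree_log2 (arg1) bits.  */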
8380 /* X % -C is the same as X % C. */
8381 if (code == TRUNC_MOD_EXPR
8382 && !TYPE_UNSIGNED (type)
8383 && TREE_CODE (arg1) == INTEGER_CST
8384 && !TREE_CONSTANT_OVERFLOW (arg1)
8385 && TREE_INT_CST_HIGH (arg1) < 0
8386 && !flag_trapv
8387 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8388 && !sign_bit_p (arg1, arg1))
8389 return fold_build2 (code, type, fold_convert (type, arg0),
8390 fold_convert (type, negate_expr (arg1)));
8392 /* X % -Y is the same as X % Y. */
8393 if (code == TRUNC_MOD_EXPR
8394 && !TYPE_UNSIGNED (type)
8395 && TREE_CODE (arg1) == NEGATE_EXPR
8396 && !flag_trapv)
8397 return fold_build2 (code, type, fold_convert (type, arg0),
8398 fold_convert (type, TREE_OPERAND (arg1, 0)));
8400 if (TREE_CODE (arg1) == INTEGER_CST
8401 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8402 return fold_convert (type, tem);
8404 goto binary;
8406 case LROTATE_EXPR:
8407 case RROTATE_EXPR:
8408 if (integer_all_onesp (arg0))
8409 return omit_one_operand (type, arg0, arg1);
8410 goto shift;
8412 case RSHIFT_EXPR:
8413 /* Optimize -1 >> x for arithmetic right shifts. */
8414 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8415 return omit_one_operand (type, arg0, arg1);
8416 /* ... fall through ... */
8418 case LSHIFT_EXPR:
8419 shift:
8420 if (integer_zerop (arg1))
8421 return non_lvalue (fold_convert (type, arg0));
8422 if (integer_zerop (arg0))
8423 return omit_one_operand (type, arg0, arg1);
8425 /* Since a negative shift count is not well-defined,
8426 don't try to compute it in the compiler. */
8427 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8428 return NULL_TREE;
8430 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8431 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8432 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8433 && host_integerp (TREE_OPERAND (arg0, 1), false)
8434 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8436 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8437 + TREE_INT_CST_LOW (arg1));
8439 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8440 being well defined. */
8441 if (low >= TYPE_PRECISION (type))
8443 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8444 low = low % TYPE_PRECISION (type);
8445 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8446 return build_int_cst (type, 0);
8447 else
8448 low = TYPE_PRECISION (type) - 1;
8451 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8452 build_int_cst (type, low));
8455 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8456 into x & ((unsigned)-1 >> c) for unsigned types. */
8457 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8458 || (TYPE_UNSIGNED (type)
8459 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8460 && host_integerp (arg1, false)
8461 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8462 && host_integerp (TREE_OPERAND (arg0, 1), false)
8463 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8465 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8466 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8467 tree lshift;
8468 tree arg00;
8470 if (low0 == low1)
8472 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8474 lshift = build_int_cst (type, -1);
8475 lshift = int_const_binop (code, lshift, arg1, 0);
8477 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
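/* An illustrative sketch (added for exposition, not original code):
   matching shift counts reduce to a single mask, e.g. for 32-bit
   operands

     (x >> 4) << 4             ->  x & 0xfffffff0
     ((unsigned) x << 4) >> 4  ->  x & 0x0fffffff

   built here as a BIT_AND_EXPR with -1 shifted by the same count.  */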
8481 /* Rewrite an LROTATE_EXPR by a constant into an
8482 RROTATE_EXPR by a new constant. */
8483 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8485 tree tem = build_int_cst (NULL_TREE,
8486 GET_MODE_BITSIZE (TYPE_MODE (type)));
8487 tem = fold_convert (TREE_TYPE (arg1), tem);
8488 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8489 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8492 /* If we have a rotate of a bit operation with the rotate count and
8493 the second operand of the bit operation both constant,
8494 permute the two operations. */
8495 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8496 && (TREE_CODE (arg0) == BIT_AND_EXPR
8497 || TREE_CODE (arg0) == BIT_IOR_EXPR
8498 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8499 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8500 return fold_build2 (TREE_CODE (arg0), type,
8501 fold_build2 (code, type,
8502 TREE_OPERAND (arg0, 0), arg1),
8503 fold_build2 (code, type,
8504 TREE_OPERAND (arg0, 1), arg1));
8506 /* Two consecutive rotates adding up to the width of the mode can
8507 be ignored. */
8508 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8509 && TREE_CODE (arg0) == RROTATE_EXPR
8510 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8511 && TREE_INT_CST_HIGH (arg1) == 0
8512 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8513 && ((TREE_INT_CST_LOW (arg1)
8514 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8515 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8516 return TREE_OPERAND (arg0, 0);
8518 goto binary;
8520 case MIN_EXPR:
8521 if (operand_equal_p (arg0, arg1, 0))
8522 return omit_one_operand (type, arg0, arg1);
8523 if (INTEGRAL_TYPE_P (type)
8524 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8525 return omit_one_operand (type, arg1, arg0);
8526 goto associate;
8528 case MAX_EXPR:
8529 if (operand_equal_p (arg0, arg1, 0))
8530 return omit_one_operand (type, arg0, arg1);
8531 if (INTEGRAL_TYPE_P (type)
8532 && TYPE_MAX_VALUE (type)
8533 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8534 return omit_one_operand (type, arg1, arg0);
8535 goto associate;
8537 case TRUTH_ANDIF_EXPR:
8538 /* Note that the operands of this must be ints
8539 and their values must be 0 or 1.
8540 ("true" is a fixed value perhaps depending on the language.) */
8541 /* If first arg is constant zero, return it. */
8542 if (integer_zerop (arg0))
8543 return fold_convert (type, arg0);
8544 case TRUTH_AND_EXPR:
8545 /* If either arg is constant true, drop it. */
8546 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8547 return non_lvalue (fold_convert (type, arg1));
8548 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8549 /* Preserve sequence points. */
8550 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8551 return non_lvalue (fold_convert (type, arg0));
8552 /* If second arg is constant zero, result is zero, but first arg
8553 must be evaluated. */
8554 if (integer_zerop (arg1))
8555 return omit_one_operand (type, arg1, arg0);
8556 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8557 case will be handled here. */
8558 if (integer_zerop (arg0))
8559 return omit_one_operand (type, arg0, arg1);
8561 /* !X && X is always false. */
8562 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8563 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8564 return omit_one_operand (type, integer_zero_node, arg1);
8565 /* X && !X is always false. */
8566 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8567 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8568 return omit_one_operand (type, integer_zero_node, arg0);
8570 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8571 means A >= Y && A != MAX, but in this case we know that
8572 A < X <= MAX. */
8574 if (!TREE_SIDE_EFFECTS (arg0)
8575 && !TREE_SIDE_EFFECTS (arg1))
8577 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8578 if (tem)
8579 return fold_build2 (code, type, tem, arg1);
8581 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8582 if (tem)
8583 return fold_build2 (code, type, arg0, tem);
8586 truth_andor:
8587 /* We only do these simplifications if we are optimizing. */
8588 if (!optimize)
8589 return NULL_TREE;
8591 /* Check for things like (A || B) && (A || C). We can convert this
8592 to A || (B && C). Note that either operator can be any of the four
8593 truth and/or operations and the transformation will still be
8594 valid. Also note that we only care about order for the
8595 ANDIF and ORIF operators. If B contains side effects, this
8596 might change the truth-value of A. */
8597 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8598 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8599 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8600 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8601 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8602 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8604 tree a00 = TREE_OPERAND (arg0, 0);
8605 tree a01 = TREE_OPERAND (arg0, 1);
8606 tree a10 = TREE_OPERAND (arg1, 0);
8607 tree a11 = TREE_OPERAND (arg1, 1);
8608 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8609 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8610 && (code == TRUTH_AND_EXPR
8611 || code == TRUTH_OR_EXPR));
8613 if (operand_equal_p (a00, a10, 0))
8614 return fold_build2 (TREE_CODE (arg0), type, a00,
8615 fold_build2 (code, type, a01, a11));
8616 else if (commutative && operand_equal_p (a00, a11, 0))
8617 return fold_build2 (TREE_CODE (arg0), type, a00,
8618 fold_build2 (code, type, a01, a10));
8619 else if (commutative && operand_equal_p (a01, a10, 0))
8620 return fold_build2 (TREE_CODE (arg0), type, a01,
8621 fold_build2 (code, type, a00, a11));
8623 /* This case is tricky because we must either have commutative
8624 operators or else A10 must not have side-effects. */
8626 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8627 && operand_equal_p (a01, a11, 0))
8628 return fold_build2 (TREE_CODE (arg0), type,
8629 fold_build2 (code, type, a00, a10),
8630 a01);
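/* An illustrative sketch (added for exposition, not original code):
   the operand_equal_p cases above implement distributions such as

     (a || b) && (a || c)  ->  a || (b && c)

   where the commuted variants are allowed only for the
   non-short-circuit TRUTH_AND_EXPR / TRUTH_OR_EXPR forms.  */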
8633 /* See if we can build a range comparison. */
8634 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8635 return tem;
8637 /* Check for the possibility of merging component references. If our
8638 lhs is another similar operation, try to merge its rhs with our
8639 rhs. Then try to merge our lhs and rhs. */
8640 if (TREE_CODE (arg0) == code
8641 && 0 != (tem = fold_truthop (code, type,
8642 TREE_OPERAND (arg0, 1), arg1)))
8643 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8645 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8646 return tem;
8648 return NULL_TREE;
8650 case TRUTH_ORIF_EXPR:
8651 /* Note that the operands of this must be ints
8652 and their values must be 0 or true.
8653 ("true" is a fixed value perhaps depending on the language.) */
8654 /* If first arg is constant true, return it. */
8655 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8656 return fold_convert (type, arg0);
8657 case TRUTH_OR_EXPR:
8658 /* If either arg is constant zero, drop it. */
8659 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8660 return non_lvalue (fold_convert (type, arg1));
8661 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8662 /* Preserve sequence points. */
8663 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8664 return non_lvalue (fold_convert (type, arg0));
8665 /* If second arg is constant true, result is true, but we must
8666 evaluate first arg. */
8667 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8668 return omit_one_operand (type, arg1, arg0);
8669 /* Likewise for first arg, but note this only occurs here for
8670 TRUTH_OR_EXPR. */
8671 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8672 return omit_one_operand (type, arg0, arg1);
8674 /* !X || X is always true. */
8675 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8676 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8677 return omit_one_operand (type, integer_one_node, arg1);
8678 /* X || !X is always true. */
8679 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8680 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8681 return omit_one_operand (type, integer_one_node, arg0);
8683 goto truth_andor;
8685 case TRUTH_XOR_EXPR:
8686 /* If the second arg is constant zero, drop it. */
8687 if (integer_zerop (arg1))
8688 return non_lvalue (fold_convert (type, arg0));
8689 /* If the second arg is constant true, this is a logical inversion. */
8690 if (integer_onep (arg1))
8692 /* Only call invert_truthvalue if operand is a truth value. */
8693 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8694 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8695 else
8696 tem = invert_truthvalue (arg0);
8697 return non_lvalue (fold_convert (type, tem));
8699 /* Identical arguments cancel to zero. */
8700 if (operand_equal_p (arg0, arg1, 0))
8701 return omit_one_operand (type, integer_zero_node, arg0);
8703 /* !X ^ X is always true. */
8704 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8705 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8706 return omit_one_operand (type, integer_one_node, arg1);
8708 /* X ^ !X is always true. */
8709 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8710 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8711 return omit_one_operand (type, integer_one_node, arg0);
8713 return NULL_TREE;
8715 case EQ_EXPR:
8716 case NE_EXPR:
8717 case LT_EXPR:
8718 case GT_EXPR:
8719 case LE_EXPR:
8720 case GE_EXPR:
8721 /* If one arg is a real or integer constant, put it last. */
8722 if (tree_swap_operands_p (arg0, arg1, true))
8723 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8725 /* bool_var != 0 becomes bool_var. */
8726 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8727 && code == NE_EXPR)
8728 return non_lvalue (fold_convert (type, arg0));
8730 /* bool_var == 1 becomes bool_var. */
8731 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8732 && code == EQ_EXPR)
8733 return non_lvalue (fold_convert (type, arg0));
8735 /* If this is an equality comparison of the address of a non-weak
8736 object against zero, then we know the result. */
8737 if ((code == EQ_EXPR || code == NE_EXPR)
8738 && TREE_CODE (arg0) == ADDR_EXPR
8739 && DECL_P (TREE_OPERAND (arg0, 0))
8740 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8741 && integer_zerop (arg1))
8742 return constant_boolean_node (code != EQ_EXPR, type);
8744 /* If this is an equality comparison of the addresses of two non-weak,
8745 unaliased symbols neither of which is extern (since we do not
8746 have access to attributes for externs), then we know the result. */
8747 if ((code == EQ_EXPR || code == NE_EXPR)
8748 && TREE_CODE (arg0) == ADDR_EXPR
8749 && DECL_P (TREE_OPERAND (arg0, 0))
8750 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8751 && ! lookup_attribute ("alias",
8752 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8753 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8754 && TREE_CODE (arg1) == ADDR_EXPR
8755 && DECL_P (TREE_OPERAND (arg1, 0))
8756 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8757 && ! lookup_attribute ("alias",
8758 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8759 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8760 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
8761 ? code == EQ_EXPR : code != EQ_EXPR,
8762 type);
8764 /* If this is a comparison of two exprs that look like an
8765 ARRAY_REF of the same object, then we can fold this to a
8766 comparison of the two offsets. */
8767 if (TREE_CODE_CLASS (code) == tcc_comparison)
8769 tree base0, offset0, base1, offset1;
8771 if (extract_array_ref (arg0, &base0, &offset0)
8772 && extract_array_ref (arg1, &base1, &offset1)
8773 && operand_equal_p (base0, base1, 0))
8775 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))
8776 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))))
8777 offset0 = NULL_TREE;
8778 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))
8779 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))))
8780 offset1 = NULL_TREE;
8781 if (offset0 == NULL_TREE
8782 && offset1 == NULL_TREE)
8784 offset0 = integer_zero_node;
8785 offset1 = integer_zero_node;
8787 else if (offset0 == NULL_TREE)
8788 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8789 else if (offset1 == NULL_TREE)
8790 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8792 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
8793 return fold_build2 (code, type, offset0, offset1);
8797 /* Transform comparisons of the form X +- C CMP X. */
8798 if ((code != EQ_EXPR && code != NE_EXPR)
8799 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8800 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8801 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8802 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
8803 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8804 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8805 && !(flag_wrapv || flag_trapv))))
8807 tree arg01 = TREE_OPERAND (arg0, 1);
8808 enum tree_code code0 = TREE_CODE (arg0);
8809 int is_positive;
8811 if (TREE_CODE (arg01) == REAL_CST)
8812 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
8813 else
8814 is_positive = tree_int_cst_sgn (arg01);
8816 /* (X - c) > X becomes false. */
8817 if (code == GT_EXPR
8818 && ((code0 == MINUS_EXPR && is_positive >= 0)
8819 || (code0 == PLUS_EXPR && is_positive <= 0)))
8820 return constant_boolean_node (0, type);
8822 /* Likewise (X + c) < X becomes false. */
8823 if (code == LT_EXPR
8824 && ((code0 == PLUS_EXPR && is_positive >= 0)
8825 || (code0 == MINUS_EXPR && is_positive <= 0)))
8826 return constant_boolean_node (0, type);
8828 /* Convert (X - c) <= X to true. */
8829 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8830 && code == LE_EXPR
8831 && ((code0 == MINUS_EXPR && is_positive >= 0)
8832 || (code0 == PLUS_EXPR && is_positive <= 0)))
8833 return constant_boolean_node (1, type);
8835 /* Convert (X + c) >= X to true. */
8836 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8837 && code == GE_EXPR
8838 && ((code0 == PLUS_EXPR && is_positive >= 0)
8839 || (code0 == MINUS_EXPR && is_positive <= 0)))
8840 return constant_boolean_node (1, type);
8842 if (TREE_CODE (arg01) == INTEGER_CST)
8844 /* Convert X + c > X and X - c < X to true for integers. */
8845 if (code == GT_EXPR
8846 && ((code0 == PLUS_EXPR && is_positive > 0)
8847 || (code0 == MINUS_EXPR && is_positive < 0)))
8848 return constant_boolean_node (1, type);
8850 if (code == LT_EXPR
8851 && ((code0 == MINUS_EXPR && is_positive > 0)
8852 || (code0 == PLUS_EXPR && is_positive < 0)))
8853 return constant_boolean_node (1, type);
8855 /* Convert X + c <= X and X - c >= X to false for integers. */
8856 if (code == LE_EXPR
8857 && ((code0 == PLUS_EXPR && is_positive > 0)
8858 || (code0 == MINUS_EXPR && is_positive < 0)))
8859 return constant_boolean_node (0, type);
8861 if (code == GE_EXPR
8862 && ((code0 == MINUS_EXPR && is_positive > 0)
8863 || (code0 == PLUS_EXPR && is_positive < 0)))
8864 return constant_boolean_node (0, type);
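/* Worked example (a sketch, assuming signed int x and compilation
   without -fwrapv/-ftrapv): x + 1 > x and x - 1 < x fold to 1,
   while x + 1 <= x and x - 1 >= x fold to 0, all without looking
   at the runtime value of x.  */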
8868 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8870 tree targ0 = strip_float_extensions (arg0);
8871 tree targ1 = strip_float_extensions (arg1);
8872 tree newtype = TREE_TYPE (targ0);
8874 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8875 newtype = TREE_TYPE (targ1);
8877 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8878 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8879 return fold_build2 (code, type, fold_convert (newtype, targ0),
8880 fold_convert (newtype, targ1));
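/* E.g. (a sketch, assuming IEEE float f, g): (double) f < (double) g
   compares the same values whether done in double or in float, since
   the widening conversions are exact, so it is folded to the narrower
   comparison f < g.  */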
8882 /* (-a) CMP (-b) -> b CMP a */
8883 if (TREE_CODE (arg0) == NEGATE_EXPR
8884 && TREE_CODE (arg1) == NEGATE_EXPR)
8885 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8886 TREE_OPERAND (arg0, 0));
8888 if (TREE_CODE (arg1) == REAL_CST)
8890 REAL_VALUE_TYPE cst;
8891 cst = TREE_REAL_CST (arg1);
8893 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8894 if (TREE_CODE (arg0) == NEGATE_EXPR)
8895 return
8896 fold_build2 (swap_tree_comparison (code), type,
8897 TREE_OPERAND (arg0, 0),
8898 build_real (TREE_TYPE (arg1),
8899 REAL_VALUE_NEGATE (cst)));
8901 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8902 /* a CMP (-0) -> a CMP 0 */
8903 if (REAL_VALUE_MINUS_ZERO (cst))
8904 return fold_build2 (code, type, arg0,
8905 build_real (TREE_TYPE (arg1), dconst0));
8907 /* x != NaN is always true, other ops are always false. */
8908 if (REAL_VALUE_ISNAN (cst)
8909 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8911 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8912 return omit_one_operand (type, tem, arg0);
8915 /* Fold comparisons against infinity. */
8916 if (REAL_VALUE_ISINF (cst))
8918 tem = fold_inf_compare (code, type, arg0, arg1);
8919 if (tem != NULL_TREE)
8920 return tem;
8924 /* If this is a comparison of a real constant with a PLUS_EXPR
8925 or a MINUS_EXPR of a real constant, and unsafe_math_optimizations
8926 are enabled, we can convert it into a comparison with a revised
8927 real constant, provided no overflow occurs in the computation. */
8928 if (flag_unsafe_math_optimizations
8929 && TREE_CODE (arg1) == REAL_CST
8930 && (TREE_CODE (arg0) == PLUS_EXPR
8931 || TREE_CODE (arg0) == MINUS_EXPR)
8932 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8933 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8934 ? MINUS_EXPR : PLUS_EXPR,
8935 arg1, TREE_OPERAND (arg0, 1), 0))
8936 && ! TREE_CONSTANT_OVERFLOW (tem))
8937 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8939 /* Likewise, we can simplify a comparison of a real constant with
8940 a MINUS_EXPR whose first operand is also a real constant, i.e.
8941 (c1 - x) < c2 becomes x > c1-c2. */
8942 if (flag_unsafe_math_optimizations
8943 && TREE_CODE (arg1) == REAL_CST
8944 && TREE_CODE (arg0) == MINUS_EXPR
8945 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8946 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8947 arg1, 0))
8948 && ! TREE_CONSTANT_OVERFLOW (tem))
8949 return fold_build2 (swap_tree_comparison (code), type,
8950 TREE_OPERAND (arg0, 1), tem);
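/* Worked instance (a sketch; requires -funsafe-math-optimizations):
   (10.0 - x) < 4.0 is rewritten as x > 6.0, with the comparison
   swapped because x is effectively negated on the left-hand side.  */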
8952 /* Fold comparisons against built-in math functions. */
8953 if (TREE_CODE (arg1) == REAL_CST
8954 && flag_unsafe_math_optimizations
8955 && ! flag_errno_math)
8957 enum built_in_function fcode = builtin_mathfn_code (arg0);
8959 if (fcode != END_BUILTINS)
8961 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8962 if (tem != NULL_TREE)
8963 return tem;
8968 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8969 if (TREE_CONSTANT (arg1)
8970 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8971 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8972 /* This optimization is invalid for ordered comparisons
8973 if CONST+INCR overflows or if foo+incr might overflow.
8974 This optimization is invalid for floating point due to rounding.
8975 For pointer types we assume overflow doesn't happen. */
8976 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8977 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8978 && (code == EQ_EXPR || code == NE_EXPR))))
8980 tree varop, newconst;
8982 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8984 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8985 arg1, TREE_OPERAND (arg0, 1));
8986 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8987 TREE_OPERAND (arg0, 0),
8988 TREE_OPERAND (arg0, 1));
8990 else
8992 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8993 arg1, TREE_OPERAND (arg0, 1));
8994 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8995 TREE_OPERAND (arg0, 0),
8996 TREE_OPERAND (arg0, 1));
9000 /* If VAROP is a reference to a bitfield, we must mask
9001 the constant by the width of the field. */
9002 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9003 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9004 && host_integerp (DECL_SIZE (TREE_OPERAND
9005 (TREE_OPERAND (varop, 0), 1)), 1))
9007 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9008 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9009 tree folded_compare, shift;
9011 /* First check whether the comparison would come out
9012 always the same. If we don't do that we would
9013 change the meaning with the masking. */
9014 folded_compare = fold_build2 (code, type,
9015 TREE_OPERAND (varop, 0), arg1);
9016 if (integer_zerop (folded_compare)
9017 || integer_onep (folded_compare))
9018 return omit_one_operand (type, folded_compare, varop);
9020 shift = build_int_cst (NULL_TREE,
9021 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9022 shift = fold_convert (TREE_TYPE (varop), shift);
9023 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9024 newconst, shift);
9025 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9026 newconst, shift);
9029 return fold_build2 (code, type, varop, newconst);
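/* Worked example (a sketch, assuming int i): i++ == 5 becomes
   ++i == 6 and i-- == 5 becomes --i == 4; for integer types only
   EQ/NE are handled, since ordered comparisons could be broken by
   overflow of CONST + INCR.  */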
9032 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9033 This transformation affects the cases which are handled in later
9034 optimizations involving comparisons with non-negative constants. */
9035 if (TREE_CODE (arg1) == INTEGER_CST
9036 && TREE_CODE (arg0) != INTEGER_CST
9037 && tree_int_cst_sgn (arg1) > 0)
9039 switch (code)
9041 case GE_EXPR:
9042 arg1 = const_binop (MINUS_EXPR, arg1,
9043 build_int_cst (TREE_TYPE (arg1), 1), 0);
9044 return fold_build2 (GT_EXPR, type, arg0,
9045 fold_convert (TREE_TYPE (arg0), arg1));
9047 case LT_EXPR:
9048 arg1 = const_binop (MINUS_EXPR, arg1,
9049 build_int_cst (TREE_TYPE (arg1), 1), 0);
9050 return fold_build2 (LE_EXPR, type, arg0,
9051 fold_convert (TREE_TYPE (arg0), arg1));
9053 default:
9054 break;
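/* E.g. (a sketch, for integer x and a positive constant): x >= 5
   is canonicalized to x > 4 and x < 5 to x <= 4, so the
   optimizations below only need to recognize one form.  */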
9058 /* Comparisons with the highest or lowest possible integer of
9059 the specified size will have known values. */
9061 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9063 if (TREE_CODE (arg1) == INTEGER_CST
9064 && ! TREE_CONSTANT_OVERFLOW (arg1)
9065 && width <= 2 * HOST_BITS_PER_WIDE_INT
9066 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9067 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9069 HOST_WIDE_INT signed_max_hi;
9070 unsigned HOST_WIDE_INT signed_max_lo;
9071 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9073 if (width <= HOST_BITS_PER_WIDE_INT)
9075 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9076 - 1;
9077 signed_max_hi = 0;
9078 max_hi = 0;
9080 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9082 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9083 min_lo = 0;
9084 min_hi = 0;
9086 else
9088 max_lo = signed_max_lo;
9089 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9090 min_hi = -1;
9093 else
9095 width -= HOST_BITS_PER_WIDE_INT;
9096 signed_max_lo = -1;
9097 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9098 - 1;
9099 max_lo = -1;
9100 min_lo = 0;
9102 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9104 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9105 min_hi = 0;
9107 else
9109 max_hi = signed_max_hi;
9110 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9114 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9115 && TREE_INT_CST_LOW (arg1) == max_lo)
9116 switch (code)
9118 case GT_EXPR:
9119 return omit_one_operand (type, integer_zero_node, arg0);
9121 case GE_EXPR:
9122 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9124 case LE_EXPR:
9125 return omit_one_operand (type, integer_one_node, arg0);
9127 case LT_EXPR:
9128 return fold_build2 (NE_EXPR, type, arg0, arg1);
9130 /* The GE_EXPR and LT_EXPR cases above are not normally
9131 reached because of previous transformations. */
9133 default:
9134 break;
9136 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9137 == max_hi
9138 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9139 switch (code)
9141 case GT_EXPR:
9142 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9143 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9144 case LE_EXPR:
9145 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9146 return fold_build2 (NE_EXPR, type, arg0, arg1);
9147 default:
9148 break;
9150 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9151 == min_hi
9152 && TREE_INT_CST_LOW (arg1) == min_lo)
9153 switch (code)
9155 case LT_EXPR:
9156 return omit_one_operand (type, integer_zero_node, arg0);
9158 case LE_EXPR:
9159 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9161 case GE_EXPR:
9162 return omit_one_operand (type, integer_one_node, arg0);
9164 case GT_EXPR:
9165 return fold_build2 (NE_EXPR, type, arg0, arg1);
9167 default:
9168 break;
9170 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9171 == min_hi
9172 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9173 switch (code)
9175 case GE_EXPR:
9176 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9177 return fold_build2 (NE_EXPR, type, arg0, arg1);
9178 case LT_EXPR:
9179 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9180 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9181 default:
9182 break;
9185 else if (!in_gimple_form
9186 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9187 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9188 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9189 /* signed_type does not work on pointer types. */
9190 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9192 /* The following case also applies to X < signed_max+1
9193 and X >= signed_max+1 because of previous transformations. */
9194 if (code == LE_EXPR || code == GT_EXPR)
9196 tree st0, st1;
9197 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9198 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9199 return fold
9200 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9201 type, fold_convert (st0, arg0),
9202 fold_convert (st1, integer_zero_node)));
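/* Worked example (a sketch, assuming 32-bit unsigned int x):
   x > 2147483647 folds to (int) x < 0 and x <= 2147483647 folds
   to (int) x >= 0, turning the comparison into a sign test.  */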
9208 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9209 a MINUS_EXPR of a constant, we can convert it into a comparison with
9210 a revised constant as long as no overflow occurs. */
9211 if ((code == EQ_EXPR || code == NE_EXPR)
9212 && TREE_CODE (arg1) == INTEGER_CST
9213 && (TREE_CODE (arg0) == PLUS_EXPR
9214 || TREE_CODE (arg0) == MINUS_EXPR)
9215 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9216 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9217 ? MINUS_EXPR : PLUS_EXPR,
9218 arg1, TREE_OPERAND (arg0, 1), 0))
9219 && ! TREE_CONSTANT_OVERFLOW (tem))
9220 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9222 /* Similarly for a NEGATE_EXPR. */
9223 else if ((code == EQ_EXPR || code == NE_EXPR)
9224 && TREE_CODE (arg0) == NEGATE_EXPR
9225 && TREE_CODE (arg1) == INTEGER_CST
9226 && 0 != (tem = negate_expr (arg1))
9227 && TREE_CODE (tem) == INTEGER_CST
9228 && ! TREE_CONSTANT_OVERFLOW (tem))
9229 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9231 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9232 for !=. Don't do this for ordered comparisons due to overflow. */
9233 else if ((code == NE_EXPR || code == EQ_EXPR)
9234 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9235 return fold_build2 (code, type,
9236 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9238 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9239 && (TREE_CODE (arg0) == NOP_EXPR
9240 || TREE_CODE (arg0) == CONVERT_EXPR))
9242 /* If we are widening one operand of an integer comparison,
9243 see if the other operand is similarly being widened. Perhaps we
9244 can do the comparison in the narrower type. */
9245 tem = fold_widened_comparison (code, type, arg0, arg1);
9246 if (tem)
9247 return tem;
9249 /* Or if we are changing signedness. */
9250 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9251 if (tem)
9252 return tem;
9255 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9256 constant, we can simplify it. */
9257 else if (TREE_CODE (arg1) == INTEGER_CST
9258 && (TREE_CODE (arg0) == MIN_EXPR
9259 || TREE_CODE (arg0) == MAX_EXPR)
9260 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9262 tem = optimize_minmax_comparison (code, type, op0, op1);
9263 if (tem)
9264 return tem;
9266 return NULL_TREE;
9269 /* If we are comparing an ABS_EXPR with a constant, we can
9270 convert all the cases into explicit comparisons, but they may
9271 well not be faster than doing the ABS and one comparison.
9272 But ABS (X) <= C is a range comparison, which becomes a subtraction
9273 and a comparison, and is probably faster. */
9274 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9275 && TREE_CODE (arg0) == ABS_EXPR
9276 && ! TREE_SIDE_EFFECTS (arg0)
9277 && (0 != (tem = negate_expr (arg1)))
9278 && TREE_CODE (tem) == INTEGER_CST
9279 && ! TREE_CONSTANT_OVERFLOW (tem))
9280 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9281 build2 (GE_EXPR, type,
9282 TREE_OPERAND (arg0, 0), tem),
9283 build2 (LE_EXPR, type,
9284 TREE_OPERAND (arg0, 0), arg1));
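/* Worked example (a sketch, assuming int x): abs (x) <= 7 becomes
   x >= -7 && x <= 7, a range check that is usually cheaper than
   computing ABS and then comparing.  */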
9286 /* Convert ABS_EXPR<x> >= 0 to true. */
9287 else if (code == GE_EXPR
9288 && tree_expr_nonnegative_p (arg0)
9289 && (integer_zerop (arg1)
9290 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9291 && real_zerop (arg1))))
9292 return omit_one_operand (type, integer_one_node, arg0);
9294 /* Convert ABS_EXPR<x> < 0 to false. */
9295 else if (code == LT_EXPR
9296 && tree_expr_nonnegative_p (arg0)
9297 && (integer_zerop (arg1) || real_zerop (arg1)))
9298 return omit_one_operand (type, integer_zero_node, arg0);
9300 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9301 else if ((code == EQ_EXPR || code == NE_EXPR)
9302 && TREE_CODE (arg0) == ABS_EXPR
9303 && (integer_zerop (arg1) || real_zerop (arg1)))
9304 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9306 /* If this is an EQ or NE comparison with zero and ARG0 is
9307 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9308 two operations, but the latter can be done in one less insn
9309 on machines that have only two-operand insns or on which a
9310 constant cannot be the first operand. */
9311 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9312 && TREE_CODE (arg0) == BIT_AND_EXPR)
9314 tree arg00 = TREE_OPERAND (arg0, 0);
9315 tree arg01 = TREE_OPERAND (arg0, 1);
9316 if (TREE_CODE (arg00) == LSHIFT_EXPR
9317 && integer_onep (TREE_OPERAND (arg00, 0)))
9318 return
9319 fold_build2 (code, type,
9320 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9321 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9322 arg01, TREE_OPERAND (arg00, 1)),
9323 fold_convert (TREE_TYPE (arg0),
9324 integer_one_node)),
9325 arg1);
9326 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9327 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9328 return
9329 fold_build2 (code, type,
9330 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9331 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9332 arg00, TREE_OPERAND (arg01, 1)),
9333 fold_convert (TREE_TYPE (arg0),
9334 integer_one_node)),
9335 arg1);
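/* E.g. (a sketch, for int x, n): ((1 << n) & x) != 0 becomes
   ((x >> n) & 1) != 0, saving an instruction on targets with only
   two-operand insns or where a constant cannot be a first operand.  */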
9338 /* If this is an NE or EQ comparison of zero against the result of a
9339 signed MOD operation whose second operand is a power of 2, make
9340 the MOD operation unsigned since it is simpler and equivalent. */
9341 if ((code == NE_EXPR || code == EQ_EXPR)
9342 && integer_zerop (arg1)
9343 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9344 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9345 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9346 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9347 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9348 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9350 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9351 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9352 fold_convert (newtype,
9353 TREE_OPERAND (arg0, 0)),
9354 fold_convert (newtype,
9355 TREE_OPERAND (arg0, 1)));
9357 return fold_build2 (code, type, newmod,
9358 fold_convert (newtype, arg1));
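/* Worked example (a sketch, assuming two's complement int x):
   x % 4 == 0 becomes (unsigned int) x % 4U == 0; for a power-of-two
   divisor both forms are zero exactly when the low bits of x are
   zero.  */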
9361 /* If this is an NE comparison of zero with an AND of one, remove the
9362 comparison since the AND will give the correct value. */
9363 if (code == NE_EXPR && integer_zerop (arg1)
9364 && TREE_CODE (arg0) == BIT_AND_EXPR
9365 && integer_onep (TREE_OPERAND (arg0, 1)))
9366 return fold_convert (type, arg0);
9368 /* If we have (A & C) == C where C is a power of 2, convert this into
9369 (A & C) != 0. Similarly for NE_EXPR. */
9370 if ((code == EQ_EXPR || code == NE_EXPR)
9371 && TREE_CODE (arg0) == BIT_AND_EXPR
9372 && integer_pow2p (TREE_OPERAND (arg0, 1))
9373 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9374 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9375 arg0, fold_convert (TREE_TYPE (arg0),
9376 integer_zero_node));
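/* E.g. (a sketch): (x & 8) == 8 folds to (x & 8) != 0, valid
   because 8 is a power of two, so the masked value can only be
   0 or 8.  */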
9378 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9379 bit, then fold the expression into A < 0 or A >= 0. */
9380 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9381 if (tem)
9382 return tem;
9384 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9385 Similarly for NE_EXPR. */
9386 if ((code == EQ_EXPR || code == NE_EXPR)
9387 && TREE_CODE (arg0) == BIT_AND_EXPR
9388 && TREE_CODE (arg1) == INTEGER_CST
9389 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9391 tree notc = fold_build1 (BIT_NOT_EXPR,
9392 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9393 TREE_OPERAND (arg0, 1));
9394 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9395 arg1, notc);
9396 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9397 if (integer_nonzerop (dandnotc))
9398 return omit_one_operand (type, rslt, arg0);
9401 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9402 Similarly for NE_EXPR. */
9403 if ((code == EQ_EXPR || code == NE_EXPR)
9404 && TREE_CODE (arg0) == BIT_IOR_EXPR
9405 && TREE_CODE (arg1) == INTEGER_CST
9406 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9408 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9409 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9410 TREE_OPERAND (arg0, 1), notd);
9411 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9412 if (integer_nonzerop (candnotd))
9413 return omit_one_operand (type, rslt, arg0);
9416 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9417 and similarly for >= into !=. */
9418 if ((code == LT_EXPR || code == GE_EXPR)
9419 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9420 && TREE_CODE (arg1) == LSHIFT_EXPR
9421 && integer_onep (TREE_OPERAND (arg1, 0)))
9422 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9423 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9424 TREE_OPERAND (arg1, 1)),
9425 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9427 else if ((code == LT_EXPR || code == GE_EXPR)
9428 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9429 && (TREE_CODE (arg1) == NOP_EXPR
9430 || TREE_CODE (arg1) == CONVERT_EXPR)
9431 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9432 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9433 return
9434 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9435 fold_convert (TREE_TYPE (arg0),
9436 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9437 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9438 1))),
9439 fold_convert (TREE_TYPE (arg0), integer_zero_node));
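/* Worked example (a sketch, assuming unsigned int x): x < (1U << n)
   folds to (x >> n) == 0 and x >= (1U << n) to (x >> n) != 0,
   since x < 2**n exactly when no bit at or above position n is
   set.  */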
9441 /* Simplify comparison of something with itself. (For IEEE
9442 floating-point, we can only do some of these simplifications.) */
9443 if (operand_equal_p (arg0, arg1, 0))
9445 switch (code)
9447 case EQ_EXPR:
9448 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9449 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9450 return constant_boolean_node (1, type);
9451 break;
9453 case GE_EXPR:
9454 case LE_EXPR:
9455 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9456 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9457 return constant_boolean_node (1, type);
9458 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9460 case NE_EXPR:
9461 /* For NE, we can only do this simplification if integer
9462 or we don't honor IEEE floating point NaNs. */
9463 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9464 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9465 break;
9466 /* ... fall through ... */
9467 case GT_EXPR:
9468 case LT_EXPR:
9469 return constant_boolean_node (0, type);
9470 default:
9471 gcc_unreachable ();
9475 /* If we are comparing an expression that just has comparisons
9476 of two integer values, arithmetic expressions of those comparisons,
9477 and constants, we can simplify it. There are only three cases
9478 to check: the two values can either be equal, the first can be
9479 greater, or the second can be greater. Fold the expression for
9480 those three values. Since each value must be 0 or 1, we have
9481 eight possibilities, each of which corresponds to the constant 0
9482 or 1 or one of the six possible comparisons.
9484 This handles common cases like (a > b) == 0 but also handles
9485 expressions like ((x > y) - (y > x)) > 0, which supposedly
9486 occur in macroized code. */
9488 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9490 tree cval1 = 0, cval2 = 0;
9491 int save_p = 0;
9493 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9494 /* Don't handle degenerate cases here; they should already
9495 have been handled anyway. */
9496 && cval1 != 0 && cval2 != 0
9497 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9498 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9499 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9500 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9501 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9502 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9503 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9505 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9506 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9508 /* We can't just pass T to eval_subst in case cval1 or cval2
9509 was the same as ARG1. */
9511 tree high_result
9512 = fold_build2 (code, type,
9513 eval_subst (arg0, cval1, maxval,
9514 cval2, minval),
9515 arg1);
9516 tree equal_result
9517 = fold_build2 (code, type,
9518 eval_subst (arg0, cval1, maxval,
9519 cval2, maxval),
9520 arg1);
9521 tree low_result
9522 = fold_build2 (code, type,
9523 eval_subst (arg0, cval1, minval,
9524 cval2, maxval),
9525 arg1);
9527 /* All three of these results should be 0 or 1. Confirm they
9528 are. Then use those values to select the proper code
9529 to use. */
9531 if ((integer_zerop (high_result)
9532 || integer_onep (high_result))
9533 && (integer_zerop (equal_result)
9534 || integer_onep (equal_result))
9535 && (integer_zerop (low_result)
9536 || integer_onep (low_result)))
9538 /* Make a 3-bit mask with the high-order bit being the
9539 value for `>', the next for `=', and the low for `<'. */
9540 switch ((integer_onep (high_result) * 4)
9541 + (integer_onep (equal_result) * 2)
9542 + integer_onep (low_result))
9544 case 0:
9545 /* Always false. */
9546 return omit_one_operand (type, integer_zero_node, arg0);
9547 case 1:
9548 code = LT_EXPR;
9549 break;
9550 case 2:
9551 code = EQ_EXPR;
9552 break;
9553 case 3:
9554 code = LE_EXPR;
9555 break;
9556 case 4:
9557 code = GT_EXPR;
9558 break;
9559 case 5:
9560 code = NE_EXPR;
9561 break;
9562 case 6:
9563 code = GE_EXPR;
9564 break;
9565 case 7:
9566 /* Always true. */
9567 return omit_one_operand (type, integer_one_node, arg0);
9570 if (save_p)
9571 return save_expr (build2 (code, type, cval1, cval2));
9572 else
9573 return fold_build2 (code, type, cval1, cval2);
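/* Worked example (a sketch, for integer x, y): evaluating
   ((x > y) - (y > x)) > 0 at the three orderings gives 1, 0, 0,
   i.e. mask 4 above, so the whole expression folds to x > y.  */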
9578 /* If this is a comparison of a field, we may be able to simplify it. */
9579 if (((TREE_CODE (arg0) == COMPONENT_REF
9580 && lang_hooks.can_use_bit_fields_p ())
9581 || TREE_CODE (arg0) == BIT_FIELD_REF)
9582 && (code == EQ_EXPR || code == NE_EXPR)
9583 /* Handle the constant case even without -O
9584 to make sure the warnings are given. */
9585 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9587 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9588 if (t1)
9589 return t1;
9592 /* Fold a comparison of the address of COMPONENT_REFs with the same
9593 type and component to a comparison of the address of the base
9594 object. In short, &x->a OP &y->a to x OP y and
9595 &x->a OP &y.a to x OP &y */
9596 if (TREE_CODE (arg0) == ADDR_EXPR
9597 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9598 && TREE_CODE (arg1) == ADDR_EXPR
9599 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9601 tree cref0 = TREE_OPERAND (arg0, 0);
9602 tree cref1 = TREE_OPERAND (arg1, 0);
9603 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9605 tree op0 = TREE_OPERAND (cref0, 0);
9606 tree op1 = TREE_OPERAND (cref1, 0);
9607 return fold_build2 (code, type,
9608 build_fold_addr_expr (op0),
9609 build_fold_addr_expr (op1));
9613 /* Optimize comparisons of strlen vs zero to a compare of the
9614 first character of the string vs zero. To wit,
9615 strlen(ptr) == 0 => *ptr == 0
9616 strlen(ptr) != 0 => *ptr != 0
9617 Other cases should reduce to one of these two (or a constant)
9618 due to the return value of strlen being unsigned. */
9619 if ((code == EQ_EXPR || code == NE_EXPR)
9620 && integer_zerop (arg1)
9621 && TREE_CODE (arg0) == CALL_EXPR)
9623 tree fndecl = get_callee_fndecl (arg0);
9624 tree arglist;
9626 if (fndecl
9627 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9628 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9629 && (arglist = TREE_OPERAND (arg0, 1))
9630 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9631 && ! TREE_CHAIN (arglist))
9633 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9634 return fold_build2 (code, type, iref,
9635 build_int_cst (TREE_TYPE (iref), 0));
9639 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9640 into a single range test. */
9641 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9642 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9643 && TREE_CODE (arg1) == INTEGER_CST
9644 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9645 && !integer_zerop (TREE_OPERAND (arg0, 1))
9646 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9647 && !TREE_OVERFLOW (arg1))
9649 t1 = fold_div_compare (code, type, arg0, arg1);
9650 if (t1 != NULL_TREE)
9651 return t1;
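/* Worked instance (a sketch, assuming int x with truncating
   division): x / 4 == 3 holds exactly for 12 <= x <= 15, so
   fold_div_compare can turn it into a single range test.  */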
9654 if ((code == EQ_EXPR || code == NE_EXPR)
9655 && !TREE_SIDE_EFFECTS (arg0)
9656 && integer_zerop (arg1)
9657 && tree_expr_nonzero_p (arg0))
9658 return constant_boolean_node (code == NE_EXPR, type);
9660 t1 = fold_relational_const (code, type, arg0, arg1);
9661 return t1 == NULL_TREE ? NULL_TREE : t1;
9663 case UNORDERED_EXPR:
9664 case ORDERED_EXPR:
9665 case UNLT_EXPR:
9666 case UNLE_EXPR:
9667 case UNGT_EXPR:
9668 case UNGE_EXPR:
9669 case UNEQ_EXPR:
9670 case LTGT_EXPR:
9671 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9673 t1 = fold_relational_const (code, type, arg0, arg1);
9674 if (t1 != NULL_TREE)
9675 return t1;
9678 /* If the first operand is NaN, the result is constant. */
9679 if (TREE_CODE (arg0) == REAL_CST
9680 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9681 && (code != LTGT_EXPR || ! flag_trapping_math))
9683 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9684 ? integer_zero_node
9685 : integer_one_node;
9686 return omit_one_operand (type, t1, arg1);
9689 /* If the second operand is NaN, the result is constant. */
9690 if (TREE_CODE (arg1) == REAL_CST
9691 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9692 && (code != LTGT_EXPR || ! flag_trapping_math))
9694 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9695 ? integer_zero_node
9696 : integer_one_node;
9697 return omit_one_operand (type, t1, arg0);
9700 /* Simplify unordered comparison of something with itself. */
9701 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9702 && operand_equal_p (arg0, arg1, 0))
9703 return constant_boolean_node (1, type);
9705 if (code == LTGT_EXPR
9706 && !flag_trapping_math
9707 && operand_equal_p (arg0, arg1, 0))
9708 return constant_boolean_node (0, type);
9710 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9712 tree targ0 = strip_float_extensions (arg0);
9713 tree targ1 = strip_float_extensions (arg1);
9714 tree newtype = TREE_TYPE (targ0);
9716 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9717 newtype = TREE_TYPE (targ1);
9719 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9720 return fold_build2 (code, type, fold_convert (newtype, targ0),
9721 fold_convert (newtype, targ1));
9724 return NULL_TREE;
9726 case COMPOUND_EXPR:
9727 /* When pedantic, a compound expression can be neither an lvalue
9728 nor an integer constant expression. */
9729 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9730 return NULL_TREE;
9731 /* Don't let (0, 0) be a null pointer constant. */
9732 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9733 : fold_convert (type, arg1);
9734 return pedantic_non_lvalue (tem);
9736 case COMPLEX_EXPR:
9737 if (wins)
9738 return build_complex (type, arg0, arg1);
9739 return NULL_TREE;
9741 case ASSERT_EXPR:
9742 /* An ASSERT_EXPR should never be passed to fold_binary. */
9743 gcc_unreachable ();
9745 default:
9746 return NULL_TREE;
9747 } /* switch (code) */
9750 /* Callback for walk_tree, looking for a LABEL_EXPR. Returns *TP
9751 if it is a LABEL_EXPR, and NULL_TREE otherwise. Does not descend
9752 into the sub-tree of a GOTO_EXPR. */
9754 static tree
9755 contains_label_1 (tree *tp,
9756 int *walk_subtrees,
9757 void *data ATTRIBUTE_UNUSED)
9759 switch (TREE_CODE (*tp))
9761 case LABEL_EXPR:
9762 return *tp;
9763 case GOTO_EXPR:
9764 *walk_subtrees = 0;
9765 /* no break */
9766 default:
9767 return NULL_TREE;
9771 /* Checks whether the sub-tree ST contains a label (LABEL_EXPR) which
9772 is accessible from outside the sub-tree. Returns true if such a
9773 label is found, and false otherwise. */
9775 static bool
9776 contains_label_p (tree st)
9778 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
9781 /* Fold a ternary expression of code CODE and type TYPE with operands
9782 OP0, OP1, and OP2. Return the folded expression if folding is
9783 successful. Otherwise, return NULL_TREE. */
9785 tree
9786 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
9788 tree tem;
9789 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
9790 enum tree_code_class kind = TREE_CODE_CLASS (code);
9792 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9793 && TREE_CODE_LENGTH (code) == 3);
9795 /* Strip any conversions that don't change the mode. This is safe
9796 for every expression, except for a comparison expression because
9797 its signedness is derived from its operands. So, in the latter
9798 case, only strip conversions that don't change the signedness.
9800 Note that this is done as an internal manipulation within the
9801 constant folder, in order to find the simplest representation of
9802 the arguments so that their form can be studied. In any case,
9803 the appropriate type conversions should be put back in the tree
9804 that will get out of the constant folder. */
9805 if (op0)
9807 arg0 = op0;
9808 STRIP_NOPS (arg0);
9811 if (op1)
9813 arg1 = op1;
9814 STRIP_NOPS (arg1);
9817 switch (code)
9819 case COMPONENT_REF:
9820 if (TREE_CODE (arg0) == CONSTRUCTOR
9821 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
9823 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
9824 if (m)
9825 return TREE_VALUE (m);
9827 return NULL_TREE;
9829 case COND_EXPR:
9830 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9831 so all simple results must be passed through pedantic_non_lvalue. */
9832 if (TREE_CODE (arg0) == INTEGER_CST)
9834 tree unused_op = integer_zerop (arg0) ? op1 : op2;
9835 tem = integer_zerop (arg0) ? op2 : op1;
9836 /* Only optimize constant conditions when the selected branch
9837 has the same type as the COND_EXPR. This avoids optimizing
9838 away "c ? x : throw", where the throw has a void type.
9839 Avoid throwing away an operand that contains a label. */
9840 if ((!TREE_SIDE_EFFECTS (unused_op)
9841 || !contains_label_p (unused_op))
9842 && (! VOID_TYPE_P (TREE_TYPE (tem))
9843 || VOID_TYPE_P (type)))
9844 return pedantic_non_lvalue (tem);
9845 return NULL_TREE;
9847 if (operand_equal_p (arg1, op2, 0))
9848 return pedantic_omit_one_operand (type, arg1, arg0);
9850 /* If we have A op B ? A : C, we may be able to convert this to a
9851 simpler expression, depending on the operation and the values
9852 of B and C. Signed zeros prevent all of these transformations,
9853 for reasons given above each one.
9855 Also try swapping the arguments and inverting the conditional. */
9856 if (COMPARISON_CLASS_P (arg0)
9857 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9858 arg1, TREE_OPERAND (arg0, 1))
9859 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9861 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
9862 if (tem)
9863 return tem;
9866 if (COMPARISON_CLASS_P (arg0)
9867 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9868 op2,
9869 TREE_OPERAND (arg0, 1))
9870 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
9872 tem = invert_truthvalue (arg0);
9873 if (COMPARISON_CLASS_P (tem))
9875 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
9876 if (tem)
9877 return tem;
9881 /* If the second operand is simpler than the third, swap them
9882 since that produces better jump optimization results. */
9883 if (tree_swap_operands_p (op1, op2, false))
9885 /* See if this can be inverted. If it can't, possibly because
9886 it was a floating-point inequality comparison, don't do
9887 anything. */
9888 tem = invert_truthvalue (arg0);
9890 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9891 return fold_build3 (code, type, tem, op2, op1);
9894 /* Convert A ? 1 : 0 to simply A. */
9895 if (integer_onep (op1)
9896 && integer_zerop (op2)
9897 /* If we try to convert OP0 to our type, the
9898 call to fold will try to move the conversion inside
9899 a COND, which will recurse. In that case, the COND_EXPR
9900 is probably the best choice, so leave it alone. */
9901 && type == TREE_TYPE (arg0))
9902 return pedantic_non_lvalue (arg0);
9904 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
9905 over COND_EXPR in cases such as floating point comparisons. */
9906 if (integer_zerop (op1)
9907 && integer_onep (op2)
9908 && truth_value_p (TREE_CODE (arg0)))
9909 return pedantic_non_lvalue (fold_convert (type,
9910 invert_truthvalue (arg0)));
9912 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
9913 if (TREE_CODE (arg0) == LT_EXPR
9914 && integer_zerop (TREE_OPERAND (arg0, 1))
9915 && integer_zerop (op2)
9916 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
9917 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
9918 TREE_TYPE (tem), tem, arg1));
9920 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
9921 already handled above. */
9922 if (TREE_CODE (arg0) == BIT_AND_EXPR
9923 && integer_onep (TREE_OPERAND (arg0, 1))
9924 && integer_zerop (op2)
9925 && integer_pow2p (arg1))
9927 tree tem = TREE_OPERAND (arg0, 0);
9928 STRIP_NOPS (tem);
9929 if (TREE_CODE (tem) == RSHIFT_EXPR
9930 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9931 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
9932 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
9933 return fold_build2 (BIT_AND_EXPR, type,
9934 TREE_OPERAND (tem, 0), arg1);
9937 /* A & N ? N : 0 is simply A & N if N is a power of two. This
9938 is probably obsolete because the first operand should be a
9939 truth value (that's why we have the two cases above), but let's
9940 leave it in until we can confirm this for all front-ends. */
9941 if (integer_zerop (op2)
9942 && TREE_CODE (arg0) == NE_EXPR
9943 && integer_zerop (TREE_OPERAND (arg0, 1))
9944 && integer_pow2p (arg1)
9945 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
9946 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
9947 arg1, OEP_ONLY_CONST))
9948 return pedantic_non_lvalue (fold_convert (type,
9949 TREE_OPERAND (arg0, 0)));
9951 /* Convert A ? B : 0 into A && B if A and B are truth values. */
9952 if (integer_zerop (op2)
9953 && truth_value_p (TREE_CODE (arg0))
9954 && truth_value_p (TREE_CODE (arg1)))
9955 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
9957 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
9958 if (integer_onep (op2)
9959 && truth_value_p (TREE_CODE (arg0))
9960 && truth_value_p (TREE_CODE (arg1)))
9962 /* Only perform transformation if ARG0 is easily inverted. */
9963 tem = invert_truthvalue (arg0);
9964 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9965 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
9968 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
9969 if (integer_zerop (arg1)
9970 && truth_value_p (TREE_CODE (arg0))
9971 && truth_value_p (TREE_CODE (op2)))
9973 /* Only perform transformation if ARG0 is easily inverted. */
9974 tem = invert_truthvalue (arg0);
9975 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9976 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
9979 /* Convert A ? 1 : B into A || B if A and B are truth values. */
9980 if (integer_onep (arg1)
9981 && truth_value_p (TREE_CODE (arg0))
9982 && truth_value_p (TREE_CODE (op2)))
9983 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
9985 return NULL_TREE;
9987 case CALL_EXPR:
9988 /* Check for a built-in function. */
9989 if (TREE_CODE (op0) == ADDR_EXPR
9990 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
9991 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
9993 tree fndecl = TREE_OPERAND (op0, 0);
9994 tree arglist = op1;
9995 tree tmp = fold_builtin (fndecl, arglist, false);
9996 if (tmp)
9997 return tmp;
9999 return NULL_TREE;
10001 case BIT_FIELD_REF:
10002 if (TREE_CODE (arg0) == VECTOR_CST
10003 && type == TREE_TYPE (TREE_TYPE (arg0))
10004 && host_integerp (arg1, 1)
10005 && host_integerp (op2, 1))
10007 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10008 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10010 if (width != 0
10011 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10012 && (idx % width) == 0
10013 && (idx = idx / width)
10014 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10016 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10017 while (idx-- > 0 && elements)
10018 elements = TREE_CHAIN (elements);
10019 if (elements)
10020 return TREE_VALUE (elements);
10021 else
10022 return fold_convert (type, integer_zero_node);
10025 return NULL_TREE;
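/* E.g. (a sketch): extracting the element at bit offset 64 with
   width 32 from a constant V4SI vector {1, 2, 3, 4} walks to
   index 64/32 == 2 and yields the constant 3.  */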
10027 default:
10028 return NULL_TREE;
10029 } /* switch (code) */
10032 /* Perform constant folding and related simplification of EXPR.
10033 The related simplifications include x*1 => x, x*0 => 0, etc.,
10034 and application of the associative law.
10035 NOP_EXPR conversions may be removed freely (as long as we
10036 are careful not to change the type of the overall expression).
10037 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10038 but we can constant-fold them if they have constant operands. */
10040 #ifdef ENABLE_FOLD_CHECKING
10041 # define fold(x) fold_1 (x)
10042 static tree fold_1 (tree);
10043 static
10044 #endif
10045 tree
10046 fold (tree expr)
10048 const tree t = expr;
10049 enum tree_code code = TREE_CODE (t);
10050 enum tree_code_class kind = TREE_CODE_CLASS (code);
10051 tree tem;
10053 /* Return right away if a constant. */
10054 if (kind == tcc_constant)
10055 return t;
10057 if (IS_EXPR_CODE_CLASS (kind))
10059 tree type = TREE_TYPE (t);
10060 tree op0, op1, op2;
10062 switch (TREE_CODE_LENGTH (code))
10064 case 1:
10065 op0 = TREE_OPERAND (t, 0);
10066 tem = fold_unary (code, type, op0);
10067 return tem ? tem : expr;
10068 case 2:
10069 op0 = TREE_OPERAND (t, 0);
10070 op1 = TREE_OPERAND (t, 1);
10071 tem = fold_binary (code, type, op0, op1);
10072 return tem ? tem : expr;
10073 case 3:
10074 op0 = TREE_OPERAND (t, 0);
10075 op1 = TREE_OPERAND (t, 1);
10076 op2 = TREE_OPERAND (t, 2);
10077 tem = fold_ternary (code, type, op0, op1, op2);
10078 return tem ? tem : expr;
10079 default:
10080 break;
10084 switch (code)
10086 case CONST_DECL:
10087 return fold (DECL_INITIAL (t));
10089 default:
10090 return t;
10091 } /* switch (code) */
10094 #ifdef ENABLE_FOLD_CHECKING
10095 #undef fold
10097 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10098 static void fold_check_failed (tree, tree);
10099 void print_fold_checksum (tree);
10101 /* When --enable-checking=fold, compute a digest of EXPR before
10102 and after the actual fold call, to verify that fold did not
10103 accidentally change the original EXPR. */
10105 tree
10106 fold (tree expr)
10108 tree ret;
10109 struct md5_ctx ctx;
10110 unsigned char checksum_before[16], checksum_after[16];
10111 htab_t ht;
10113 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10114 md5_init_ctx (&ctx);
10115 fold_checksum_tree (expr, &ctx, ht);
10116 md5_finish_ctx (&ctx, checksum_before);
10117 htab_empty (ht);
10119 ret = fold_1 (expr);
10121 md5_init_ctx (&ctx);
10122 fold_checksum_tree (expr, &ctx, ht);
10123 md5_finish_ctx (&ctx, checksum_after);
10124 htab_delete (ht);
10126 if (memcmp (checksum_before, checksum_after, 16))
10127 fold_check_failed (expr, ret);
10129 return ret;
10132 void
10133 print_fold_checksum (tree expr)
10135 struct md5_ctx ctx;
10136 unsigned char checksum[16], cnt;
10137 htab_t ht;
10139 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10140 md5_init_ctx (&ctx);
10141 fold_checksum_tree (expr, &ctx, ht);
10142 md5_finish_ctx (&ctx, checksum);
10143 htab_delete (ht);
10144 for (cnt = 0; cnt < 16; ++cnt)
10145 fprintf (stderr, "%02x", checksum[cnt]);
10146 putc ('\n', stderr);
10149 static void
10150 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10152 internal_error ("fold check: original tree changed by fold");
10155 static void
10156 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10158 void **slot;
10159 enum tree_code code;
10160 char buf[sizeof (struct tree_decl)];
10161 int i, len;
10163 recursive_label:
10165 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10166 <= sizeof (struct tree_decl))
10167 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
10168 if (expr == NULL)
10169 return;
10170 slot = htab_find_slot (ht, expr, INSERT);
10171 if (*slot != NULL)
10172 return;
10173 *slot = expr;
10174 code = TREE_CODE (expr);
10175 if (TREE_CODE_CLASS (code) == tcc_declaration
10176 && DECL_ASSEMBLER_NAME_SET_P (expr))
10178 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10179 memcpy (buf, expr, tree_size (expr));
10180 expr = (tree) buf;
10181 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10183 else if (TREE_CODE_CLASS (code) == tcc_type
10184 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10185 || TYPE_CACHED_VALUES_P (expr)
10186 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10188 /* Allow these fields to be modified. */
10189 memcpy (buf, expr, tree_size (expr));
10190 expr = (tree) buf;
10191 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10192 TYPE_POINTER_TO (expr) = NULL;
10193 TYPE_REFERENCE_TO (expr) = NULL;
10194 if (TYPE_CACHED_VALUES_P (expr))
10196 TYPE_CACHED_VALUES_P (expr) = 0;
10197 TYPE_CACHED_VALUES (expr) = NULL;
10200 md5_process_bytes (expr, tree_size (expr), ctx);
10201 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10202 if (TREE_CODE_CLASS (code) != tcc_type
10203 && TREE_CODE_CLASS (code) != tcc_declaration
10204 && code != TREE_LIST)
10205 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10206 switch (TREE_CODE_CLASS (code))
10208 case tcc_constant:
10209 switch (code)
10211 case STRING_CST:
10212 md5_process_bytes (TREE_STRING_POINTER (expr),
10213 TREE_STRING_LENGTH (expr), ctx);
10214 break;
10215 case COMPLEX_CST:
10216 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10217 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10218 break;
10219 case VECTOR_CST:
10220 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10221 break;
10222 default:
10223 break;
10225 break;
10226 case tcc_exceptional:
10227 switch (code)
10229 case TREE_LIST:
10230 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10231 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10232 expr = TREE_CHAIN (expr);
10233 goto recursive_label;
10234 break;
10235 case TREE_VEC:
10236 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10237 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10238 break;
10239 default:
10240 break;
10242 break;
10243 case tcc_expression:
10244 case tcc_reference:
10245 case tcc_comparison:
10246 case tcc_unary:
10247 case tcc_binary:
10248 case tcc_statement:
10249 len = TREE_CODE_LENGTH (code);
10250 for (i = 0; i < len; ++i)
10251 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10252 break;
10253 case tcc_declaration:
10254 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10255 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10256 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10257 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10258 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
10259 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10260 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10261 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10262 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10263 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10264 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10265 break;
10266 case tcc_type:
10267 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10268 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10269 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10270 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10271 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10272 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10273 if (INTEGRAL_TYPE_P (expr)
10274 || SCALAR_FLOAT_TYPE_P (expr))
10276 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10277 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10279 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10280 if (TREE_CODE (expr) == RECORD_TYPE
10281 || TREE_CODE (expr) == UNION_TYPE
10282 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10283 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10284 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10285 break;
10286 default:
10287 break;
10291 #endif
10293 /* Fold a unary tree expression with code CODE of type TYPE with an
10294 operand OP0. Return a folded expression if successful. Otherwise,
10295 return a tree expression with code CODE of type TYPE with an
10296 operand OP0. */
10298 tree
10299 fold_build1 (enum tree_code code, tree type, tree op0)
10301 tree tem = fold_unary (code, type, op0);
10302 if (tem)
10303 return tem;
10305 return build1 (code, type, op0);
10308 /* Fold a binary tree expression with code CODE of type TYPE with
10309 operands OP0 and OP1. Return a folded expression if successful.
10310 Otherwise, return a tree expression with code CODE of type TYPE
10311 with operands OP0 and OP1. */
10313 tree
10314 fold_build2 (enum tree_code code, tree type, tree op0, tree op1)
10316 tree tem = fold_binary (code, type, op0, op1);
10317 if (tem)
10318 return tem;
10320 return build2 (code, type, op0, op1);
10323 /* Fold a ternary tree expression with code CODE of type TYPE with
10324 operands OP0, OP1, and OP2. Return a folded expression if
10325 successful. Otherwise, return a tree expression with code CODE of
10326 type TYPE with operands OP0, OP1, and OP2. */
10328 tree
10329 fold_build3 (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10331 tree tem = fold_ternary (code, type, op0, op1, op2);
10332 if (tem)
10333 return tem;
10335 return build3 (code, type, op0, op1, op2);
10338 /* Perform constant folding and related simplification of initializer
10339 expression EXPR. This behaves identically to "fold" but ignores
10340 potential run-time traps and exceptions that fold must preserve. */
10342 tree
10343 fold_initializer (tree expr)
10345 int saved_signaling_nans = flag_signaling_nans;
10346 int saved_trapping_math = flag_trapping_math;
10347 int saved_rounding_math = flag_rounding_math;
10348 int saved_trapv = flag_trapv;
10349 tree result;
10351 flag_signaling_nans = 0;
10352 flag_trapping_math = 0;
10353 flag_rounding_math = 0;
10354 flag_trapv = 0;
10356 result = fold (expr);
10358 flag_signaling_nans = saved_signaling_nans;
10359 flag_trapping_math = saved_trapping_math;
10360 flag_rounding_math = saved_rounding_math;
10361 flag_trapv = saved_trapv;
10363 return result;
10366 /* Determine whether the first argument is a multiple of the second. Return 0
10367 if it is not, or if we cannot easily determine that it is.
10369 An example of the sort of thing we care about (at this point; this routine
10370 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10371 fold cases do now) is discovering that
10373 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10375 is a multiple of
10377 SAVE_EXPR (J * 8)
10379 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10381 This code also handles discovering that
10383 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10385 is a multiple of 8 so we don't have to worry about dealing with a
10386 possible remainder.
10388 Note that we *look* inside a SAVE_EXPR only to determine how it was
10389 calculated; it is not safe for fold to do much of anything else with the
10390 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10391 at run time. For example, the latter example above *cannot* be implemented
10392 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10393 evaluation time of the original SAVE_EXPR is not necessarily the same at
10394 the time the new expression is evaluated. The only optimization of this
10395 sort that would be valid is changing
10397 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10399 divided by 8 to
10401 SAVE_EXPR (I) * SAVE_EXPR (J)
10403 (where the same SAVE_EXPR (J) is used in the original and the
10404 transformed version). */
10406 static int
10407 multiple_of_p (tree type, tree top, tree bottom)
10409 if (operand_equal_p (top, bottom, 0))
10410 return 1;
10412 if (TREE_CODE (type) != INTEGER_TYPE)
10413 return 0;
10415 switch (TREE_CODE (top))
10417 case BIT_AND_EXPR:
10418 /* Bitwise AND preserves multiples of a power of two: if BOTTOM is a
10419 power of two and either operand is a multiple of it, so is TOP. */
10420 if (!integer_pow2p (bottom))
10421 return 0;
10422 /* FALLTHRU */
10424 case MULT_EXPR:
10425 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10426 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10428 case PLUS_EXPR:
10429 case MINUS_EXPR:
10430 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10431 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10433 case LSHIFT_EXPR:
10434 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10436 tree op1, t1;
10438 op1 = TREE_OPERAND (top, 1);
10439 /* const_binop may not detect overflow correctly,
10440 so check for it explicitly here. */
10441 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10442 > TREE_INT_CST_LOW (op1)
10443 && TREE_INT_CST_HIGH (op1) == 0
10444 && 0 != (t1 = fold_convert (type,
10445 const_binop (LSHIFT_EXPR,
10446 size_one_node,
10447 op1, 0)))
10448 && ! TREE_OVERFLOW (t1))
10449 return multiple_of_p (type, t1, bottom);
10451 return 0;
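/* Worked example (a sketch): for TOP = x << 3 and BOTTOM = 8, T1
   becomes 1 << 3 == 8 and the recursive call reduces to
   multiple_of_p (type, 8, 8), so the shift is recognized as a
   multiple of 8.  */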
10453 case NOP_EXPR:
10454 /* Can't handle conversions from non-integral or wider integral type. */
10455 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10456 || (TYPE_PRECISION (type)
10457 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10458 return 0;
10460 /* ... fall through ... */
10462 case SAVE_EXPR:
10463 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10465 case INTEGER_CST:
10466 if (TREE_CODE (bottom) != INTEGER_CST
10467 || (TYPE_UNSIGNED (type)
10468 && (tree_int_cst_sgn (top) < 0
10469 || tree_int_cst_sgn (bottom) < 0)))
10470 return 0;
10471 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10472 top, bottom, 0));
10474 default:
10475 return 0;
10479 /* Return true if `t' is known to be non-negative. */
10481 int
10482 tree_expr_nonnegative_p (tree t)
10484 switch (TREE_CODE (t))
10486 case ABS_EXPR:
10487 return 1;
10489 case INTEGER_CST:
10490 return tree_int_cst_sgn (t) >= 0;
10492 case REAL_CST:
10493 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10495 case PLUS_EXPR:
10496 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10497 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10498 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10500 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10501 both unsigned and at least 2 bits shorter than the result. */
10502 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10503 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10504 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10506 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10507 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10508 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10509 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10511 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10512 TYPE_PRECISION (inner2)) + 1;
10513 return prec < TYPE_PRECISION (TREE_TYPE (t));
10516 break;
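/* E.g. (a sketch, with 32-bit int): for unsigned char a, b, the sum
   (int) a + (int) b needs at most 9 bits, so it is known
   non-negative; with full-width unsigned operands it is not.  */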
10518 case MULT_EXPR:
10519 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10521 /* x * x for floating point x is always non-negative. */
10522 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10523 return 1;
10524 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10525 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10528 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10529 both unsigned and their total bits is shorter than the result. */
10530 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10531 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10532 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10534 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10535 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10536 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10537 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10538 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10539 < TYPE_PRECISION (TREE_TYPE (t));
10541 return 0;
10543 case TRUNC_DIV_EXPR:
10544 case CEIL_DIV_EXPR:
10545 case FLOOR_DIV_EXPR:
10546 case ROUND_DIV_EXPR:
10547 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10548 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10550 case TRUNC_MOD_EXPR:
10551 case CEIL_MOD_EXPR:
10552 case FLOOR_MOD_EXPR:
10553 case ROUND_MOD_EXPR:
10554 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10556 case RDIV_EXPR:
10557 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10558 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10560 case BIT_AND_EXPR:
10561 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10562 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10563 case BIT_IOR_EXPR:
10564 case BIT_XOR_EXPR:
10565 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10566 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10568 case NOP_EXPR:
10570 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10571 tree outer_type = TREE_TYPE (t);
10573 if (TREE_CODE (outer_type) == REAL_TYPE)
10575 if (TREE_CODE (inner_type) == REAL_TYPE)
10576 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10577 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10579 if (TYPE_UNSIGNED (inner_type))
10580 return 1;
10581 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10584 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10586 if (TREE_CODE (inner_type) == REAL_TYPE)
10587 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10588 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10589 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10590 && TYPE_UNSIGNED (inner_type);
10593 break;
10595 case COND_EXPR:
10596 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10597 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10598 case COMPOUND_EXPR:
10599 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10600 case MIN_EXPR:
10601 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10602 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10603 case MAX_EXPR:
10604 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10605 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10606 case MODIFY_EXPR:
10607 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10608 case BIND_EXPR:
10609 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10610 case SAVE_EXPR:
10611 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10612 case NON_LVALUE_EXPR:
10613 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10614 case FLOAT_EXPR:
10615 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10617 case TARGET_EXPR:
10619 tree temp = TARGET_EXPR_SLOT (t);
10620 t = TARGET_EXPR_INITIAL (t);
10622 /* If the initializer is non-void, then it's a normal expression
10623 that will be assigned to the slot. */
10624 if (!VOID_TYPE_P (t))
10625 return tree_expr_nonnegative_p (t);
10627 /* Otherwise, the initializer sets the slot in some way. One common
10628 way is an assignment statement at the end of the initializer. */
10629 while (1)
10631 if (TREE_CODE (t) == BIND_EXPR)
10632 t = expr_last (BIND_EXPR_BODY (t));
10633 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10634 || TREE_CODE (t) == TRY_CATCH_EXPR)
10635 t = expr_last (TREE_OPERAND (t, 0));
10636 else if (TREE_CODE (t) == STATEMENT_LIST)
10637 t = expr_last (t);
10638 else
10639 break;
10641 if (TREE_CODE (t) == MODIFY_EXPR
10642 && TREE_OPERAND (t, 0) == temp)
10643 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10645 return 0;
10648 case CALL_EXPR:
10650 tree fndecl = get_callee_fndecl (t);
10651 tree arglist = TREE_OPERAND (t, 1);
10652 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10653 switch (DECL_FUNCTION_CODE (fndecl))
10655 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10656 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10657 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10658 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
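/* For instance, CASE_BUILTIN_F (BUILT_IN_SQRT) expands to
   case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
   covering the double, float and long double variants of the builtin,
   while CASE_BUILTIN_I covers the int, long and long long variants.  */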
10660 CASE_BUILTIN_F (BUILT_IN_ACOS)
10661 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10662 CASE_BUILTIN_F (BUILT_IN_CABS)
10663 CASE_BUILTIN_F (BUILT_IN_COSH)
10664 CASE_BUILTIN_F (BUILT_IN_ERFC)
10665 CASE_BUILTIN_F (BUILT_IN_EXP)
10666 CASE_BUILTIN_F (BUILT_IN_EXP10)
10667 CASE_BUILTIN_F (BUILT_IN_EXP2)
10668 CASE_BUILTIN_F (BUILT_IN_FABS)
10669 CASE_BUILTIN_F (BUILT_IN_FDIM)
10670 CASE_BUILTIN_F (BUILT_IN_FREXP)
10671 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10672 CASE_BUILTIN_F (BUILT_IN_POW10)
10673 CASE_BUILTIN_I (BUILT_IN_FFS)
10674 CASE_BUILTIN_I (BUILT_IN_PARITY)
10675 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10676 /* Always true. */
10677 return 1;
10679 CASE_BUILTIN_F (BUILT_IN_SQRT)
10680 /* sqrt(-0.0) is -0.0. */
10681 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10682 return 1;
10683 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10685 CASE_BUILTIN_F (BUILT_IN_ASINH)
10686 CASE_BUILTIN_F (BUILT_IN_ATAN)
10687 CASE_BUILTIN_F (BUILT_IN_ATANH)
10688 CASE_BUILTIN_F (BUILT_IN_CBRT)
10689 CASE_BUILTIN_F (BUILT_IN_CEIL)
10690 CASE_BUILTIN_F (BUILT_IN_ERF)
10691 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10692 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10693 CASE_BUILTIN_F (BUILT_IN_FMOD)
10694 CASE_BUILTIN_F (BUILT_IN_LCEIL)
10695 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10696 CASE_BUILTIN_F (BUILT_IN_LFLOOR)
10697 CASE_BUILTIN_F (BUILT_IN_LLCEIL)
10698 CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
10699 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10700 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10701 CASE_BUILTIN_F (BUILT_IN_LRINT)
10702 CASE_BUILTIN_F (BUILT_IN_LROUND)
10703 CASE_BUILTIN_F (BUILT_IN_MODF)
10704 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
10705 CASE_BUILTIN_F (BUILT_IN_POW)
10706 CASE_BUILTIN_F (BUILT_IN_RINT)
10707 CASE_BUILTIN_F (BUILT_IN_ROUND)
10708 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
10709 CASE_BUILTIN_F (BUILT_IN_SINH)
10710 CASE_BUILTIN_F (BUILT_IN_TANH)
10711 CASE_BUILTIN_F (BUILT_IN_TRUNC)
10712 /* True if the 1st argument is nonnegative. */
10713 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10715 CASE_BUILTIN_F (BUILT_IN_FMAX)
10716 /* True if either the 1st or the 2nd argument is nonnegative. */
10717 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10718 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10720 CASE_BUILTIN_F (BUILT_IN_FMIN)
10721 /* True if the 1st AND 2nd arguments are nonnegative. */
10722 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10723 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10725 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
10726 /* True if the 2nd argument is nonnegative. */
10727 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10729 default:
10730 break;
10731 #undef CASE_BUILTIN_F
10732 #undef CASE_BUILTIN_I
10736 /* ... fall through ... */
10738 default:
10739 if (truth_value_p (TREE_CODE (t)))
10740 /* Truth values evaluate to 0 or 1, which is nonnegative. */
10741 return 1;
10744 /* We don't know the sign of `t', so be conservative and return false. */
10745 return 0;
10748 /* Return true when T is an address and is known to be nonzero.
10749 For floating point we further ensure that T is not denormal.
10750 Similar logic is present in nonzero_address in rtlanal.c. */
10752 static bool
10753 tree_expr_nonzero_p (tree t)
10755 tree type = TREE_TYPE (t);
10757 /* Doing something useful for floating point would need more work. */
10758 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10759 return false;
10761 switch (TREE_CODE (t))
10763 case ABS_EXPR:
10764 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10765 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* Don't fall through: T is not an INTEGER_CST here.  */
break;
10767 case INTEGER_CST:
10768 /* We used to test for !integer_zerop here. This does not work correctly
10769 if TREE_CONSTANT_OVERFLOW (t). */
10770 return (TREE_INT_CST_LOW (t) != 0
10771 || TREE_INT_CST_HIGH (t) != 0);
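/* The value of an INTEGER_CST is kept in two HOST_WIDE_INT words, so
   the constant is nonzero exactly when either word is.  */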
10773 case PLUS_EXPR:
10774 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10776 /* In the presence of negative values it is hard
10777 to say anything definite. */
10778 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10779 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10780 return false;
10782 /* One of the operands must be positive and the other non-negative. */
10782 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10783 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10785 break;
10787 case MULT_EXPR:
10788 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10790 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10791 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10793 break;
10795 case NOP_EXPR:
10797 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10798 tree outer_type = TREE_TYPE (t);
/* A widening (or equal-width) conversion preserves a nonzero value;
   a narrowing one may truncate it to zero.  */
10800 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
10801 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
10803 break;
10805 case ADDR_EXPR:
10807 tree base = get_base_address (TREE_OPERAND (t, 0));
10809 if (!base)
10810 return false;
10812 /* Weak declarations may link to NULL. */
10813 if (DECL_P (base))
10814 return !DECL_WEAK (base);
10816 /* Constants are never weak. */
10817 if (CONSTANT_CLASS_P (base))
10818 return true;
10820 return false;
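/* E.g. the address of a symbol declared with __attribute__ ((weak))
   may resolve to NULL at link time, so it cannot be assumed nonzero;
   the address of a string literal can.  */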
10823 case COND_EXPR:
10824 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10825 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
10827 case MIN_EXPR:
10828 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10829 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10831 case MAX_EXPR:
10832 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
10834 /* When both operands are nonzero, then MAX must be too. */
10835 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
10836 return true;
10838 /* MAX where operand 0 is positive is positive. */
10839 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10841 /* MAX where operand 1 is positive is positive. */
10842 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10843 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10844 return true;
10845 break;
10847 case COMPOUND_EXPR:
10848 case MODIFY_EXPR:
10849 case BIND_EXPR:
10850 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
10852 case SAVE_EXPR:
10853 case NON_LVALUE_EXPR:
10854 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10856 case BIT_IOR_EXPR:
10857 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10858 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10860 default:
10861 break;
10863 return false;
10866 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
10867 attempt to fold the expression to a constant without modifying TYPE,
10868 OP0 or OP1.
10870 If the expression could be simplified to a constant, then return
10871 the constant. If the expression would not be simplified to a
10872 constant, then return NULL_TREE. */
10874 tree
10875 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
10877 tree tem = fold_binary (code, type, op0, op1);
10878 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
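/* For example, folding PLUS_EXPR over two INTEGER_CSTs built with
   build_int_cst (integer_type_node, 2) and
   build_int_cst (integer_type_node, 3) yields the constant 5; if
   either operand is a variable, fold_binary produces (at best) a
   non-constant tree and NULL_TREE is returned here.  */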
10881 /* Given the components of a unary expression CODE, TYPE and OP0,
10882 attempt to fold the expression to a constant without modifying
10883 TYPE or OP0.
10885 If the expression could be simplified to a constant, then return
10886 the constant. If the expression would not be simplified to a
10887 constant, then return NULL_TREE. */
10889 tree
10890 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
10892 tree tem = fold_unary (code, type, op0);
10893 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
10896 /* If EXP represents referencing an element in a constant string
10897 (either via pointer arithmetic or array indexing), return the
10898 tree representing the value accessed, otherwise return NULL. */
10900 tree
10901 fold_read_from_constant_string (tree exp)
10903 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10905 tree exp1 = TREE_OPERAND (exp, 0);
10906 tree index;
10907 tree string;
10909 if (TREE_CODE (exp) == INDIRECT_REF)
10910 string = string_constant (exp1, &index);
10911 else
10913 tree low_bound = array_ref_low_bound (exp);
10914 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10916 /* Optimize the special case of a zero lower bound.
10918 We convert the low_bound to sizetype to avoid some problems
10919 with constant folding. (E.g. suppose the lower bound is 1,
10920 and its mode is QI. Without the conversion, (ARRAY
10921 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10922 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
10923 if (! integer_zerop (low_bound))
10924 index = size_diffop (index, fold_convert (sizetype, low_bound));
10926 string = exp1;
10929 if (string
10930 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10931 && TREE_CODE (string) == STRING_CST
10932 && TREE_CODE (index) == INTEGER_CST
10933 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10934 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10935 == MODE_INT)
10936 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10937 return fold_convert (TREE_TYPE (exp),
10938 build_int_cst (NULL_TREE,
10939 (TREE_STRING_POINTER (string)
10940 [TREE_INT_CST_LOW (index)])));
10942 return NULL;
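/* E.g. for the expression "hello"[1] the checks above are satisfied
   and the access folds to the character constant 'e'.  */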
10945 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10946 an integer constant or real constant.
10948 TYPE is the type of the result. */
10950 static tree
10951 fold_negate_const (tree arg0, tree type)
10953 tree t = NULL_TREE;
10955 switch (TREE_CODE (arg0))
10957 case INTEGER_CST:
10959 unsigned HOST_WIDE_INT low;
10960 HOST_WIDE_INT high;
10961 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10962 TREE_INT_CST_HIGH (arg0),
10963 &low, &high);
10964 t = build_int_cst_wide (type, low, high);
10965 t = force_fit_type (t, 1,
10966 (overflow | TREE_OVERFLOW (arg0))
10967 && !TYPE_UNSIGNED (type),
10968 TREE_CONSTANT_OVERFLOW (arg0));
10969 break;
10972 case REAL_CST:
10973 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10974 break;
10976 default:
10977 gcc_unreachable ();
10980 return t;
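/* E.g. for a 32-bit signed type, the negation of INT_MIN does not fit
   the type; force_fit_type is what detects this and marks the result
   with TREE_OVERFLOW.  */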
10983 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10984 an integer constant or real constant.
10986 TYPE is the type of the result. */
10988 tree
10989 fold_abs_const (tree arg0, tree type)
10991 tree t = NULL_TREE;
10993 switch (TREE_CODE (arg0))
10995 case INTEGER_CST:
10996 /* If the value is unsigned, then the absolute value is
10997 the same as the ordinary value. */
10998 if (TYPE_UNSIGNED (type))
10999 t = arg0;
11000 /* Similarly, if the value is non-negative. */
11001 else if (INT_CST_LT (integer_minus_one_node, arg0))
11002 t = arg0;
11003 /* If the value is negative, then the absolute value is
11004 its negation. */
11005 else
11007 unsigned HOST_WIDE_INT low;
11008 HOST_WIDE_INT high;
11009 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11010 TREE_INT_CST_HIGH (arg0),
11011 &low, &high);
11012 t = build_int_cst_wide (type, low, high);
11013 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11014 TREE_CONSTANT_OVERFLOW (arg0));
11016 break;
11018 case REAL_CST:
11019 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11020 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11021 else
11022 t = arg0;
11023 break;
11025 default:
11026 gcc_unreachable ();
11029 return t;
11032 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11033 constant. TYPE is the type of the result. */
11035 static tree
11036 fold_not_const (tree arg0, tree type)
11038 tree t = NULL_TREE;
11040 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11042 t = build_int_cst_wide (type,
11043 ~ TREE_INT_CST_LOW (arg0),
11044 ~ TREE_INT_CST_HIGH (arg0));
11045 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11046 TREE_CONSTANT_OVERFLOW (arg0));
11048 return t;
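/* Bitwise NOT distributes over the two HOST_WIDE_INT words of the
   constant, so complementing the low and high words separately
   complements the whole value; force_fit_type then makes the result
   fit the precision of TYPE.  */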
11051 /* Given CODE, a relational operator, the target type, TYPE and two
11052 constant operands OP0 and OP1, return the result of the
11053 relational operation. If the result is not a compile time
11054 constant, then return NULL_TREE. */
11056 static tree
11057 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11059 int result, invert;
11061 /* From here on, the only cases we handle are when the result is
11062 known to be a constant. */
11064 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11066 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11067 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11069 /* Handle the cases where either operand is a NaN. */
11070 if (real_isnan (c0) || real_isnan (c1))
11072 switch (code)
11074 case EQ_EXPR:
11075 case ORDERED_EXPR:
11076 result = 0;
11077 break;
11079 case NE_EXPR:
11080 case UNORDERED_EXPR:
11081 case UNLT_EXPR:
11082 case UNLE_EXPR:
11083 case UNGT_EXPR:
11084 case UNGE_EXPR:
11085 case UNEQ_EXPR:
11086 result = 1;
11087 break;
11089 case LT_EXPR:
11090 case LE_EXPR:
11091 case GT_EXPR:
11092 case GE_EXPR:
11093 case LTGT_EXPR:
11094 if (flag_trapping_math)
11095 return NULL_TREE;
11096 result = 0;
11097 break;
11099 default:
11100 gcc_unreachable ();
11103 return constant_boolean_node (result, type);
11106 return constant_boolean_node (real_compare (code, c0, c1), type);
11109 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11111 To compute GT, swap the arguments and do LT.
11112 To compute GE, do LT and invert the result.
11113 To compute LE, swap the arguments, do LT and invert the result.
11114 To compute NE, do EQ and invert the result.
11116 Therefore, the code below must handle only EQ and LT. */
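/* E.g. to fold 3 >= 5: GE is inverted to LT, INT_CST_LT (3, 5) gives
   1, and the final inversion yields 0, i.e. boolean false.  */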
11118 if (code == LE_EXPR || code == GT_EXPR)
11120 tree tem = op0;
11121 op0 = op1;
11122 op1 = tem;
11123 code = swap_tree_comparison (code);
11126 /* Note that it is safe to invert for real values here because we
11127 have already handled the one case where it matters. */
11129 invert = 0;
11130 if (code == NE_EXPR || code == GE_EXPR)
11132 invert = 1;
11133 code = invert_tree_comparison (code, false);
11136 /* Compute a result for LT or EQ if args permit;
11137 otherwise return NULL_TREE. */
11138 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11140 if (code == EQ_EXPR)
11141 result = tree_int_cst_equal (op0, op1);
11142 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11143 result = INT_CST_LT_UNSIGNED (op0, op1);
11144 else
11145 result = INT_CST_LT (op0, op1);
11147 else
11148 return NULL_TREE;
11150 if (invert)
11151 result ^= 1;
11152 return constant_boolean_node (result, type);
11155 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
11156 Don't build a CLEANUP_POINT_EXPR for an EXPR which doesn't have side
11157 effects. */
11159 tree
11160 fold_build_cleanup_point_expr (tree type, tree expr)
11162 /* If the expression does not have side effects then we don't have to wrap
11163 it with a cleanup point expression. */
11164 if (!TREE_SIDE_EFFECTS (expr))
11165 return expr;
11167 /* If the expression is a RETURN_EXPR, check whether the expression inside
11168 the return, or the right-hand side of the MODIFY_EXPR inside the return,
11169 has side effects. If neither does, we don't need to wrap the expression
11170 in a cleanup point expression. Note that we don't check the left-hand
11171 side of the MODIFY_EXPR because it should always be the return decl. */
11172 if (TREE_CODE (expr) == RETURN_EXPR)
11174 tree op = TREE_OPERAND (expr, 0);
11175 if (!op || !TREE_SIDE_EFFECTS (op))
11176 return expr;
11177 op = TREE_OPERAND (op, 1);
11178 if (!TREE_SIDE_EFFECTS (op))
11179 return expr;
11182 return build1 (CLEANUP_POINT_EXPR, type, expr);
11185 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11186 avoid confusing the gimplify process. */
11188 tree
11189 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11191 /* The size of the object is not relevant when talking about its address. */
11192 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11193 t = TREE_OPERAND (t, 0);
11195 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11196 if (TREE_CODE (t) == INDIRECT_REF
11197 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11199 t = TREE_OPERAND (t, 0);
11200 if (TREE_TYPE (t) != ptrtype)
11201 t = build1 (NOP_EXPR, ptrtype, t);
11203 else
11205 tree base = t;
11207 while (handled_component_p (base))
11208 base = TREE_OPERAND (base, 0);
11209 if (DECL_P (base))
11210 TREE_ADDRESSABLE (base) = 1;
11212 t = build1 (ADDR_EXPR, ptrtype, t);
11215 return t;
11218 tree
11219 build_fold_addr_expr (tree t)
11221 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11224 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11225 of an indirection through OP0, or NULL_TREE if no simplification is
11226 possible. */
11228 tree
11229 fold_indirect_ref_1 (tree type, tree op0)
11231 tree sub = op0;
11232 tree subtype;
11234 STRIP_NOPS (sub);
11235 subtype = TREE_TYPE (sub);
11236 if (!POINTER_TYPE_P (subtype))
11237 return NULL_TREE;
11239 if (TREE_CODE (sub) == ADDR_EXPR)
11241 tree op = TREE_OPERAND (sub, 0);
11242 tree optype = TREE_TYPE (op);
11243 /* *&p => p */
11244 if (type == optype)
11245 return op;
11246 /* *(foo *)&fooarray => fooarray[0] */
11247 else if (TREE_CODE (optype) == ARRAY_TYPE
11248 && type == TREE_TYPE (optype))
11250 tree type_domain = TYPE_DOMAIN (optype);
11251 tree min_val = size_zero_node;
11252 if (type_domain && TYPE_MIN_VALUE (type_domain))
11253 min_val = TYPE_MIN_VALUE (type_domain);
11254 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11258 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11259 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11260 && type == TREE_TYPE (TREE_TYPE (subtype)))
11262 tree type_domain;
11263 tree min_val = size_zero_node;
11264 sub = build_fold_indirect_ref (sub);
11265 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11266 if (type_domain && TYPE_MIN_VALUE (type_domain))
11267 min_val = TYPE_MIN_VALUE (type_domain);
11268 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11271 return NULL_TREE;
11274 /* Builds an expression for an indirection through T, simplifying some
11275 cases. */
11277 tree
11278 build_fold_indirect_ref (tree t)
11280 tree type = TREE_TYPE (TREE_TYPE (t));
11281 tree sub = fold_indirect_ref_1 (type, t);
11283 if (sub)
11284 return sub;
11285 else
11286 return build1 (INDIRECT_REF, type, t);
11289 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11291 tree
11292 fold_indirect_ref (tree t)
11294 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11296 if (sub)
11297 return sub;
11298 else
11299 return t;
11302 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11303 whose result is ignored. The type of the returned tree need not be
11304 the same as the original expression. */
11306 tree
11307 fold_ignored_result (tree t)
11309 if (!TREE_SIDE_EFFECTS (t))
11310 return integer_zero_node;
11312 for (;;)
11313 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11315 case tcc_unary:
11316 t = TREE_OPERAND (t, 0);
11317 break;
11319 case tcc_binary:
11320 case tcc_comparison:
11321 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11322 t = TREE_OPERAND (t, 0);
11323 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11324 t = TREE_OPERAND (t, 1);
11325 else
11326 return t;
11327 break;
11329 case tcc_expression:
11330 switch (TREE_CODE (t))
11332 case COMPOUND_EXPR:
11333 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11334 return t;
11335 t = TREE_OPERAND (t, 0);
11336 break;
11338 case COND_EXPR:
11339 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11340 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11341 return t;
11342 t = TREE_OPERAND (t, 0);
11343 break;
11345 default:
11346 return t;
11348 break;
11350 default:
11351 return t;
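/* E.g. for an ignored expression (x + f ()), only the right operand of
   the PLUS_EXPR has side effects, so the loop peels the addition away
   and returns the bare call f ().  */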
11355 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11356 This can only be applied to objects of a sizetype. */
11358 tree
11359 round_up (tree value, int divisor)
11361 tree div = NULL_TREE;
11363 gcc_assert (divisor > 0);
11364 if (divisor == 1)
11365 return value;
11367 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11368 have to do anything. Only do this when VALUE is not a constant,
11369 because for a constant this check is more expensive than just
11370 doing the rounding. */
11371 if (TREE_CODE (value) != INTEGER_CST)
11373 div = build_int_cst (TREE_TYPE (value), divisor);
11375 if (multiple_of_p (TREE_TYPE (value), value, div))
11376 return value;
11379 /* If divisor is a power of two, simplify this to bit manipulation. */
11380 if (divisor == (divisor & -divisor))
11382 tree t;
11384 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11385 value = size_binop (PLUS_EXPR, value, t);
11386 t = build_int_cst (TREE_TYPE (value), -divisor);
11387 value = size_binop (BIT_AND_EXPR, value, t);
11389 else
11391 if (!div)
11392 div = build_int_cst (TREE_TYPE (value), divisor);
11393 value = size_binop (CEIL_DIV_EXPR, value, div);
11394 value = size_binop (MULT_EXPR, value, div);
11397 return value;
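/* E.g. round_up (size_int (10), 8) becomes (10 + 7) & -8 == 16 via the
   bit trick, while a non-power-of-two divisor such as 12 goes through
   CEIL_DIV_EXPR and MULT_EXPR: 10 divided by 12 rounded up is 1,
   times 12 gives 12.  */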
11400 /* Likewise, but round down. */
11402 tree
11403 round_down (tree value, int divisor)
11405 tree div = NULL_TREE;
11407 gcc_assert (divisor > 0);
11408 if (divisor == 1)
11409 return value;
11411 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11412 have to do anything. Only do this when VALUE is not a constant,
11413 because for a constant this check is more expensive than just
11414 doing the rounding. */
11415 if (TREE_CODE (value) != INTEGER_CST)
11417 div = build_int_cst (TREE_TYPE (value), divisor);
11419 if (multiple_of_p (TREE_TYPE (value), value, div))
11420 return value;
11423 /* If divisor is a power of two, simplify this to bit manipulation. */
11424 if (divisor == (divisor & -divisor))
11426 tree t;
11428 t = build_int_cst (TREE_TYPE (value), -divisor);
11429 value = size_binop (BIT_AND_EXPR, value, t);
11431 else
11433 if (!div)
11434 div = build_int_cst (TREE_TYPE (value), divisor);
11435 value = size_binop (FLOOR_DIV_EXPR, value, div);
11436 value = size_binop (MULT_EXPR, value, div);
11439 return value;
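/* Likewise round_down (size_int (10), 8) is simply 10 & -8 == 8, and
   round_down (size_int (10), 12) computes 10 divided by 12 rounded
   down, i.e. 0, times 12, giving 0.  */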
11442 /* Returns a pointer to the base of the object addressed by EXP and
11443 extracts the information about the offset of the access, storing it
11444 in *PBITPOS and *POFFSET. */
11446 static tree
11447 split_address_to_core_and_offset (tree exp,
11448 HOST_WIDE_INT *pbitpos, tree *poffset)
11450 tree core;
11451 enum machine_mode mode;
11452 int unsignedp, volatilep;
11453 HOST_WIDE_INT bitsize;
11455 if (TREE_CODE (exp) == ADDR_EXPR)
11457 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11458 poffset, &mode, &unsignedp, &volatilep,
11459 false);
11460 core = build_fold_addr_expr (core);
11462 else
11464 core = exp;
11465 *pbitpos = 0;
11466 *poffset = NULL_TREE;
11469 return core;
11472 /* Returns true if the addresses of E1 and E2 differ by a constant,
11473 false otherwise. If they do, E1 - E2 is stored in *DIFF. */
11475 bool
11476 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11478 tree core1, core2;
11479 HOST_WIDE_INT bitpos1, bitpos2;
11480 tree toffset1, toffset2, tdiff, type;
11482 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11483 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11485 if (bitpos1 % BITS_PER_UNIT != 0
11486 || bitpos2 % BITS_PER_UNIT != 0
11487 || !operand_equal_p (core1, core2, 0))
11488 return false;
11490 if (toffset1 && toffset2)
11492 type = TREE_TYPE (toffset1);
11493 if (type != TREE_TYPE (toffset2))
11494 toffset2 = fold_convert (type, toffset2);
11496 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11497 if (!host_integerp (tdiff, 0))
11498 return false;
11500 *diff = tree_low_cst (tdiff, 0);
11502 else if (toffset1 || toffset2)
11504 /* If only one of the offsets is non-constant, the difference cannot
11505 be a constant. */
11506 return false;
11508 else
11509 *diff = 0;
11511 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11512 return true;
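/* E.g. for int a[10], the addresses &a[3] and &a[1] share the core
   &a, both bit offsets are byte-aligned constants, and *DIFF is set
   to 8, i.e. 2 * sizeof (int) assuming 32-bit ints.  */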
11515 /* Simplify the floating point expression EXP when the sign of the
11516 result is not significant. Return NULL_TREE if no simplification
11517 is possible. */
11519 tree
11520 fold_strip_sign_ops (tree exp)
11522 tree arg0, arg1;
11524 switch (TREE_CODE (exp))
11526 case ABS_EXPR:
11527 case NEGATE_EXPR:
11528 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11529 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11531 case MULT_EXPR:
11532 case RDIV_EXPR:
11533 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11534 return NULL_TREE;
11535 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11536 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11537 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11538 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11539 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11540 arg1 ? arg1 : TREE_OPERAND (exp, 1));
11541 break;
11543 default:
11544 break;
11546 return NULL_TREE;
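/* E.g. when only the magnitude of the result matters,
   fold_strip_sign_ops rewrites -x * y to x * y and ABS_EXPR <x> to x;
   a NULL_TREE return means nothing was stripped and the caller keeps
   the original expression.  */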