2005-06-01 Diego Novillo <dnovillo@redhat.com>
[official-gcc.git] / gcc / fold-const.c
blob721701b6732f7238a50072cbf25a1751ac42b31a
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
46 #include "config.h"
47 #include "system.h"
48 #include "coretypes.h"
49 #include "tm.h"
50 #include "flags.h"
51 #include "tree.h"
52 #include "real.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "toplev.h"
57 #include "ggc.h"
58 #include "hashtab.h"
59 #include "langhooks.h"
60 #include "md5.h"
62 /* The following constants represent a bit based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as AND and OR. */
65 enum comparison_code {
66 COMPCODE_FALSE = 0,
67 COMPCODE_LT = 1,
68 COMPCODE_EQ = 2,
69 COMPCODE_LE = 3,
70 COMPCODE_GT = 4,
71 COMPCODE_LTGT = 5,
72 COMPCODE_GE = 6,
73 COMPCODE_ORD = 7,
74 COMPCODE_UNORD = 8,
75 COMPCODE_UNLT = 9,
76 COMPCODE_UNEQ = 10,
77 COMPCODE_UNLE = 11,
78 COMPCODE_UNGT = 12,
79 COMPCODE_NE = 13,
80 COMPCODE_UNGE = 14,
81 COMPCODE_TRUE = 15
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum comparison_code comparison_to_compcode (enum tree_code);
93 static enum tree_code compcode_to_comparison (enum comparison_code);
94 static tree combine_comparisons (enum tree_code, enum tree_code,
95 enum tree_code, tree, tree, tree);
96 static int truth_value_p (enum tree_code);
97 static int operand_equal_for_comparison_p (tree, tree, tree);
98 static int twoval_comparison_p (tree, tree *, tree *, int *);
99 static tree eval_subst (tree, tree, tree, tree, tree);
100 static tree pedantic_omit_one_operand (tree, tree, tree);
101 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
102 static tree make_bit_field_ref (tree, tree, int, int, int);
103 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
104 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
105 enum machine_mode *, int *, int *,
106 tree *, tree *);
107 static int all_ones_mask_p (tree, int);
108 static tree sign_bit_p (tree, tree);
109 static int simple_operand_p (tree);
110 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
111 static tree make_range (tree, int *, tree *, tree *);
112 static tree build_range_check (tree, tree, int, tree, tree);
113 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
114 tree);
115 static tree fold_range_test (enum tree_code, tree, tree, tree);
116 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
117 static tree unextend (tree, int, int, tree);
118 static tree fold_truthop (enum tree_code, tree, tree, tree);
119 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
120 static tree extract_muldiv (tree, tree, enum tree_code, tree);
121 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
122 static int multiple_of_p (tree, tree, tree);
123 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
124 tree, tree,
125 tree, tree, int);
126 static bool fold_real_zero_addition_p (tree, tree, int);
127 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
128 tree, tree, tree);
129 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
130 static tree fold_div_compare (enum tree_code, tree, tree, tree);
131 static bool reorder_operands_p (tree, tree);
132 static tree fold_negate_const (tree, tree);
133 static tree fold_not_const (tree, tree);
134 static tree fold_relational_const (enum tree_code, tree, tree, tree);
135 static bool tree_expr_nonzero_p (tree);
137 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
138 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
139 and SUM1. Then this yields nonzero if overflow occurred during the
140 addition.
142 Overflow occurs if A and B have the same sign, but A and SUM differ in
143 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
144 sign. */
145 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
147 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
148 We do that by representing the two-word integer in 4 words, with only
149 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
150 number. The value of the word is LOWPART + HIGHPART * BASE. */
152 #define LOWPART(x) \
153 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
154 #define HIGHPART(x) \
155 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
156 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
158 /* Unpack a two-word integer into 4 words.
159 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
160 WORDS points to the array of HOST_WIDE_INTs. */
162 static void
163 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
165 words[0] = LOWPART (low);
166 words[1] = HIGHPART (low);
167 words[2] = LOWPART (hi);
168 words[3] = HIGHPART (hi);
171 /* Pack an array of 4 words into a two-word integer.
172 WORDS points to the array of words.
173 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
175 static void
176 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
177 HOST_WIDE_INT *hi)
179 *low = words[0] + words[1] * BASE;
180 *hi = words[2] + words[3] * BASE;
183 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
184 in overflow of the value, when >0 we are only interested in signed
185 overflow, for <0 we are interested in any overflow. OVERFLOWED
186 indicates whether overflow has already occurred. CONST_OVERFLOWED
187 indicates whether constant overflow has already occurred. We force
188 T's value to be within range of T's type (by setting to 0 or 1 all
189 the bits outside the type's range). We set TREE_OVERFLOWED if,
190 OVERFLOWED is nonzero,
191 or OVERFLOWABLE is >0 and signed overflow occurs
192 or OVERFLOWABLE is <0 and any overflow occurs
193 We set TREE_CONSTANT_OVERFLOWED if,
194 CONST_OVERFLOWED is nonzero
195 or we set TREE_OVERFLOWED.
196 We return either the original T, or a copy. */
198 tree
199 force_fit_type (tree t, int overflowable,
200 bool overflowed, bool overflowed_const)
202 unsigned HOST_WIDE_INT low;
203 HOST_WIDE_INT high;
204 unsigned int prec;
205 int sign_extended_type;
207 gcc_assert (TREE_CODE (t) == INTEGER_CST);
209 low = TREE_INT_CST_LOW (t);
210 high = TREE_INT_CST_HIGH (t);
212 if (POINTER_TYPE_P (TREE_TYPE (t))
213 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
214 prec = POINTER_SIZE;
215 else
216 prec = TYPE_PRECISION (TREE_TYPE (t));
217 /* Size types *are* sign extended. */
218 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
219 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
220 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
222 /* First clear all bits that are beyond the type's precision. */
224 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
226 else if (prec > HOST_BITS_PER_WIDE_INT)
227 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
228 else
230 high = 0;
231 if (prec < HOST_BITS_PER_WIDE_INT)
232 low &= ~((HOST_WIDE_INT) (-1) << prec);
235 if (!sign_extended_type)
236 /* No sign extension */;
237 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
238 /* Correct width already. */;
239 else if (prec > HOST_BITS_PER_WIDE_INT)
241 /* Sign extend top half? */
242 if (high & ((unsigned HOST_WIDE_INT)1
243 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
244 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
246 else if (prec == HOST_BITS_PER_WIDE_INT)
248 if ((HOST_WIDE_INT)low < 0)
249 high = -1;
251 else
253 /* Sign extend bottom half? */
254 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
256 high = -1;
257 low |= (HOST_WIDE_INT)(-1) << prec;
261 /* If the value changed, return a new node. */
262 if (overflowed || overflowed_const
263 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
265 t = build_int_cst_wide (TREE_TYPE (t), low, high);
267 if (overflowed
268 || overflowable < 0
269 || (overflowable > 0 && sign_extended_type))
271 t = copy_node (t);
272 TREE_OVERFLOW (t) = 1;
273 TREE_CONSTANT_OVERFLOW (t) = 1;
275 else if (overflowed_const)
277 t = copy_node (t);
278 TREE_CONSTANT_OVERFLOW (t) = 1;
282 return t;
285 /* Add two doubleword integers with doubleword result.
286 Each argument is given as two `HOST_WIDE_INT' pieces.
287 One argument is L1 and H1; the other, L2 and H2.
288 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
291 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
292 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
293 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
295 unsigned HOST_WIDE_INT l;
296 HOST_WIDE_INT h;
298 l = l1 + l2;
299 h = h1 + h2 + (l < l1);
301 *lv = l;
302 *hv = h;
303 return OVERFLOW_SUM_SIGN (h1, h2, h);
306 /* Negate a doubleword integer with doubleword result.
307 Return nonzero if the operation overflows, assuming it's signed.
308 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
309 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
312 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
313 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
315 if (l1 == 0)
317 *lv = 0;
318 *hv = - h1;
319 return (*hv & h1) < 0;
321 else
323 *lv = -l1;
324 *hv = ~h1;
325 return 0;
329 /* Multiply two doubleword integers with doubleword result.
330 Return nonzero if the operation overflows, assuming it's signed.
331 Each argument is given as two `HOST_WIDE_INT' pieces.
332 One argument is L1 and H1; the other, L2 and H2.
333 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
336 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
337 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
338 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
340 HOST_WIDE_INT arg1[4];
341 HOST_WIDE_INT arg2[4];
342 HOST_WIDE_INT prod[4 * 2];
343 unsigned HOST_WIDE_INT carry;
344 int i, j, k;
345 unsigned HOST_WIDE_INT toplow, neglow;
346 HOST_WIDE_INT tophigh, neghigh;
348 encode (arg1, l1, h1);
349 encode (arg2, l2, h2);
351 memset (prod, 0, sizeof prod);
353 for (i = 0; i < 4; i++)
355 carry = 0;
356 for (j = 0; j < 4; j++)
358 k = i + j;
359 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
360 carry += arg1[i] * arg2[j];
361 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
362 carry += prod[k];
363 prod[k] = LOWPART (carry);
364 carry = HIGHPART (carry);
366 prod[i + 4] = carry;
369 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
371 /* Check for overflow by calculating the top half of the answer in full;
372 it should agree with the low half's sign bit. */
373 decode (prod + 4, &toplow, &tophigh);
374 if (h1 < 0)
376 neg_double (l2, h2, &neglow, &neghigh);
377 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
379 if (h2 < 0)
381 neg_double (l1, h1, &neglow, &neghigh);
382 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
384 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
387 /* Shift the doubleword integer in L1, H1 left by COUNT places
388 keeping only PREC bits of result.
389 Shift right if COUNT is negative.
390 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
391 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
393 void
394 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
395 HOST_WIDE_INT count, unsigned int prec,
396 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
398 unsigned HOST_WIDE_INT signmask;
400 if (count < 0)
402 rshift_double (l1, h1, -count, prec, lv, hv, arith);
403 return;
406 if (SHIFT_COUNT_TRUNCATED)
407 count %= prec;
409 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
411 /* Shifting by the host word size is undefined according to the
412 ANSI standard, so we must handle this as a special case. */
413 *hv = 0;
414 *lv = 0;
416 else if (count >= HOST_BITS_PER_WIDE_INT)
418 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
419 *lv = 0;
421 else
423 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
424 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
425 *lv = l1 << count;
428 /* Sign extend all bits that are beyond the precision. */
430 signmask = -((prec > HOST_BITS_PER_WIDE_INT
431 ? ((unsigned HOST_WIDE_INT) *hv
432 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
433 : (*lv >> (prec - 1))) & 1);
435 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
437 else if (prec >= HOST_BITS_PER_WIDE_INT)
439 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
440 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
442 else
444 *hv = signmask;
445 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
446 *lv |= signmask << prec;
450 /* Shift the doubleword integer in L1, H1 right by COUNT places
451 keeping only PREC bits of result. COUNT must be positive.
452 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
453 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
455 void
456 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
457 HOST_WIDE_INT count, unsigned int prec,
458 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
459 int arith)
461 unsigned HOST_WIDE_INT signmask;
463 signmask = (arith
464 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
465 : 0);
467 if (SHIFT_COUNT_TRUNCATED)
468 count %= prec;
470 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
472 /* Shifting by the host word size is undefined according to the
473 ANSI standard, so we must handle this as a special case. */
474 *hv = 0;
475 *lv = 0;
477 else if (count >= HOST_BITS_PER_WIDE_INT)
479 *hv = 0;
480 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
482 else
484 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
485 *lv = ((l1 >> count)
486 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
489 /* Zero / sign extend all bits that are beyond the precision. */
491 if (count >= (HOST_WIDE_INT)prec)
493 *hv = signmask;
494 *lv = signmask;
496 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
498 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
500 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
501 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
503 else
505 *hv = signmask;
506 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
507 *lv |= signmask << (prec - count);
511 /* Rotate the doubleword integer in L1, H1 left by COUNT places
512 keeping only PREC bits of result.
513 Rotate right if COUNT is negative.
514 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
516 void
517 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
518 HOST_WIDE_INT count, unsigned int prec,
519 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
521 unsigned HOST_WIDE_INT s1l, s2l;
522 HOST_WIDE_INT s1h, s2h;
524 count %= prec;
525 if (count < 0)
526 count += prec;
528 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
529 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
530 *lv = s1l | s2l;
531 *hv = s1h | s2h;
534 /* Rotate the doubleword integer in L1, H1 left by COUNT places
535 keeping only PREC bits of result. COUNT must be positive.
536 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
538 void
539 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
540 HOST_WIDE_INT count, unsigned int prec,
541 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
543 unsigned HOST_WIDE_INT s1l, s2l;
544 HOST_WIDE_INT s1h, s2h;
546 count %= prec;
547 if (count < 0)
548 count += prec;
550 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
551 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
552 *lv = s1l | s2l;
553 *hv = s1h | s2h;
556 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
557 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
558 CODE is a tree code for a kind of division, one of
559 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
560 or EXACT_DIV_EXPR
561 It controls how the quotient is rounded to an integer.
562 Return nonzero if the operation overflows.
563 UNS nonzero says do unsigned division. */
566 div_and_round_double (enum tree_code code, int uns,
567 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
568 HOST_WIDE_INT hnum_orig,
569 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
570 HOST_WIDE_INT hden_orig,
571 unsigned HOST_WIDE_INT *lquo,
572 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
573 HOST_WIDE_INT *hrem)
575 int quo_neg = 0;
576 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
577 HOST_WIDE_INT den[4], quo[4];
578 int i, j;
579 unsigned HOST_WIDE_INT work;
580 unsigned HOST_WIDE_INT carry = 0;
581 unsigned HOST_WIDE_INT lnum = lnum_orig;
582 HOST_WIDE_INT hnum = hnum_orig;
583 unsigned HOST_WIDE_INT lden = lden_orig;
584 HOST_WIDE_INT hden = hden_orig;
585 int overflow = 0;
587 if (hden == 0 && lden == 0)
588 overflow = 1, lden = 1;
590 /* Calculate quotient sign and convert operands to unsigned. */
591 if (!uns)
593 if (hnum < 0)
595 quo_neg = ~ quo_neg;
596 /* (minimum integer) / (-1) is the only overflow case. */
597 if (neg_double (lnum, hnum, &lnum, &hnum)
598 && ((HOST_WIDE_INT) lden & hden) == -1)
599 overflow = 1;
601 if (hden < 0)
603 quo_neg = ~ quo_neg;
604 neg_double (lden, hden, &lden, &hden);
608 if (hnum == 0 && hden == 0)
609 { /* single precision */
610 *hquo = *hrem = 0;
611 /* This unsigned division rounds toward zero. */
612 *lquo = lnum / lden;
613 goto finish_up;
616 if (hnum == 0)
617 { /* trivial case: dividend < divisor */
618 /* hden != 0 already checked. */
619 *hquo = *lquo = 0;
620 *hrem = hnum;
621 *lrem = lnum;
622 goto finish_up;
625 memset (quo, 0, sizeof quo);
627 memset (num, 0, sizeof num); /* to zero 9th element */
628 memset (den, 0, sizeof den);
630 encode (num, lnum, hnum);
631 encode (den, lden, hden);
633 /* Special code for when the divisor < BASE. */
634 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
636 /* hnum != 0 already checked. */
637 for (i = 4 - 1; i >= 0; i--)
639 work = num[i] + carry * BASE;
640 quo[i] = work / lden;
641 carry = work % lden;
644 else
646 /* Full double precision division,
647 with thanks to Don Knuth's "Seminumerical Algorithms". */
648 int num_hi_sig, den_hi_sig;
649 unsigned HOST_WIDE_INT quo_est, scale;
651 /* Find the highest nonzero divisor digit. */
652 for (i = 4 - 1;; i--)
653 if (den[i] != 0)
655 den_hi_sig = i;
656 break;
659 /* Insure that the first digit of the divisor is at least BASE/2.
660 This is required by the quotient digit estimation algorithm. */
662 scale = BASE / (den[den_hi_sig] + 1);
663 if (scale > 1)
664 { /* scale divisor and dividend */
665 carry = 0;
666 for (i = 0; i <= 4 - 1; i++)
668 work = (num[i] * scale) + carry;
669 num[i] = LOWPART (work);
670 carry = HIGHPART (work);
673 num[4] = carry;
674 carry = 0;
675 for (i = 0; i <= 4 - 1; i++)
677 work = (den[i] * scale) + carry;
678 den[i] = LOWPART (work);
679 carry = HIGHPART (work);
680 if (den[i] != 0) den_hi_sig = i;
684 num_hi_sig = 4;
686 /* Main loop */
687 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
689 /* Guess the next quotient digit, quo_est, by dividing the first
690 two remaining dividend digits by the high order quotient digit.
691 quo_est is never low and is at most 2 high. */
692 unsigned HOST_WIDE_INT tmp;
694 num_hi_sig = i + den_hi_sig + 1;
695 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
696 if (num[num_hi_sig] != den[den_hi_sig])
697 quo_est = work / den[den_hi_sig];
698 else
699 quo_est = BASE - 1;
701 /* Refine quo_est so it's usually correct, and at most one high. */
702 tmp = work - quo_est * den[den_hi_sig];
703 if (tmp < BASE
704 && (den[den_hi_sig - 1] * quo_est
705 > (tmp * BASE + num[num_hi_sig - 2])))
706 quo_est--;
708 /* Try QUO_EST as the quotient digit, by multiplying the
709 divisor by QUO_EST and subtracting from the remaining dividend.
710 Keep in mind that QUO_EST is the I - 1st digit. */
712 carry = 0;
713 for (j = 0; j <= den_hi_sig; j++)
715 work = quo_est * den[j] + carry;
716 carry = HIGHPART (work);
717 work = num[i + j] - LOWPART (work);
718 num[i + j] = LOWPART (work);
719 carry += HIGHPART (work) != 0;
722 /* If quo_est was high by one, then num[i] went negative and
723 we need to correct things. */
724 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
726 quo_est--;
727 carry = 0; /* add divisor back in */
728 for (j = 0; j <= den_hi_sig; j++)
730 work = num[i + j] + den[j] + carry;
731 carry = HIGHPART (work);
732 num[i + j] = LOWPART (work);
735 num [num_hi_sig] += carry;
738 /* Store the quotient digit. */
739 quo[i] = quo_est;
743 decode (quo, lquo, hquo);
745 finish_up:
746 /* If result is negative, make it so. */
747 if (quo_neg)
748 neg_double (*lquo, *hquo, lquo, hquo);
750 /* Compute trial remainder: rem = num - (quo * den) */
751 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
752 neg_double (*lrem, *hrem, lrem, hrem);
753 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
755 switch (code)
757 case TRUNC_DIV_EXPR:
758 case TRUNC_MOD_EXPR: /* round toward zero */
759 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
760 return overflow;
762 case FLOOR_DIV_EXPR:
763 case FLOOR_MOD_EXPR: /* round toward negative infinity */
764 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
766 /* quo = quo - 1; */
767 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
768 lquo, hquo);
770 else
771 return overflow;
772 break;
774 case CEIL_DIV_EXPR:
775 case CEIL_MOD_EXPR: /* round toward positive infinity */
776 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
778 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
779 lquo, hquo);
781 else
782 return overflow;
783 break;
785 case ROUND_DIV_EXPR:
786 case ROUND_MOD_EXPR: /* round to closest integer */
788 unsigned HOST_WIDE_INT labs_rem = *lrem;
789 HOST_WIDE_INT habs_rem = *hrem;
790 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
791 HOST_WIDE_INT habs_den = hden, htwice;
793 /* Get absolute values. */
794 if (*hrem < 0)
795 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
796 if (hden < 0)
797 neg_double (lden, hden, &labs_den, &habs_den);
799 /* If (2 * abs (lrem) >= abs (lden)) */
800 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
801 labs_rem, habs_rem, &ltwice, &htwice);
803 if (((unsigned HOST_WIDE_INT) habs_den
804 < (unsigned HOST_WIDE_INT) htwice)
805 || (((unsigned HOST_WIDE_INT) habs_den
806 == (unsigned HOST_WIDE_INT) htwice)
807 && (labs_den < ltwice)))
809 if (*hquo < 0)
810 /* quo = quo - 1; */
811 add_double (*lquo, *hquo,
812 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
813 else
814 /* quo = quo + 1; */
815 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
816 lquo, hquo);
818 else
819 return overflow;
821 break;
823 default:
824 gcc_unreachable ();
827 /* Compute true remainder: rem = num - (quo * den) */
828 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
829 neg_double (*lrem, *hrem, lrem, hrem);
830 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
831 return overflow;
834 /* If ARG2 divides ARG1 with zero remainder, carries out the division
835 of type CODE and returns the quotient.
836 Otherwise returns NULL_TREE. */
838 static tree
839 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
841 unsigned HOST_WIDE_INT int1l, int2l;
842 HOST_WIDE_INT int1h, int2h;
843 unsigned HOST_WIDE_INT quol, reml;
844 HOST_WIDE_INT quoh, remh;
845 tree type = TREE_TYPE (arg1);
846 int uns = TYPE_UNSIGNED (type);
848 int1l = TREE_INT_CST_LOW (arg1);
849 int1h = TREE_INT_CST_HIGH (arg1);
850 int2l = TREE_INT_CST_LOW (arg2);
851 int2h = TREE_INT_CST_HIGH (arg2);
853 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
854 &quol, &quoh, &reml, &remh);
855 if (remh != 0 || reml != 0)
856 return NULL_TREE;
858 return build_int_cst_wide (type, quol, quoh);
861 /* Return true if built-in mathematical function specified by CODE
862 preserves the sign of it argument, i.e. -f(x) == f(-x). */
864 static bool
865 negate_mathfn_p (enum built_in_function code)
867 switch (code)
869 case BUILT_IN_ASIN:
870 case BUILT_IN_ASINF:
871 case BUILT_IN_ASINL:
872 case BUILT_IN_ATAN:
873 case BUILT_IN_ATANF:
874 case BUILT_IN_ATANL:
875 case BUILT_IN_SIN:
876 case BUILT_IN_SINF:
877 case BUILT_IN_SINL:
878 case BUILT_IN_TAN:
879 case BUILT_IN_TANF:
880 case BUILT_IN_TANL:
881 return true;
883 default:
884 break;
886 return false;
889 /* Check whether we may negate an integer constant T without causing
890 overflow. */
892 bool
893 may_negate_without_overflow_p (tree t)
895 unsigned HOST_WIDE_INT val;
896 unsigned int prec;
897 tree type;
899 gcc_assert (TREE_CODE (t) == INTEGER_CST);
901 type = TREE_TYPE (t);
902 if (TYPE_UNSIGNED (type))
903 return false;
905 prec = TYPE_PRECISION (type);
906 if (prec > HOST_BITS_PER_WIDE_INT)
908 if (TREE_INT_CST_LOW (t) != 0)
909 return true;
910 prec -= HOST_BITS_PER_WIDE_INT;
911 val = TREE_INT_CST_HIGH (t);
913 else
914 val = TREE_INT_CST_LOW (t);
915 if (prec < HOST_BITS_PER_WIDE_INT)
916 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
917 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
920 /* Determine whether an expression T can be cheaply negated using
921 the function negate_expr. */
923 static bool
924 negate_expr_p (tree t)
926 tree type;
928 if (t == 0)
929 return false;
931 type = TREE_TYPE (t);
933 STRIP_SIGN_NOPS (t);
934 switch (TREE_CODE (t))
936 case INTEGER_CST:
937 if (TYPE_UNSIGNED (type) || ! flag_trapv)
938 return true;
940 /* Check that -CST will not overflow type. */
941 return may_negate_without_overflow_p (t);
943 case REAL_CST:
944 case NEGATE_EXPR:
945 return true;
947 case COMPLEX_CST:
948 return negate_expr_p (TREE_REALPART (t))
949 && negate_expr_p (TREE_IMAGPART (t));
951 case PLUS_EXPR:
952 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
953 return false;
954 /* -(A + B) -> (-B) - A. */
955 if (negate_expr_p (TREE_OPERAND (t, 1))
956 && reorder_operands_p (TREE_OPERAND (t, 0),
957 TREE_OPERAND (t, 1)))
958 return true;
959 /* -(A + B) -> (-A) - B. */
960 return negate_expr_p (TREE_OPERAND (t, 0));
962 case MINUS_EXPR:
963 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
964 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
965 && reorder_operands_p (TREE_OPERAND (t, 0),
966 TREE_OPERAND (t, 1));
968 case MULT_EXPR:
969 if (TYPE_UNSIGNED (TREE_TYPE (t)))
970 break;
972 /* Fall through. */
974 case RDIV_EXPR:
975 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
976 return negate_expr_p (TREE_OPERAND (t, 1))
977 || negate_expr_p (TREE_OPERAND (t, 0));
978 break;
980 case NOP_EXPR:
981 /* Negate -((double)float) as (double)(-float). */
982 if (TREE_CODE (type) == REAL_TYPE)
984 tree tem = strip_float_extensions (t);
985 if (tem != t)
986 return negate_expr_p (tem);
988 break;
990 case CALL_EXPR:
991 /* Negate -f(x) as f(-x). */
992 if (negate_mathfn_p (builtin_mathfn_code (t)))
993 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
994 break;
996 case RSHIFT_EXPR:
997 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
998 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1000 tree op1 = TREE_OPERAND (t, 1);
1001 if (TREE_INT_CST_HIGH (op1) == 0
1002 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1003 == TREE_INT_CST_LOW (op1))
1004 return true;
1006 break;
1008 default:
1009 break;
1011 return false;
1014 /* Given T, an expression, return the negation of T. Allow for T to be
1015 null, in which case return null. */
1017 static tree
1018 negate_expr (tree t)
1020 tree type;
1021 tree tem;
1023 if (t == 0)
1024 return 0;
1026 type = TREE_TYPE (t);
1027 STRIP_SIGN_NOPS (t);
1029 switch (TREE_CODE (t))
1031 case INTEGER_CST:
1032 tem = fold_negate_const (t, type);
1033 if (! TREE_OVERFLOW (tem)
1034 || TYPE_UNSIGNED (type)
1035 || ! flag_trapv)
1036 return tem;
1037 break;
1039 case REAL_CST:
1040 tem = fold_negate_const (t, type);
1041 /* Two's complement FP formats, such as c4x, may overflow. */
1042 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1043 return fold_convert (type, tem);
1044 break;
1046 case COMPLEX_CST:
1048 tree rpart = negate_expr (TREE_REALPART (t));
1049 tree ipart = negate_expr (TREE_IMAGPART (t));
1051 if ((TREE_CODE (rpart) == REAL_CST
1052 && TREE_CODE (ipart) == REAL_CST)
1053 || (TREE_CODE (rpart) == INTEGER_CST
1054 && TREE_CODE (ipart) == INTEGER_CST))
1055 return build_complex (type, rpart, ipart);
1057 break;
1059 case NEGATE_EXPR:
1060 return fold_convert (type, TREE_OPERAND (t, 0));
1062 case PLUS_EXPR:
1063 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1065 /* -(A + B) -> (-B) - A. */
1066 if (negate_expr_p (TREE_OPERAND (t, 1))
1067 && reorder_operands_p (TREE_OPERAND (t, 0),
1068 TREE_OPERAND (t, 1)))
1070 tem = negate_expr (TREE_OPERAND (t, 1));
1071 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1072 tem, TREE_OPERAND (t, 0));
1073 return fold_convert (type, tem);
1076 /* -(A + B) -> (-A) - B. */
1077 if (negate_expr_p (TREE_OPERAND (t, 0)))
1079 tem = negate_expr (TREE_OPERAND (t, 0));
1080 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1081 tem, TREE_OPERAND (t, 1));
1082 return fold_convert (type, tem);
1085 break;
1087 case MINUS_EXPR:
1088 /* - (A - B) -> B - A */
1089 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1090 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1091 return fold_convert (type,
1092 fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1093 TREE_OPERAND (t, 1),
1094 TREE_OPERAND (t, 0)));
1095 break;
1097 case MULT_EXPR:
1098 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1099 break;
1101 /* Fall through. */
1103 case RDIV_EXPR:
1104 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1106 tem = TREE_OPERAND (t, 1);
1107 if (negate_expr_p (tem))
1108 return fold_convert (type,
1109 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1110 TREE_OPERAND (t, 0),
1111 negate_expr (tem)));
1112 tem = TREE_OPERAND (t, 0);
1113 if (negate_expr_p (tem))
1114 return fold_convert (type,
1115 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1116 negate_expr (tem),
1117 TREE_OPERAND (t, 1)));
1119 break;
1121 case NOP_EXPR:
1122 /* Convert -((double)float) into (double)(-float). */
1123 if (TREE_CODE (type) == REAL_TYPE)
1125 tem = strip_float_extensions (t);
1126 if (tem != t && negate_expr_p (tem))
1127 return fold_convert (type, negate_expr (tem));
1129 break;
1131 case CALL_EXPR:
1132 /* Negate -f(x) as f(-x). */
1133 if (negate_mathfn_p (builtin_mathfn_code (t))
1134 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1136 tree fndecl, arg, arglist;
1138 fndecl = get_callee_fndecl (t);
1139 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1140 arglist = build_tree_list (NULL_TREE, arg);
1141 return build_function_call_expr (fndecl, arglist);
1143 break;
1145 case RSHIFT_EXPR:
1146 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1147 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1149 tree op1 = TREE_OPERAND (t, 1);
1150 if (TREE_INT_CST_HIGH (op1) == 0
1151 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1152 == TREE_INT_CST_LOW (op1))
1154 tree ntype = TYPE_UNSIGNED (type)
1155 ? lang_hooks.types.signed_type (type)
1156 : lang_hooks.types.unsigned_type (type);
1157 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1158 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1159 return fold_convert (type, temp);
1162 break;
1164 default:
1165 break;
1168 tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1169 return fold_convert (type, tem);
1172 /* Split a tree IN into a constant, literal and variable parts that could be
1173 combined with CODE to make IN. "constant" means an expression with
1174 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1175 commutative arithmetic operation. Store the constant part into *CONP,
1176 the literal in *LITP and return the variable part. If a part isn't
1177 present, set it to null. If the tree does not decompose in this way,
1178 return the entire tree as the variable part and the other parts as null.
1180 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1181 case, we negate an operand that was subtracted. Except if it is a
1182 literal for which we use *MINUS_LITP instead.
1184 If NEGATE_P is true, we are negating all of IN, again except a literal
1185 for which we use *MINUS_LITP instead.
1187 If IN is itself a literal or constant, return it as appropriate.
1189 Note that we do not guarantee that any of the three values will be the
1190 same type as IN, but they will have the same signedness and mode. */
1192 static tree
1193 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1194 tree *minus_litp, int negate_p)
1196 tree var = 0;
1198 *conp = 0;
1199 *litp = 0;
1200 *minus_litp = 0;
1202 /* Strip any conversions that don't change the machine mode or signedness. */
1203 STRIP_SIGN_NOPS (in);
1205 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1206 *litp = in;
1207 else if (TREE_CODE (in) == code
1208 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1209 /* We can associate addition and subtraction together (even
1210 though the C standard doesn't say so) for integers because
1211 the value is not affected. For reals, the value might be
1212 affected, so we can't. */
1213 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1214 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1216 tree op0 = TREE_OPERAND (in, 0);
1217 tree op1 = TREE_OPERAND (in, 1);
1218 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1219 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1221 /* First see if either of the operands is a literal, then a constant. */
1222 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1223 *litp = op0, op0 = 0;
1224 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1225 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1227 if (op0 != 0 && TREE_CONSTANT (op0))
1228 *conp = op0, op0 = 0;
1229 else if (op1 != 0 && TREE_CONSTANT (op1))
1230 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1232 /* If we haven't dealt with either operand, this is not a case we can
1233 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1234 if (op0 != 0 && op1 != 0)
1235 var = in;
1236 else if (op0 != 0)
1237 var = op0;
1238 else
1239 var = op1, neg_var_p = neg1_p;
1241 /* Now do any needed negations. */
1242 if (neg_litp_p)
1243 *minus_litp = *litp, *litp = 0;
1244 if (neg_conp_p)
1245 *conp = negate_expr (*conp);
1246 if (neg_var_p)
1247 var = negate_expr (var);
1249 else if (TREE_CONSTANT (in))
1250 *conp = in;
1251 else
1252 var = in;
1254 if (negate_p)
1256 if (*litp)
1257 *minus_litp = *litp, *litp = 0;
1258 else if (*minus_litp)
1259 *litp = *minus_litp, *minus_litp = 0;
1260 *conp = negate_expr (*conp);
1261 var = negate_expr (var);
1264 return var;
1267 /* Re-associate trees split by the above function. T1 and T2 are either
1268 expressions to associate or null. Return the new expression, if any. If
1269 we build an operation, do it in TYPE and with CODE. */
1271 static tree
1272 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1274 if (t1 == 0)
1275 return t2;
1276 else if (t2 == 0)
1277 return t1;
1279 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1280 try to fold this since we will have infinite recursion. But do
1281 deal with any NEGATE_EXPRs. */
1282 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1283 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1285 if (code == PLUS_EXPR)
1287 if (TREE_CODE (t1) == NEGATE_EXPR)
1288 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1289 fold_convert (type, TREE_OPERAND (t1, 0)));
1290 else if (TREE_CODE (t2) == NEGATE_EXPR)
1291 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1292 fold_convert (type, TREE_OPERAND (t2, 0)));
1293 else if (integer_zerop (t2))
1294 return fold_convert (type, t1);
1296 else if (code == MINUS_EXPR)
1298 if (integer_zerop (t2))
1299 return fold_convert (type, t1);
1302 return build2 (code, type, fold_convert (type, t1),
1303 fold_convert (type, t2));
1306 return fold_build2 (code, type, fold_convert (type, t1),
1307 fold_convert (type, t2));
1310 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1311 to produce a new constant.
1313 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1315 tree
1316 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1318 unsigned HOST_WIDE_INT int1l, int2l;
1319 HOST_WIDE_INT int1h, int2h;
1320 unsigned HOST_WIDE_INT low;
1321 HOST_WIDE_INT hi;
1322 unsigned HOST_WIDE_INT garbagel;
1323 HOST_WIDE_INT garbageh;
1324 tree t;
1325 tree type = TREE_TYPE (arg1);
1326 int uns = TYPE_UNSIGNED (type);
1327 int is_sizetype
1328 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1329 int overflow = 0;
1331 int1l = TREE_INT_CST_LOW (arg1);
1332 int1h = TREE_INT_CST_HIGH (arg1);
1333 int2l = TREE_INT_CST_LOW (arg2);
1334 int2h = TREE_INT_CST_HIGH (arg2);
1336 switch (code)
1338 case BIT_IOR_EXPR:
1339 low = int1l | int2l, hi = int1h | int2h;
1340 break;
1342 case BIT_XOR_EXPR:
1343 low = int1l ^ int2l, hi = int1h ^ int2h;
1344 break;
1346 case BIT_AND_EXPR:
1347 low = int1l & int2l, hi = int1h & int2h;
1348 break;
1350 case RSHIFT_EXPR:
1351 int2l = -int2l;
1352 case LSHIFT_EXPR:
1353 /* It's unclear from the C standard whether shifts can overflow.
1354 The following code ignores overflow; perhaps a C standard
1355 interpretation ruling is needed. */
1356 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1357 &low, &hi, !uns);
1358 break;
1360 case RROTATE_EXPR:
1361 int2l = - int2l;
1362 case LROTATE_EXPR:
1363 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1364 &low, &hi);
1365 break;
1367 case PLUS_EXPR:
1368 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1369 break;
1371 case MINUS_EXPR:
1372 neg_double (int2l, int2h, &low, &hi);
1373 add_double (int1l, int1h, low, hi, &low, &hi);
1374 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1375 break;
1377 case MULT_EXPR:
1378 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1379 break;
1381 case TRUNC_DIV_EXPR:
1382 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1383 case EXACT_DIV_EXPR:
1384 /* This is a shortcut for a common special case. */
1385 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1386 && ! TREE_CONSTANT_OVERFLOW (arg1)
1387 && ! TREE_CONSTANT_OVERFLOW (arg2)
1388 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1390 if (code == CEIL_DIV_EXPR)
1391 int1l += int2l - 1;
1393 low = int1l / int2l, hi = 0;
1394 break;
1397 /* ... fall through ... */
1399 case ROUND_DIV_EXPR:
1400 if (int2h == 0 && int2l == 1)
1402 low = int1l, hi = int1h;
1403 break;
1405 if (int1l == int2l && int1h == int2h
1406 && ! (int1l == 0 && int1h == 0))
1408 low = 1, hi = 0;
1409 break;
1411 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1412 &low, &hi, &garbagel, &garbageh);
1413 break;
1415 case TRUNC_MOD_EXPR:
1416 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1417 /* This is a shortcut for a common special case. */
1418 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1419 && ! TREE_CONSTANT_OVERFLOW (arg1)
1420 && ! TREE_CONSTANT_OVERFLOW (arg2)
1421 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1423 if (code == CEIL_MOD_EXPR)
1424 int1l += int2l - 1;
1425 low = int1l % int2l, hi = 0;
1426 break;
1429 /* ... fall through ... */
1431 case ROUND_MOD_EXPR:
1432 overflow = div_and_round_double (code, uns,
1433 int1l, int1h, int2l, int2h,
1434 &garbagel, &garbageh, &low, &hi);
1435 break;
1437 case MIN_EXPR:
1438 case MAX_EXPR:
1439 if (uns)
1440 low = (((unsigned HOST_WIDE_INT) int1h
1441 < (unsigned HOST_WIDE_INT) int2h)
1442 || (((unsigned HOST_WIDE_INT) int1h
1443 == (unsigned HOST_WIDE_INT) int2h)
1444 && int1l < int2l));
1445 else
1446 low = (int1h < int2h
1447 || (int1h == int2h && int1l < int2l));
1449 if (low == (code == MIN_EXPR))
1450 low = int1l, hi = int1h;
1451 else
1452 low = int2l, hi = int2h;
1453 break;
1455 default:
1456 gcc_unreachable ();
1459 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1461 if (notrunc)
1463 /* Propagate overflow flags ourselves. */
1464 if (((!uns || is_sizetype) && overflow)
1465 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1467 t = copy_node (t);
1468 TREE_OVERFLOW (t) = 1;
1469 TREE_CONSTANT_OVERFLOW (t) = 1;
1471 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1473 t = copy_node (t);
1474 TREE_CONSTANT_OVERFLOW (t) = 1;
1477 else
1478 t = force_fit_type (t, 1,
1479 ((!uns || is_sizetype) && overflow)
1480 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1481 TREE_CONSTANT_OVERFLOW (arg1)
1482 | TREE_CONSTANT_OVERFLOW (arg2));
1484 return t;
1487 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1488 constant. We assume ARG1 and ARG2 have the same data type, or at least
1489 are the same kind of constant and the same machine mode.
1491 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1493 static tree
1494 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1496 STRIP_NOPS (arg1);
1497 STRIP_NOPS (arg2);
1499 if (TREE_CODE (arg1) == INTEGER_CST)
1500 return int_const_binop (code, arg1, arg2, notrunc);
1502 if (TREE_CODE (arg1) == REAL_CST)
1504 enum machine_mode mode;
1505 REAL_VALUE_TYPE d1;
1506 REAL_VALUE_TYPE d2;
1507 REAL_VALUE_TYPE value;
1508 REAL_VALUE_TYPE result;
1509 bool inexact;
1510 tree t, type;
1512 d1 = TREE_REAL_CST (arg1);
1513 d2 = TREE_REAL_CST (arg2);
1515 type = TREE_TYPE (arg1);
1516 mode = TYPE_MODE (type);
1518 /* Don't perform operation if we honor signaling NaNs and
1519 either operand is a NaN. */
1520 if (HONOR_SNANS (mode)
1521 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1522 return NULL_TREE;
1524 /* Don't perform operation if it would raise a division
1525 by zero exception. */
1526 if (code == RDIV_EXPR
1527 && REAL_VALUES_EQUAL (d2, dconst0)
1528 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1529 return NULL_TREE;
1531 /* If either operand is a NaN, just return it. Otherwise, set up
1532 for floating-point trap; we return an overflow. */
1533 if (REAL_VALUE_ISNAN (d1))
1534 return arg1;
1535 else if (REAL_VALUE_ISNAN (d2))
1536 return arg2;
1538 inexact = real_arithmetic (&value, code, &d1, &d2);
1539 real_convert (&result, mode, &value);
1541 /* Don't constant fold this floating point operation if the
1542 result may dependent upon the run-time rounding mode and
1543 flag_rounding_math is set, or if GCC's software emulation
1544 is unable to accurately represent the result. */
1546 if ((flag_rounding_math
1547 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1548 && !flag_unsafe_math_optimizations))
1549 && (inexact || !real_identical (&result, &value)))
1550 return NULL_TREE;
1552 t = build_real (type, result);
1554 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1555 TREE_CONSTANT_OVERFLOW (t)
1556 = TREE_OVERFLOW (t)
1557 | TREE_CONSTANT_OVERFLOW (arg1)
1558 | TREE_CONSTANT_OVERFLOW (arg2);
1559 return t;
1561 if (TREE_CODE (arg1) == COMPLEX_CST)
1563 tree type = TREE_TYPE (arg1);
1564 tree r1 = TREE_REALPART (arg1);
1565 tree i1 = TREE_IMAGPART (arg1);
1566 tree r2 = TREE_REALPART (arg2);
1567 tree i2 = TREE_IMAGPART (arg2);
1568 tree t;
1570 switch (code)
1572 case PLUS_EXPR:
1573 t = build_complex (type,
1574 const_binop (PLUS_EXPR, r1, r2, notrunc),
1575 const_binop (PLUS_EXPR, i1, i2, notrunc));
1576 break;
1578 case MINUS_EXPR:
1579 t = build_complex (type,
1580 const_binop (MINUS_EXPR, r1, r2, notrunc),
1581 const_binop (MINUS_EXPR, i1, i2, notrunc));
1582 break;
1584 case MULT_EXPR:
1585 t = build_complex (type,
1586 const_binop (MINUS_EXPR,
1587 const_binop (MULT_EXPR,
1588 r1, r2, notrunc),
1589 const_binop (MULT_EXPR,
1590 i1, i2, notrunc),
1591 notrunc),
1592 const_binop (PLUS_EXPR,
1593 const_binop (MULT_EXPR,
1594 r1, i2, notrunc),
1595 const_binop (MULT_EXPR,
1596 i1, r2, notrunc),
1597 notrunc));
1598 break;
1600 case RDIV_EXPR:
1602 tree t1, t2, real, imag;
1603 tree magsquared
1604 = const_binop (PLUS_EXPR,
1605 const_binop (MULT_EXPR, r2, r2, notrunc),
1606 const_binop (MULT_EXPR, i2, i2, notrunc),
1607 notrunc);
1609 t1 = const_binop (PLUS_EXPR,
1610 const_binop (MULT_EXPR, r1, r2, notrunc),
1611 const_binop (MULT_EXPR, i1, i2, notrunc),
1612 notrunc);
1613 t2 = const_binop (MINUS_EXPR,
1614 const_binop (MULT_EXPR, i1, r2, notrunc),
1615 const_binop (MULT_EXPR, r1, i2, notrunc),
1616 notrunc);
1618 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1620 real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
1621 imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
1623 else
1625 real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
1626 imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
1627 if (!real || !imag)
1628 return NULL_TREE;
1631 t = build_complex (type, real, imag);
1633 break;
1635 default:
1636 gcc_unreachable ();
1638 return t;
1640 return 0;
1643 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1644 indicates which particular sizetype to create. */
1646 tree
1647 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1649 return build_int_cst (sizetype_tab[(int) kind], number);
1652 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1653 is a tree code. The type of the result is taken from the operands.
1654 Both must be the same type integer type and it must be a size type.
1655 If the operands are constant, so is the result. */
1657 tree
1658 size_binop (enum tree_code code, tree arg0, tree arg1)
1660 tree type = TREE_TYPE (arg0);
1662 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1663 && type == TREE_TYPE (arg1));
1665 /* Handle the special case of two integer constants faster. */
1666 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1668 /* And some specific cases even faster than that. */
1669 if (code == PLUS_EXPR && integer_zerop (arg0))
1670 return arg1;
1671 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1672 && integer_zerop (arg1))
1673 return arg0;
1674 else if (code == MULT_EXPR && integer_onep (arg0))
1675 return arg1;
1677 /* Handle general case of two integer constants. */
1678 return int_const_binop (code, arg0, arg1, 0);
1681 if (arg0 == error_mark_node || arg1 == error_mark_node)
1682 return error_mark_node;
1684 return fold_build2 (code, type, arg0, arg1);
1687 /* Given two values, either both of sizetype or both of bitsizetype,
1688 compute the difference between the two values. Return the value
1689 in signed type corresponding to the type of the operands. */
1691 tree
1692 size_diffop (tree arg0, tree arg1)
1694 tree type = TREE_TYPE (arg0);
1695 tree ctype;
1697 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1698 && type == TREE_TYPE (arg1));
1700 /* If the type is already signed, just do the simple thing. */
1701 if (!TYPE_UNSIGNED (type))
1702 return size_binop (MINUS_EXPR, arg0, arg1);
1704 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1706 /* If either operand is not a constant, do the conversions to the signed
1707 type and subtract. The hardware will do the right thing with any
1708 overflow in the subtraction. */
1709 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1710 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1711 fold_convert (ctype, arg1));
1713 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1714 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1715 overflow) and negate (which can't either). Special-case a result
1716 of zero while we're here. */
1717 if (tree_int_cst_equal (arg0, arg1))
1718 return fold_convert (ctype, integer_zero_node);
1719 else if (tree_int_cst_lt (arg1, arg0))
1720 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1721 else
1722 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1723 fold_convert (ctype, size_binop (MINUS_EXPR,
1724 arg1, arg0)));
1727 /* A subroutine of fold_convert_const handling conversions of an
1728 INTEGER_CST to another integer type. */
1730 static tree
1731 fold_convert_const_int_from_int (tree type, tree arg1)
1733 tree t;
1735 /* Given an integer constant, make new constant with new type,
1736 appropriately sign-extended or truncated. */
1737 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1738 TREE_INT_CST_HIGH (arg1));
1740 t = force_fit_type (t,
1741 /* Don't set the overflow when
1742 converting a pointer */
1743 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1744 (TREE_INT_CST_HIGH (arg1) < 0
1745 && (TYPE_UNSIGNED (type)
1746 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1747 | TREE_OVERFLOW (arg1),
1748 TREE_CONSTANT_OVERFLOW (arg1));
1750 return t;
1753 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1754 to an integer type. */
1756 static tree
1757 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1759 int overflow = 0;
1760 tree t;
1762 /* The following code implements the floating point to integer
1763 conversion rules required by the Java Language Specification,
1764 that IEEE NaNs are mapped to zero and values that overflow
1765 the target precision saturate, i.e. values greater than
1766 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1767 are mapped to INT_MIN. These semantics are allowed by the
1768 C and C++ standards that simply state that the behavior of
1769 FP-to-integer conversion is unspecified upon overflow. */
1771 HOST_WIDE_INT high, low;
1772 REAL_VALUE_TYPE r;
1773 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1775 switch (code)
1777 case FIX_TRUNC_EXPR:
1778 real_trunc (&r, VOIDmode, &x);
1779 break;
1781 case FIX_CEIL_EXPR:
1782 real_ceil (&r, VOIDmode, &x);
1783 break;
1785 case FIX_FLOOR_EXPR:
1786 real_floor (&r, VOIDmode, &x);
1787 break;
1789 case FIX_ROUND_EXPR:
1790 real_round (&r, VOIDmode, &x);
1791 break;
1793 default:
1794 gcc_unreachable ();
1797 /* If R is NaN, return zero and show we have an overflow. */
1798 if (REAL_VALUE_ISNAN (r))
1800 overflow = 1;
1801 high = 0;
1802 low = 0;
1805 /* See if R is less than the lower bound or greater than the
1806 upper bound. */
1808 if (! overflow)
1810 tree lt = TYPE_MIN_VALUE (type);
1811 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1812 if (REAL_VALUES_LESS (r, l))
1814 overflow = 1;
1815 high = TREE_INT_CST_HIGH (lt);
1816 low = TREE_INT_CST_LOW (lt);
1820 if (! overflow)
1822 tree ut = TYPE_MAX_VALUE (type);
1823 if (ut)
1825 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1826 if (REAL_VALUES_LESS (u, r))
1828 overflow = 1;
1829 high = TREE_INT_CST_HIGH (ut);
1830 low = TREE_INT_CST_LOW (ut);
1835 if (! overflow)
1836 REAL_VALUE_TO_INT (&low, &high, r);
1838 t = build_int_cst_wide (type, low, high);
1840 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1841 TREE_CONSTANT_OVERFLOW (arg1));
1842 return t;
1845 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1846 to another floating point type. */
1848 static tree
1849 fold_convert_const_real_from_real (tree type, tree arg1)
1851 REAL_VALUE_TYPE value;
1852 tree t;
1854 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1855 t = build_real (type, value);
1857 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1858 TREE_CONSTANT_OVERFLOW (t)
1859 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1860 return t;
1863 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1864 type TYPE. If no simplification can be done return NULL_TREE. */
1866 static tree
1867 fold_convert_const (enum tree_code code, tree type, tree arg1)
1869 if (TREE_TYPE (arg1) == type)
1870 return arg1;
1872 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1874 if (TREE_CODE (arg1) == INTEGER_CST)
1875 return fold_convert_const_int_from_int (type, arg1);
1876 else if (TREE_CODE (arg1) == REAL_CST)
1877 return fold_convert_const_int_from_real (code, type, arg1);
1879 else if (TREE_CODE (type) == REAL_TYPE)
1881 if (TREE_CODE (arg1) == INTEGER_CST)
1882 return build_real_from_int_cst (type, arg1);
1883 if (TREE_CODE (arg1) == REAL_CST)
1884 return fold_convert_const_real_from_real (type, arg1);
1886 return NULL_TREE;
1889 /* Construct a vector of zero elements of vector type TYPE. */
1891 static tree
1892 build_zero_vector (tree type)
1894 tree elem, list;
1895 int i, units;
1897 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1898 units = TYPE_VECTOR_SUBPARTS (type);
1900 list = NULL_TREE;
1901 for (i = 0; i < units; i++)
1902 list = tree_cons (NULL_TREE, elem, list);
1903 return build_vector (type, list);
1906 /* Convert expression ARG to type TYPE. Used by the middle-end for
1907 simple conversions in preference to calling the front-end's convert. */
1909 tree
1910 fold_convert (tree type, tree arg)
1912 tree orig = TREE_TYPE (arg);
1913 tree tem;
1915 if (type == orig)
1916 return arg;
1918 if (TREE_CODE (arg) == ERROR_MARK
1919 || TREE_CODE (type) == ERROR_MARK
1920 || TREE_CODE (orig) == ERROR_MARK)
1921 return error_mark_node;
1923 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1924 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1925 TYPE_MAIN_VARIANT (orig)))
1926 return fold_build1 (NOP_EXPR, type, arg);
1928 switch (TREE_CODE (type))
1930 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1931 case POINTER_TYPE: case REFERENCE_TYPE:
1932 case OFFSET_TYPE:
1933 if (TREE_CODE (arg) == INTEGER_CST)
1935 tem = fold_convert_const (NOP_EXPR, type, arg);
1936 if (tem != NULL_TREE)
1937 return tem;
1939 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1940 || TREE_CODE (orig) == OFFSET_TYPE)
1941 return fold_build1 (NOP_EXPR, type, arg);
1942 if (TREE_CODE (orig) == COMPLEX_TYPE)
1944 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1945 return fold_convert (type, tem);
1947 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1948 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1949 return fold_build1 (NOP_EXPR, type, arg);
1951 case REAL_TYPE:
1952 if (TREE_CODE (arg) == INTEGER_CST)
1954 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1955 if (tem != NULL_TREE)
1956 return tem;
1958 else if (TREE_CODE (arg) == REAL_CST)
1960 tem = fold_convert_const (NOP_EXPR, type, arg);
1961 if (tem != NULL_TREE)
1962 return tem;
1965 switch (TREE_CODE (orig))
1967 case INTEGER_TYPE: case CHAR_TYPE:
1968 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1969 case POINTER_TYPE: case REFERENCE_TYPE:
1970 return fold_build1 (FLOAT_EXPR, type, arg);
1972 case REAL_TYPE:
1973 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1974 type, arg);
1976 case COMPLEX_TYPE:
1977 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1978 return fold_convert (type, tem);
1980 default:
1981 gcc_unreachable ();
1984 case COMPLEX_TYPE:
1985 switch (TREE_CODE (orig))
1987 case INTEGER_TYPE: case CHAR_TYPE:
1988 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1989 case POINTER_TYPE: case REFERENCE_TYPE:
1990 case REAL_TYPE:
1991 return build2 (COMPLEX_EXPR, type,
1992 fold_convert (TREE_TYPE (type), arg),
1993 fold_convert (TREE_TYPE (type), integer_zero_node));
1994 case COMPLEX_TYPE:
1996 tree rpart, ipart;
1998 if (TREE_CODE (arg) == COMPLEX_EXPR)
2000 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2001 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2002 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2005 arg = save_expr (arg);
2006 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2007 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2008 rpart = fold_convert (TREE_TYPE (type), rpart);
2009 ipart = fold_convert (TREE_TYPE (type), ipart);
2010 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2013 default:
2014 gcc_unreachable ();
2017 case VECTOR_TYPE:
2018 if (integer_zerop (arg))
2019 return build_zero_vector (type);
2020 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2021 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2022 || TREE_CODE (orig) == VECTOR_TYPE);
2023 return fold_build1 (NOP_EXPR, type, arg);
2025 case VOID_TYPE:
2026 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2028 default:
2029 gcc_unreachable ();
2033 /* Return false if expr can be assumed not to be an value, true
2034 otherwise. */
2036 static bool
2037 maybe_lvalue_p (tree x)
2039 /* We only need to wrap lvalue tree codes. */
2040 switch (TREE_CODE (x))
2042 case VAR_DECL:
2043 case PARM_DECL:
2044 case RESULT_DECL:
2045 case LABEL_DECL:
2046 case FUNCTION_DECL:
2047 case SSA_NAME:
2049 case COMPONENT_REF:
2050 case INDIRECT_REF:
2051 case ALIGN_INDIRECT_REF:
2052 case MISALIGNED_INDIRECT_REF:
2053 case ARRAY_REF:
2054 case ARRAY_RANGE_REF:
2055 case BIT_FIELD_REF:
2056 case OBJ_TYPE_REF:
2058 case REALPART_EXPR:
2059 case IMAGPART_EXPR:
2060 case PREINCREMENT_EXPR:
2061 case PREDECREMENT_EXPR:
2062 case SAVE_EXPR:
2063 case TRY_CATCH_EXPR:
2064 case WITH_CLEANUP_EXPR:
2065 case COMPOUND_EXPR:
2066 case MODIFY_EXPR:
2067 case TARGET_EXPR:
2068 case COND_EXPR:
2069 case BIND_EXPR:
2070 case MIN_EXPR:
2071 case MAX_EXPR:
2072 break;
2074 default:
2075 /* Assume the worst for front-end tree codes. */
2076 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2077 break;
2078 return false;
2081 return true;
2084 /* Return an expr equal to X but certainly not valid as an lvalue. */
2086 tree
2087 non_lvalue (tree x)
2089 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2090 us. */
2091 if (in_gimple_form)
2092 return x;
2094 if (! maybe_lvalue_p (x))
2095 return x;
2096 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2099 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2100 Zero means allow extended lvalues. */
2102 int pedantic_lvalues;
2104 /* When pedantic, return an expr equal to X but certainly not valid as a
2105 pedantic lvalue. Otherwise, return X. */
2107 static tree
2108 pedantic_non_lvalue (tree x)
2110 if (pedantic_lvalues)
2111 return non_lvalue (x);
2112 else
2113 return x;
2116 /* Given a tree comparison code, return the code that is the logical inverse
2117 of the given code. It is not safe to do this for floating-point
2118 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2119 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2121 enum tree_code
2122 invert_tree_comparison (enum tree_code code, bool honor_nans)
2124 if (honor_nans && flag_trapping_math)
2125 return ERROR_MARK;
2127 switch (code)
2129 case EQ_EXPR:
2130 return NE_EXPR;
2131 case NE_EXPR:
2132 return EQ_EXPR;
2133 case GT_EXPR:
2134 return honor_nans ? UNLE_EXPR : LE_EXPR;
2135 case GE_EXPR:
2136 return honor_nans ? UNLT_EXPR : LT_EXPR;
2137 case LT_EXPR:
2138 return honor_nans ? UNGE_EXPR : GE_EXPR;
2139 case LE_EXPR:
2140 return honor_nans ? UNGT_EXPR : GT_EXPR;
2141 case LTGT_EXPR:
2142 return UNEQ_EXPR;
2143 case UNEQ_EXPR:
2144 return LTGT_EXPR;
2145 case UNGT_EXPR:
2146 return LE_EXPR;
2147 case UNGE_EXPR:
2148 return LT_EXPR;
2149 case UNLT_EXPR:
2150 return GE_EXPR;
2151 case UNLE_EXPR:
2152 return GT_EXPR;
2153 case ORDERED_EXPR:
2154 return UNORDERED_EXPR;
2155 case UNORDERED_EXPR:
2156 return ORDERED_EXPR;
2157 default:
2158 gcc_unreachable ();
2162 /* Similar, but return the comparison that results if the operands are
2163 swapped. This is safe for floating-point. */
2165 enum tree_code
2166 swap_tree_comparison (enum tree_code code)
2168 switch (code)
2170 case EQ_EXPR:
2171 case NE_EXPR:
2172 return code;
2173 case GT_EXPR:
2174 return LT_EXPR;
2175 case GE_EXPR:
2176 return LE_EXPR;
2177 case LT_EXPR:
2178 return GT_EXPR;
2179 case LE_EXPR:
2180 return GE_EXPR;
2181 default:
2182 gcc_unreachable ();
2187 /* Convert a comparison tree code from an enum tree_code representation
2188 into a compcode bit-based encoding. This function is the inverse of
2189 compcode_to_comparison. */
2191 static enum comparison_code
2192 comparison_to_compcode (enum tree_code code)
2194 switch (code)
2196 case LT_EXPR:
2197 return COMPCODE_LT;
2198 case EQ_EXPR:
2199 return COMPCODE_EQ;
2200 case LE_EXPR:
2201 return COMPCODE_LE;
2202 case GT_EXPR:
2203 return COMPCODE_GT;
2204 case NE_EXPR:
2205 return COMPCODE_NE;
2206 case GE_EXPR:
2207 return COMPCODE_GE;
2208 case ORDERED_EXPR:
2209 return COMPCODE_ORD;
2210 case UNORDERED_EXPR:
2211 return COMPCODE_UNORD;
2212 case UNLT_EXPR:
2213 return COMPCODE_UNLT;
2214 case UNEQ_EXPR:
2215 return COMPCODE_UNEQ;
2216 case UNLE_EXPR:
2217 return COMPCODE_UNLE;
2218 case UNGT_EXPR:
2219 return COMPCODE_UNGT;
2220 case LTGT_EXPR:
2221 return COMPCODE_LTGT;
2222 case UNGE_EXPR:
2223 return COMPCODE_UNGE;
2224 default:
2225 gcc_unreachable ();
2229 /* Convert a compcode bit-based encoding of a comparison operator back
2230 to GCC's enum tree_code representation. This function is the
2231 inverse of comparison_to_compcode. */
2233 static enum tree_code
2234 compcode_to_comparison (enum comparison_code code)
2236 switch (code)
2238 case COMPCODE_LT:
2239 return LT_EXPR;
2240 case COMPCODE_EQ:
2241 return EQ_EXPR;
2242 case COMPCODE_LE:
2243 return LE_EXPR;
2244 case COMPCODE_GT:
2245 return GT_EXPR;
2246 case COMPCODE_NE:
2247 return NE_EXPR;
2248 case COMPCODE_GE:
2249 return GE_EXPR;
2250 case COMPCODE_ORD:
2251 return ORDERED_EXPR;
2252 case COMPCODE_UNORD:
2253 return UNORDERED_EXPR;
2254 case COMPCODE_UNLT:
2255 return UNLT_EXPR;
2256 case COMPCODE_UNEQ:
2257 return UNEQ_EXPR;
2258 case COMPCODE_UNLE:
2259 return UNLE_EXPR;
2260 case COMPCODE_UNGT:
2261 return UNGT_EXPR;
2262 case COMPCODE_LTGT:
2263 return LTGT_EXPR;
2264 case COMPCODE_UNGE:
2265 return UNGE_EXPR;
2266 default:
2267 gcc_unreachable ();
2271 /* Return a tree for the comparison which is the combination of
2272 doing the AND or OR (depending on CODE) of the two operations LCODE
2273 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2274 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2275 if this makes the transformation invalid. */
2277 tree
2278 combine_comparisons (enum tree_code code, enum tree_code lcode,
2279 enum tree_code rcode, tree truth_type,
2280 tree ll_arg, tree lr_arg)
2282 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2283 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2284 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2285 enum comparison_code compcode;
2287 switch (code)
2289 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2290 compcode = lcompcode & rcompcode;
2291 break;
2293 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2294 compcode = lcompcode | rcompcode;
2295 break;
2297 default:
2298 return NULL_TREE;
2301 if (!honor_nans)
2303 /* Eliminate unordered comparisons, as well as LTGT and ORD
2304 which are not used unless the mode has NaNs. */
2305 compcode &= ~COMPCODE_UNORD;
2306 if (compcode == COMPCODE_LTGT)
2307 compcode = COMPCODE_NE;
2308 else if (compcode == COMPCODE_ORD)
2309 compcode = COMPCODE_TRUE;
2311 else if (flag_trapping_math)
2313 /* Check that the original operation and the optimized ones will trap
2314 under the same condition. */
2315 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2316 && (lcompcode != COMPCODE_EQ)
2317 && (lcompcode != COMPCODE_ORD);
2318 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2319 && (rcompcode != COMPCODE_EQ)
2320 && (rcompcode != COMPCODE_ORD);
2321 bool trap = (compcode & COMPCODE_UNORD) == 0
2322 && (compcode != COMPCODE_EQ)
2323 && (compcode != COMPCODE_ORD);
2325 /* In a short-circuited boolean expression the LHS might be
2326 such that the RHS, if evaluated, will never trap. For
2327 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2328 if neither x nor y is NaN. (This is a mixed blessing: for
2329 example, the expression above will never trap, hence
2330 optimizing it to x < y would be invalid). */
2331 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2332 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2333 rtrap = false;
2335 /* If the comparison was short-circuited, and only the RHS
2336 trapped, we may now generate a spurious trap. */
2337 if (rtrap && !ltrap
2338 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2339 return NULL_TREE;
2341 /* If we changed the conditions that cause a trap, we lose. */
2342 if ((ltrap || rtrap) != trap)
2343 return NULL_TREE;
2346 if (compcode == COMPCODE_TRUE)
2347 return constant_boolean_node (true, truth_type);
2348 else if (compcode == COMPCODE_FALSE)
2349 return constant_boolean_node (false, truth_type);
2350 else
2351 return fold_build2 (compcode_to_comparison (compcode),
2352 truth_type, ll_arg, lr_arg);
2355 /* Return nonzero if CODE is a tree code that represents a truth value. */
2357 static int
2358 truth_value_p (enum tree_code code)
2360 return (TREE_CODE_CLASS (code) == tcc_comparison
2361 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2362 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2363 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2366 /* Return nonzero if two operands (typically of the same tree node)
2367 are necessarily equal. If either argument has side-effects this
2368 function returns zero. FLAGS modifies behavior as follows:
2370 If OEP_ONLY_CONST is set, only return nonzero for constants.
2371 This function tests whether the operands are indistinguishable;
2372 it does not test whether they are equal using C's == operation.
2373 The distinction is important for IEEE floating point, because
2374 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2375 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2377 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2378 even though it may hold multiple values during a function.
2379 This is because a GCC tree node guarantees that nothing else is
2380 executed between the evaluation of its "operands" (which may often
2381 be evaluated in arbitrary order). Hence if the operands themselves
2382 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2383 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2384 unset means assuming isochronic (or instantaneous) tree equivalence.
2385 Unless comparing arbitrary expression trees, such as from different
2386 statements, this flag can usually be left unset.
2388 If OEP_PURE_SAME is set, then pure functions with identical arguments
2389 are considered the same. It is used when the caller has other ways
2390 to ensure that global memory is unchanged in between. */
2393 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2395 /* If either is ERROR_MARK, they aren't equal. */
2396 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2397 return 0;
2399 /* If both types don't have the same signedness, then we can't consider
2400 them equal. We must check this before the STRIP_NOPS calls
2401 because they may change the signedness of the arguments. */
2402 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2403 return 0;
2405 STRIP_NOPS (arg0);
2406 STRIP_NOPS (arg1);
2408 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2409 /* This is needed for conversions and for COMPONENT_REF.
2410 Might as well play it safe and always test this. */
2411 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2412 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2413 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2414 return 0;
2416 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2417 We don't care about side effects in that case because the SAVE_EXPR
2418 takes care of that for us. In all other cases, two expressions are
2419 equal if they have no side effects. If we have two identical
2420 expressions with side effects that should be treated the same due
2421 to the only side effects being identical SAVE_EXPR's, that will
2422 be detected in the recursive calls below. */
2423 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2424 && (TREE_CODE (arg0) == SAVE_EXPR
2425 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2426 return 1;
2428 /* Next handle constant cases, those for which we can return 1 even
2429 if ONLY_CONST is set. */
2430 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2431 switch (TREE_CODE (arg0))
2433 case INTEGER_CST:
2434 return (! TREE_CONSTANT_OVERFLOW (arg0)
2435 && ! TREE_CONSTANT_OVERFLOW (arg1)
2436 && tree_int_cst_equal (arg0, arg1));
2438 case REAL_CST:
2439 return (! TREE_CONSTANT_OVERFLOW (arg0)
2440 && ! TREE_CONSTANT_OVERFLOW (arg1)
2441 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2442 TREE_REAL_CST (arg1)));
2444 case VECTOR_CST:
2446 tree v1, v2;
2448 if (TREE_CONSTANT_OVERFLOW (arg0)
2449 || TREE_CONSTANT_OVERFLOW (arg1))
2450 return 0;
2452 v1 = TREE_VECTOR_CST_ELTS (arg0);
2453 v2 = TREE_VECTOR_CST_ELTS (arg1);
2454 while (v1 && v2)
2456 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2457 flags))
2458 return 0;
2459 v1 = TREE_CHAIN (v1);
2460 v2 = TREE_CHAIN (v2);
2463 return 1;
2466 case COMPLEX_CST:
2467 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2468 flags)
2469 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2470 flags));
2472 case STRING_CST:
2473 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2474 && ! memcmp (TREE_STRING_POINTER (arg0),
2475 TREE_STRING_POINTER (arg1),
2476 TREE_STRING_LENGTH (arg0)));
2478 case ADDR_EXPR:
2479 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2481 default:
2482 break;
2485 if (flags & OEP_ONLY_CONST)
2486 return 0;
2488 /* Define macros to test an operand from arg0 and arg1 for equality and a
2489 variant that allows null and views null as being different from any
2490 non-null value. In the latter case, if either is null, the both
2491 must be; otherwise, do the normal comparison. */
2492 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2493 TREE_OPERAND (arg1, N), flags)
2495 #define OP_SAME_WITH_NULL(N) \
2496 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2497 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2499 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2501 case tcc_unary:
2502 /* Two conversions are equal only if signedness and modes match. */
2503 switch (TREE_CODE (arg0))
2505 case NOP_EXPR:
2506 case CONVERT_EXPR:
2507 case FIX_CEIL_EXPR:
2508 case FIX_TRUNC_EXPR:
2509 case FIX_FLOOR_EXPR:
2510 case FIX_ROUND_EXPR:
2511 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2512 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2513 return 0;
2514 break;
2515 default:
2516 break;
2519 return OP_SAME (0);
2522 case tcc_comparison:
2523 case tcc_binary:
2524 if (OP_SAME (0) && OP_SAME (1))
2525 return 1;
2527 /* For commutative ops, allow the other order. */
2528 return (commutative_tree_code (TREE_CODE (arg0))
2529 && operand_equal_p (TREE_OPERAND (arg0, 0),
2530 TREE_OPERAND (arg1, 1), flags)
2531 && operand_equal_p (TREE_OPERAND (arg0, 1),
2532 TREE_OPERAND (arg1, 0), flags));
2534 case tcc_reference:
2535 /* If either of the pointer (or reference) expressions we are
2536 dereferencing contain a side effect, these cannot be equal. */
2537 if (TREE_SIDE_EFFECTS (arg0)
2538 || TREE_SIDE_EFFECTS (arg1))
2539 return 0;
2541 switch (TREE_CODE (arg0))
2543 case INDIRECT_REF:
2544 case ALIGN_INDIRECT_REF:
2545 case MISALIGNED_INDIRECT_REF:
2546 case REALPART_EXPR:
2547 case IMAGPART_EXPR:
2548 return OP_SAME (0);
2550 case ARRAY_REF:
2551 case ARRAY_RANGE_REF:
2552 /* Operands 2 and 3 may be null. */
2553 return (OP_SAME (0)
2554 && OP_SAME (1)
2555 && OP_SAME_WITH_NULL (2)
2556 && OP_SAME_WITH_NULL (3));
2558 case COMPONENT_REF:
2559 /* Handle operand 2 the same as for ARRAY_REF. */
2560 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2562 case BIT_FIELD_REF:
2563 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2565 default:
2566 return 0;
2569 case tcc_expression:
2570 switch (TREE_CODE (arg0))
2572 case ADDR_EXPR:
2573 case TRUTH_NOT_EXPR:
2574 return OP_SAME (0);
2576 case TRUTH_ANDIF_EXPR:
2577 case TRUTH_ORIF_EXPR:
2578 return OP_SAME (0) && OP_SAME (1);
2580 case TRUTH_AND_EXPR:
2581 case TRUTH_OR_EXPR:
2582 case TRUTH_XOR_EXPR:
2583 if (OP_SAME (0) && OP_SAME (1))
2584 return 1;
2586 /* Otherwise take into account this is a commutative operation. */
2587 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2588 TREE_OPERAND (arg1, 1), flags)
2589 && operand_equal_p (TREE_OPERAND (arg0, 1),
2590 TREE_OPERAND (arg1, 0), flags));
2592 case CALL_EXPR:
2593 /* If the CALL_EXPRs call different functions, then they
2594 clearly can not be equal. */
2595 if (!OP_SAME (0))
2596 return 0;
2599 unsigned int cef = call_expr_flags (arg0);
2600 if (flags & OEP_PURE_SAME)
2601 cef &= ECF_CONST | ECF_PURE;
2602 else
2603 cef &= ECF_CONST;
2604 if (!cef)
2605 return 0;
2608 /* Now see if all the arguments are the same. operand_equal_p
2609 does not handle TREE_LIST, so we walk the operands here
2610 feeding them to operand_equal_p. */
2611 arg0 = TREE_OPERAND (arg0, 1);
2612 arg1 = TREE_OPERAND (arg1, 1);
2613 while (arg0 && arg1)
2615 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2616 flags))
2617 return 0;
2619 arg0 = TREE_CHAIN (arg0);
2620 arg1 = TREE_CHAIN (arg1);
2623 /* If we get here and both argument lists are exhausted
2624 then the CALL_EXPRs are equal. */
2625 return ! (arg0 || arg1);
2627 default:
2628 return 0;
2631 case tcc_declaration:
2632 /* Consider __builtin_sqrt equal to sqrt. */
2633 return (TREE_CODE (arg0) == FUNCTION_DECL
2634 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2635 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2636 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2638 default:
2639 return 0;
2642 #undef OP_SAME
2643 #undef OP_SAME_WITH_NULL
2646 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2647 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2649 When in doubt, return 0. */
2651 static int
2652 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2654 int unsignedp1, unsignedpo;
2655 tree primarg0, primarg1, primother;
2656 unsigned int correct_width;
2658 if (operand_equal_p (arg0, arg1, 0))
2659 return 1;
2661 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2662 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2663 return 0;
2665 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2666 and see if the inner values are the same. This removes any
2667 signedness comparison, which doesn't matter here. */
2668 primarg0 = arg0, primarg1 = arg1;
2669 STRIP_NOPS (primarg0);
2670 STRIP_NOPS (primarg1);
2671 if (operand_equal_p (primarg0, primarg1, 0))
2672 return 1;
2674 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2675 actual comparison operand, ARG0.
2677 First throw away any conversions to wider types
2678 already present in the operands. */
2680 primarg1 = get_narrower (arg1, &unsignedp1);
2681 primother = get_narrower (other, &unsignedpo);
2683 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2684 if (unsignedp1 == unsignedpo
2685 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2686 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2688 tree type = TREE_TYPE (arg0);
2690 /* Make sure shorter operand is extended the right way
2691 to match the longer operand. */
2692 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2693 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2695 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2696 return 1;
2699 return 0;
2702 /* See if ARG is an expression that is either a comparison or is performing
2703 arithmetic on comparisons. The comparisons must only be comparing
2704 two different values, which will be stored in *CVAL1 and *CVAL2; if
2705 they are nonzero it means that some operands have already been found.
2706 No variables may be used anywhere else in the expression except in the
2707 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2708 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2710 If this is true, return 1. Otherwise, return zero. */
2712 static int
2713 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2715 enum tree_code code = TREE_CODE (arg);
2716 enum tree_code_class class = TREE_CODE_CLASS (code);
2718 /* We can handle some of the tcc_expression cases here. */
2719 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2720 class = tcc_unary;
2721 else if (class == tcc_expression
2722 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2723 || code == COMPOUND_EXPR))
2724 class = tcc_binary;
2726 else if (class == tcc_expression && code == SAVE_EXPR
2727 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2729 /* If we've already found a CVAL1 or CVAL2, this expression is
2730 two complex to handle. */
2731 if (*cval1 || *cval2)
2732 return 0;
2734 class = tcc_unary;
2735 *save_p = 1;
2738 switch (class)
2740 case tcc_unary:
2741 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2743 case tcc_binary:
2744 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2745 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2746 cval1, cval2, save_p));
2748 case tcc_constant:
2749 return 1;
2751 case tcc_expression:
2752 if (code == COND_EXPR)
2753 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2754 cval1, cval2, save_p)
2755 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2756 cval1, cval2, save_p)
2757 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2758 cval1, cval2, save_p));
2759 return 0;
2761 case tcc_comparison:
2762 /* First see if we can handle the first operand, then the second. For
2763 the second operand, we know *CVAL1 can't be zero. It must be that
2764 one side of the comparison is each of the values; test for the
2765 case where this isn't true by failing if the two operands
2766 are the same. */
2768 if (operand_equal_p (TREE_OPERAND (arg, 0),
2769 TREE_OPERAND (arg, 1), 0))
2770 return 0;
2772 if (*cval1 == 0)
2773 *cval1 = TREE_OPERAND (arg, 0);
2774 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2776 else if (*cval2 == 0)
2777 *cval2 = TREE_OPERAND (arg, 0);
2778 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2780 else
2781 return 0;
2783 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2785 else if (*cval2 == 0)
2786 *cval2 = TREE_OPERAND (arg, 1);
2787 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2789 else
2790 return 0;
2792 return 1;
2794 default:
2795 return 0;
2799 /* ARG is a tree that is known to contain just arithmetic operations and
2800 comparisons. Evaluate the operations in the tree substituting NEW0 for
2801 any occurrence of OLD0 as an operand of a comparison and likewise for
2802 NEW1 and OLD1. */
2804 static tree
2805 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2807 tree type = TREE_TYPE (arg);
2808 enum tree_code code = TREE_CODE (arg);
2809 enum tree_code_class class = TREE_CODE_CLASS (code);
2811 /* We can handle some of the tcc_expression cases here. */
2812 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2813 class = tcc_unary;
2814 else if (class == tcc_expression
2815 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2816 class = tcc_binary;
2818 switch (class)
2820 case tcc_unary:
2821 return fold_build1 (code, type,
2822 eval_subst (TREE_OPERAND (arg, 0),
2823 old0, new0, old1, new1));
2825 case tcc_binary:
2826 return fold_build2 (code, type,
2827 eval_subst (TREE_OPERAND (arg, 0),
2828 old0, new0, old1, new1),
2829 eval_subst (TREE_OPERAND (arg, 1),
2830 old0, new0, old1, new1));
2832 case tcc_expression:
2833 switch (code)
2835 case SAVE_EXPR:
2836 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2838 case COMPOUND_EXPR:
2839 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2841 case COND_EXPR:
2842 return fold_build3 (code, type,
2843 eval_subst (TREE_OPERAND (arg, 0),
2844 old0, new0, old1, new1),
2845 eval_subst (TREE_OPERAND (arg, 1),
2846 old0, new0, old1, new1),
2847 eval_subst (TREE_OPERAND (arg, 2),
2848 old0, new0, old1, new1));
2849 default:
2850 break;
2852 /* Fall through - ??? */
2854 case tcc_comparison:
2856 tree arg0 = TREE_OPERAND (arg, 0);
2857 tree arg1 = TREE_OPERAND (arg, 1);
2859 /* We need to check both for exact equality and tree equality. The
2860 former will be true if the operand has a side-effect. In that
2861 case, we know the operand occurred exactly once. */
2863 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2864 arg0 = new0;
2865 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2866 arg0 = new1;
2868 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2869 arg1 = new0;
2870 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2871 arg1 = new1;
2873 return fold_build2 (code, type, arg0, arg1);
2876 default:
2877 return arg;
2881 /* Return a tree for the case when the result of an expression is RESULT
2882 converted to TYPE and OMITTED was previously an operand of the expression
2883 but is now not needed (e.g., we folded OMITTED * 0).
2885 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2886 the conversion of RESULT to TYPE. */
2888 tree
2889 omit_one_operand (tree type, tree result, tree omitted)
2891 tree t = fold_convert (type, result);
2893 if (TREE_SIDE_EFFECTS (omitted))
2894 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2896 return non_lvalue (t);
2899 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2901 static tree
2902 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2904 tree t = fold_convert (type, result);
2906 if (TREE_SIDE_EFFECTS (omitted))
2907 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2909 return pedantic_non_lvalue (t);
2912 /* Return a tree for the case when the result of an expression is RESULT
2913 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2914 of the expression but are now not needed.
2916 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2917 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2918 evaluated before OMITTED2. Otherwise, if neither has side effects,
2919 just do the conversion of RESULT to TYPE. */
2921 tree
2922 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2924 tree t = fold_convert (type, result);
2926 if (TREE_SIDE_EFFECTS (omitted2))
2927 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2928 if (TREE_SIDE_EFFECTS (omitted1))
2929 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2931 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2935 /* Return a simplified tree node for the truth-negation of ARG. This
2936 never alters ARG itself. We assume that ARG is an operation that
2937 returns a truth value (0 or 1).
2939 FIXME: one would think we would fold the result, but it causes
2940 problems with the dominator optimizer. */
2941 tree
2942 invert_truthvalue (tree arg)
2944 tree type = TREE_TYPE (arg);
2945 enum tree_code code = TREE_CODE (arg);
2947 if (code == ERROR_MARK)
2948 return arg;
2950 /* If this is a comparison, we can simply invert it, except for
2951 floating-point non-equality comparisons, in which case we just
2952 enclose a TRUTH_NOT_EXPR around what we have. */
2954 if (TREE_CODE_CLASS (code) == tcc_comparison)
2956 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2957 if (FLOAT_TYPE_P (op_type)
2958 && flag_trapping_math
2959 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2960 && code != NE_EXPR && code != EQ_EXPR)
2961 return build1 (TRUTH_NOT_EXPR, type, arg);
2962 else
2964 code = invert_tree_comparison (code,
2965 HONOR_NANS (TYPE_MODE (op_type)));
2966 if (code == ERROR_MARK)
2967 return build1 (TRUTH_NOT_EXPR, type, arg);
2968 else
2969 return build2 (code, type,
2970 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2974 switch (code)
2976 case INTEGER_CST:
2977 return constant_boolean_node (integer_zerop (arg), type);
2979 case TRUTH_AND_EXPR:
2980 return build2 (TRUTH_OR_EXPR, type,
2981 invert_truthvalue (TREE_OPERAND (arg, 0)),
2982 invert_truthvalue (TREE_OPERAND (arg, 1)));
2984 case TRUTH_OR_EXPR:
2985 return build2 (TRUTH_AND_EXPR, type,
2986 invert_truthvalue (TREE_OPERAND (arg, 0)),
2987 invert_truthvalue (TREE_OPERAND (arg, 1)));
2989 case TRUTH_XOR_EXPR:
2990 /* Here we can invert either operand. We invert the first operand
2991 unless the second operand is a TRUTH_NOT_EXPR in which case our
2992 result is the XOR of the first operand with the inside of the
2993 negation of the second operand. */
2995 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2996 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2997 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2998 else
2999 return build2 (TRUTH_XOR_EXPR, type,
3000 invert_truthvalue (TREE_OPERAND (arg, 0)),
3001 TREE_OPERAND (arg, 1));
3003 case TRUTH_ANDIF_EXPR:
3004 return build2 (TRUTH_ORIF_EXPR, type,
3005 invert_truthvalue (TREE_OPERAND (arg, 0)),
3006 invert_truthvalue (TREE_OPERAND (arg, 1)));
3008 case TRUTH_ORIF_EXPR:
3009 return build2 (TRUTH_ANDIF_EXPR, type,
3010 invert_truthvalue (TREE_OPERAND (arg, 0)),
3011 invert_truthvalue (TREE_OPERAND (arg, 1)));
3013 case TRUTH_NOT_EXPR:
3014 return TREE_OPERAND (arg, 0);
3016 case COND_EXPR:
3017 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3018 invert_truthvalue (TREE_OPERAND (arg, 1)),
3019 invert_truthvalue (TREE_OPERAND (arg, 2)));
3021 case COMPOUND_EXPR:
3022 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3023 invert_truthvalue (TREE_OPERAND (arg, 1)));
3025 case NON_LVALUE_EXPR:
3026 return invert_truthvalue (TREE_OPERAND (arg, 0));
3028 case NOP_EXPR:
3029 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3030 break;
3032 case CONVERT_EXPR:
3033 case FLOAT_EXPR:
3034 return build1 (TREE_CODE (arg), type,
3035 invert_truthvalue (TREE_OPERAND (arg, 0)));
3037 case BIT_AND_EXPR:
3038 if (!integer_onep (TREE_OPERAND (arg, 1)))
3039 break;
3040 return build2 (EQ_EXPR, type, arg,
3041 fold_convert (type, integer_zero_node));
3043 case SAVE_EXPR:
3044 return build1 (TRUTH_NOT_EXPR, type, arg);
3046 case CLEANUP_POINT_EXPR:
3047 return build1 (CLEANUP_POINT_EXPR, type,
3048 invert_truthvalue (TREE_OPERAND (arg, 0)));
3050 default:
3051 break;
3053 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3054 return build1 (TRUTH_NOT_EXPR, type, arg);
3057 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3058 operands are another bit-wise operation with a common input. If so,
3059 distribute the bit operations to save an operation and possibly two if
3060 constants are involved. For example, convert
3061 (A | B) & (A | C) into A | (B & C)
3062 Further simplification will occur if B and C are constants.
3064 If this optimization cannot be done, 0 will be returned. */
3066 static tree
3067 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3069 tree common;
3070 tree left, right;
3072 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3073 || TREE_CODE (arg0) == code
3074 || (TREE_CODE (arg0) != BIT_AND_EXPR
3075 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3076 return 0;
3078 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3080 common = TREE_OPERAND (arg0, 0);
3081 left = TREE_OPERAND (arg0, 1);
3082 right = TREE_OPERAND (arg1, 1);
3084 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3086 common = TREE_OPERAND (arg0, 0);
3087 left = TREE_OPERAND (arg0, 1);
3088 right = TREE_OPERAND (arg1, 0);
3090 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3092 common = TREE_OPERAND (arg0, 1);
3093 left = TREE_OPERAND (arg0, 0);
3094 right = TREE_OPERAND (arg1, 1);
3096 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3098 common = TREE_OPERAND (arg0, 1);
3099 left = TREE_OPERAND (arg0, 0);
3100 right = TREE_OPERAND (arg1, 0);
3102 else
3103 return 0;
3105 return fold_build2 (TREE_CODE (arg0), type, common,
3106 fold_build2 (code, type, left, right));
3109 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3110 with code CODE. This optimization is unsafe. */
3111 static tree
3112 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3114 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3115 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3117 /* (A / C) +- (B / C) -> (A +- B) / C. */
3118 if (mul0 == mul1
3119 && operand_equal_p (TREE_OPERAND (arg0, 1),
3120 TREE_OPERAND (arg1, 1), 0))
3121 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3122 fold_build2 (code, type,
3123 TREE_OPERAND (arg0, 0),
3124 TREE_OPERAND (arg1, 0)),
3125 TREE_OPERAND (arg0, 1));
3127 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3128 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3129 TREE_OPERAND (arg1, 0), 0)
3130 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3131 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3133 REAL_VALUE_TYPE r0, r1;
3134 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3135 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3136 if (!mul0)
3137 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3138 if (!mul1)
3139 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3140 real_arithmetic (&r0, code, &r0, &r1);
3141 return fold_build2 (MULT_EXPR, type,
3142 TREE_OPERAND (arg0, 0),
3143 build_real (type, r0));
3146 return NULL_TREE;
3149 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3150 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3152 static tree
3153 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3154 int unsignedp)
3156 tree result;
3158 if (bitpos == 0)
3160 tree size = TYPE_SIZE (TREE_TYPE (inner));
3161 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3162 || POINTER_TYPE_P (TREE_TYPE (inner)))
3163 && host_integerp (size, 0)
3164 && tree_low_cst (size, 0) == bitsize)
3165 return fold_convert (type, inner);
3168 result = build3 (BIT_FIELD_REF, type, inner,
3169 size_int (bitsize), bitsize_int (bitpos));
3171 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3173 return result;
3176 /* Optimize a bit-field compare.
3178 There are two cases: First is a compare against a constant and the
3179 second is a comparison of two items where the fields are at the same
3180 bit position relative to the start of a chunk (byte, halfword, word)
3181 large enough to contain it. In these cases we can avoid the shift
3182 implicit in bitfield extractions.
3184 For constants, we emit a compare of the shifted constant with the
3185 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3186 compared. For two fields at the same position, we do the ANDs with the
3187 similar mask and compare the result of the ANDs.
3189 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3190 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3191 are the left and right operands of the comparison, respectively.
3193 If the optimization described above can be done, we return the resulting
3194 tree. Otherwise we return zero. */
3196 static tree
3197 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3198 tree lhs, tree rhs)
3200 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3201 tree type = TREE_TYPE (lhs);
3202 tree signed_type, unsigned_type;
3203 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3204 enum machine_mode lmode, rmode, nmode;
3205 int lunsignedp, runsignedp;
3206 int lvolatilep = 0, rvolatilep = 0;
3207 tree linner, rinner = NULL_TREE;
3208 tree mask;
3209 tree offset;
3211 /* Get all the information about the extractions being done. If the bit size
3212 if the same as the size of the underlying object, we aren't doing an
3213 extraction at all and so can do nothing. We also don't want to
3214 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3215 then will no longer be able to replace it. */
3216 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3217 &lunsignedp, &lvolatilep, false);
3218 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3219 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3220 return 0;
3222 if (!const_p)
3224 /* If this is not a constant, we can only do something if bit positions,
3225 sizes, and signedness are the same. */
3226 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3227 &runsignedp, &rvolatilep, false);
3229 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3230 || lunsignedp != runsignedp || offset != 0
3231 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3232 return 0;
3235 /* See if we can find a mode to refer to this field. We should be able to,
3236 but fail if we can't. */
3237 nmode = get_best_mode (lbitsize, lbitpos,
3238 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3239 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3240 TYPE_ALIGN (TREE_TYPE (rinner))),
3241 word_mode, lvolatilep || rvolatilep);
3242 if (nmode == VOIDmode)
3243 return 0;
3245 /* Set signed and unsigned types of the precision of this mode for the
3246 shifts below. */
3247 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3248 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3250 /* Compute the bit position and size for the new reference and our offset
3251 within it. If the new reference is the same size as the original, we
3252 won't optimize anything, so return zero. */
3253 nbitsize = GET_MODE_BITSIZE (nmode);
3254 nbitpos = lbitpos & ~ (nbitsize - 1);
3255 lbitpos -= nbitpos;
3256 if (nbitsize == lbitsize)
3257 return 0;
3259 if (BYTES_BIG_ENDIAN)
3260 lbitpos = nbitsize - lbitsize - lbitpos;
3262 /* Make the mask to be used against the extracted field. */
3263 mask = build_int_cst (unsigned_type, -1);
3264 mask = force_fit_type (mask, 0, false, false);
3265 mask = fold_convert (unsigned_type, mask);
3266 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3267 mask = const_binop (RSHIFT_EXPR, mask,
3268 size_int (nbitsize - lbitsize - lbitpos), 0);
3270 if (! const_p)
3271 /* If not comparing with constant, just rework the comparison
3272 and return. */
3273 return build2 (code, compare_type,
3274 build2 (BIT_AND_EXPR, unsigned_type,
3275 make_bit_field_ref (linner, unsigned_type,
3276 nbitsize, nbitpos, 1),
3277 mask),
3278 build2 (BIT_AND_EXPR, unsigned_type,
3279 make_bit_field_ref (rinner, unsigned_type,
3280 nbitsize, nbitpos, 1),
3281 mask));
3283 /* Otherwise, we are handling the constant case. See if the constant is too
3284 big for the field. Warn and return a tree of for 0 (false) if so. We do
3285 this not only for its own sake, but to avoid having to test for this
3286 error case below. If we didn't, we might generate wrong code.
3288 For unsigned fields, the constant shifted right by the field length should
3289 be all zero. For signed fields, the high-order bits should agree with
3290 the sign bit. */
3292 if (lunsignedp)
3294 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3295 fold_convert (unsigned_type, rhs),
3296 size_int (lbitsize), 0)))
3298 warning (0, "comparison is always %d due to width of bit-field",
3299 code == NE_EXPR);
3300 return constant_boolean_node (code == NE_EXPR, compare_type);
3303 else
3305 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3306 size_int (lbitsize - 1), 0);
3307 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3309 warning (0, "comparison is always %d due to width of bit-field",
3310 code == NE_EXPR);
3311 return constant_boolean_node (code == NE_EXPR, compare_type);
3315 /* Single-bit compares should always be against zero. */
3316 if (lbitsize == 1 && ! integer_zerop (rhs))
3318 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3319 rhs = fold_convert (type, integer_zero_node);
3322 /* Make a new bitfield reference, shift the constant over the
3323 appropriate number of bits and mask it with the computed mask
3324 (in case this was a signed field). If we changed it, make a new one. */
3325 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3326 if (lvolatilep)
3328 TREE_SIDE_EFFECTS (lhs) = 1;
3329 TREE_THIS_VOLATILE (lhs) = 1;
3332 rhs = fold (const_binop (BIT_AND_EXPR,
3333 const_binop (LSHIFT_EXPR,
3334 fold_convert (unsigned_type, rhs),
3335 size_int (lbitpos), 0),
3336 mask, 0));
3338 return build2 (code, compare_type,
3339 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3340 rhs);
3343 /* Subroutine for fold_truthop: decode a field reference.
3345 If EXP is a comparison reference, we return the innermost reference.
3347 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3348 set to the starting bit number.
3350 If the innermost field can be completely contained in a mode-sized
3351 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3353 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3354 otherwise it is not changed.
3356 *PUNSIGNEDP is set to the signedness of the field.
3358 *PMASK is set to the mask used. This is either contained in a
3359 BIT_AND_EXPR or derived from the width of the field.
3361 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3363 Return 0 if this is not a component reference or is one that we can't
3364 do anything with. */
3366 static tree
3367 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3368 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3369 int *punsignedp, int *pvolatilep,
3370 tree *pmask, tree *pand_mask)
3372 tree outer_type = 0;
3373 tree and_mask = 0;
3374 tree mask, inner, offset;
3375 tree unsigned_type;
3376 unsigned int precision;
3378 /* All the optimizations using this function assume integer fields.
3379 There are problems with FP fields since the type_for_size call
3380 below can fail for, e.g., XFmode. */
3381 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3382 return 0;
3384 /* We are interested in the bare arrangement of bits, so strip everything
3385 that doesn't affect the machine mode. However, record the type of the
3386 outermost expression if it may matter below. */
3387 if (TREE_CODE (exp) == NOP_EXPR
3388 || TREE_CODE (exp) == CONVERT_EXPR
3389 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3390 outer_type = TREE_TYPE (exp);
3391 STRIP_NOPS (exp);
3393 if (TREE_CODE (exp) == BIT_AND_EXPR)
3395 and_mask = TREE_OPERAND (exp, 1);
3396 exp = TREE_OPERAND (exp, 0);
3397 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3398 if (TREE_CODE (and_mask) != INTEGER_CST)
3399 return 0;
3402 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3403 punsignedp, pvolatilep, false);
3404 if ((inner == exp && and_mask == 0)
3405 || *pbitsize < 0 || offset != 0
3406 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3407 return 0;
3409 /* If the number of bits in the reference is the same as the bitsize of
3410 the outer type, then the outer type gives the signedness. Otherwise
3411 (in case of a small bitfield) the signedness is unchanged. */
3412 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3413 *punsignedp = TYPE_UNSIGNED (outer_type);
3415 /* Compute the mask to access the bitfield. */
3416 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3417 precision = TYPE_PRECISION (unsigned_type);
3419 mask = build_int_cst (unsigned_type, -1);
3420 mask = force_fit_type (mask, 0, false, false);
3422 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3423 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3425 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3426 if (and_mask != 0)
3427 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3428 fold_convert (unsigned_type, and_mask), mask);
3430 *pmask = mask;
3431 *pand_mask = and_mask;
3432 return inner;
3435 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3436 bit positions. */
3438 static int
3439 all_ones_mask_p (tree mask, int size)
3441 tree type = TREE_TYPE (mask);
3442 unsigned int precision = TYPE_PRECISION (type);
3443 tree tmask;
3445 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3446 tmask = force_fit_type (tmask, 0, false, false);
3448 return
3449 tree_int_cst_equal (mask,
3450 const_binop (RSHIFT_EXPR,
3451 const_binop (LSHIFT_EXPR, tmask,
3452 size_int (precision - size),
3454 size_int (precision - size), 0));
3457 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3458 represents the sign bit of EXP's type. If EXP represents a sign
3459 or zero extension, also test VAL against the unextended type.
3460 The return value is the (sub)expression whose sign bit is VAL,
3461 or NULL_TREE otherwise. */
3463 static tree
3464 sign_bit_p (tree exp, tree val)
3466 unsigned HOST_WIDE_INT mask_lo, lo;
3467 HOST_WIDE_INT mask_hi, hi;
3468 int width;
3469 tree t;
3471 /* Tree EXP must have an integral type. */
3472 t = TREE_TYPE (exp);
3473 if (! INTEGRAL_TYPE_P (t))
3474 return NULL_TREE;
3476 /* Tree VAL must be an integer constant. */
3477 if (TREE_CODE (val) != INTEGER_CST
3478 || TREE_CONSTANT_OVERFLOW (val))
3479 return NULL_TREE;
3481 width = TYPE_PRECISION (t);
3482 if (width > HOST_BITS_PER_WIDE_INT)
3484 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3485 lo = 0;
3487 mask_hi = ((unsigned HOST_WIDE_INT) -1
3488 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3489 mask_lo = -1;
3491 else
3493 hi = 0;
3494 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3496 mask_hi = 0;
3497 mask_lo = ((unsigned HOST_WIDE_INT) -1
3498 >> (HOST_BITS_PER_WIDE_INT - width));
3501 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3502 treat VAL as if it were unsigned. */
3503 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3504 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3505 return exp;
3507 /* Handle extension from a narrower type. */
3508 if (TREE_CODE (exp) == NOP_EXPR
3509 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3510 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3512 return NULL_TREE;
3515 /* Subroutine for fold_truthop: determine if an operand is simple enough
3516 to be evaluated unconditionally. */
3518 static int
3519 simple_operand_p (tree exp)
3521 /* Strip any conversions that don't change the machine mode. */
3522 STRIP_NOPS (exp);
3524 return (CONSTANT_CLASS_P (exp)
3525 || TREE_CODE (exp) == SSA_NAME
3526 || (DECL_P (exp)
3527 && ! TREE_ADDRESSABLE (exp)
3528 && ! TREE_THIS_VOLATILE (exp)
3529 && ! DECL_NONLOCAL (exp)
3530 /* Don't regard global variables as simple. They may be
3531 allocated in ways unknown to the compiler (shared memory,
3532 #pragma weak, etc). */
3533 && ! TREE_PUBLIC (exp)
3534 && ! DECL_EXTERNAL (exp)
3535 /* Loading a static variable is unduly expensive, but global
3536 registers aren't expensive. */
3537 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3540 /* The following functions are subroutines to fold_range_test and allow it to
3541 try to change a logical combination of comparisons into a range test.
3543 For example, both
3544 X == 2 || X == 3 || X == 4 || X == 5
3546 X >= 2 && X <= 5
3547 are converted to
3548 (unsigned) (X - 2) <= 3
3550 We describe each set of comparisons as being either inside or outside
3551 a range, using a variable named like IN_P, and then describe the
3552 range with a lower and upper bound. If one of the bounds is omitted,
3553 it represents either the highest or lowest value of the type.
3555 In the comments below, we represent a range by two numbers in brackets
3556 preceded by a "+" to designate being inside that range, or a "-" to
3557 designate being outside that range, so the condition can be inverted by
3558 flipping the prefix. An omitted bound is represented by a "-". For
3559 example, "- [-, 10]" means being outside the range starting at the lowest
3560 possible value and ending at 10, in other words, being greater than 10.
3561 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3562 always false.
3564 We set up things so that the missing bounds are handled in a consistent
3565 manner so neither a missing bound nor "true" and "false" need to be
3566 handled using a special case. */
3568 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3569 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3570 and UPPER1_P are nonzero if the respective argument is an upper bound
3571 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3572 must be specified for a comparison. ARG1 will be converted to ARG0's
3573 type if both are specified. */
3575 static tree
3576 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3577 tree arg1, int upper1_p)
3579 tree tem;
3580 int result;
3581 int sgn0, sgn1;
3583 /* If neither arg represents infinity, do the normal operation.
3584 Else, if not a comparison, return infinity. Else handle the special
3585 comparison rules. Note that most of the cases below won't occur, but
3586 are handled for consistency. */
3588 if (arg0 != 0 && arg1 != 0)
3590 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3591 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3592 STRIP_NOPS (tem);
3593 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3596 if (TREE_CODE_CLASS (code) != tcc_comparison)
3597 return 0;
3599 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3600 for neither. In real maths, we cannot assume open ended ranges are
3601 the same. But, this is computer arithmetic, where numbers are finite.
3602 We can therefore make the transformation of any unbounded range with
3603 the value Z, Z being greater than any representable number. This permits
3604 us to treat unbounded ranges as equal. */
3605 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3606 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3607 switch (code)
3609 case EQ_EXPR:
3610 result = sgn0 == sgn1;
3611 break;
3612 case NE_EXPR:
3613 result = sgn0 != sgn1;
3614 break;
3615 case LT_EXPR:
3616 result = sgn0 < sgn1;
3617 break;
3618 case LE_EXPR:
3619 result = sgn0 <= sgn1;
3620 break;
3621 case GT_EXPR:
3622 result = sgn0 > sgn1;
3623 break;
3624 case GE_EXPR:
3625 result = sgn0 >= sgn1;
3626 break;
3627 default:
3628 gcc_unreachable ();
3631 return constant_boolean_node (result, type);
3634 /* Given EXP, a logical expression, set the range it is testing into
3635 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3636 actually being tested. *PLOW and *PHIGH will be made of the same type
3637 as the returned expression. If EXP is not a comparison, we will most
3638 likely not be returning a useful value and range. */
3640 static tree
3641 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3643 enum tree_code code;
3644 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3645 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3646 int in_p, n_in_p;
3647 tree low, high, n_low, n_high;
3649 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3650 and see if we can refine the range. Some of the cases below may not
3651 happen, but it doesn't seem worth worrying about this. We "continue"
3652 the outer loop when we've changed something; otherwise we "break"
3653 the switch, which will "break" the while. */
3655 in_p = 0;
3656 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3658 while (1)
3660 code = TREE_CODE (exp);
3661 exp_type = TREE_TYPE (exp);
3663 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3665 if (TREE_CODE_LENGTH (code) > 0)
3666 arg0 = TREE_OPERAND (exp, 0);
3667 if (TREE_CODE_CLASS (code) == tcc_comparison
3668 || TREE_CODE_CLASS (code) == tcc_unary
3669 || TREE_CODE_CLASS (code) == tcc_binary)
3670 arg0_type = TREE_TYPE (arg0);
3671 if (TREE_CODE_CLASS (code) == tcc_binary
3672 || TREE_CODE_CLASS (code) == tcc_comparison
3673 || (TREE_CODE_CLASS (code) == tcc_expression
3674 && TREE_CODE_LENGTH (code) > 1))
3675 arg1 = TREE_OPERAND (exp, 1);
3678 switch (code)
3680 case TRUTH_NOT_EXPR:
3681 in_p = ! in_p, exp = arg0;
3682 continue;
3684 case EQ_EXPR: case NE_EXPR:
3685 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3686 /* We can only do something if the range is testing for zero
3687 and if the second operand is an integer constant. Note that
3688 saying something is "in" the range we make is done by
3689 complementing IN_P since it will set in the initial case of
3690 being not equal to zero; "out" is leaving it alone. */
3691 if (low == 0 || high == 0
3692 || ! integer_zerop (low) || ! integer_zerop (high)
3693 || TREE_CODE (arg1) != INTEGER_CST)
3694 break;
3696 switch (code)
3698 case NE_EXPR: /* - [c, c] */
3699 low = high = arg1;
3700 break;
3701 case EQ_EXPR: /* + [c, c] */
3702 in_p = ! in_p, low = high = arg1;
3703 break;
3704 case GT_EXPR: /* - [-, c] */
3705 low = 0, high = arg1;
3706 break;
3707 case GE_EXPR: /* + [c, -] */
3708 in_p = ! in_p, low = arg1, high = 0;
3709 break;
3710 case LT_EXPR: /* - [c, -] */
3711 low = arg1, high = 0;
3712 break;
3713 case LE_EXPR: /* + [-, c] */
3714 in_p = ! in_p, low = 0, high = arg1;
3715 break;
3716 default:
3717 gcc_unreachable ();
3720 /* If this is an unsigned comparison, we also know that EXP is
3721 greater than or equal to zero. We base the range tests we make
3722 on that fact, so we record it here so we can parse existing
3723 range tests. We test arg0_type since often the return type
3724 of, e.g. EQ_EXPR, is boolean. */
3725 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3727 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3728 in_p, low, high, 1,
3729 fold_convert (arg0_type, integer_zero_node),
3730 NULL_TREE))
3731 break;
3733 in_p = n_in_p, low = n_low, high = n_high;
3735 /* If the high bound is missing, but we have a nonzero low
3736 bound, reverse the range so it goes from zero to the low bound
3737 minus 1. */
3738 if (high == 0 && low && ! integer_zerop (low))
3740 in_p = ! in_p;
3741 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3742 integer_one_node, 0);
3743 low = fold_convert (arg0_type, integer_zero_node);
3747 exp = arg0;
3748 continue;
3750 case NEGATE_EXPR:
3751 /* (-x) IN [a,b] -> x in [-b, -a] */
3752 n_low = range_binop (MINUS_EXPR, exp_type,
3753 fold_convert (exp_type, integer_zero_node),
3754 0, high, 1);
3755 n_high = range_binop (MINUS_EXPR, exp_type,
3756 fold_convert (exp_type, integer_zero_node),
3757 0, low, 0);
3758 low = n_low, high = n_high;
3759 exp = arg0;
3760 continue;
3762 case BIT_NOT_EXPR:
3763 /* ~ X -> -X - 1 */
3764 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3765 fold_convert (exp_type, integer_one_node));
3766 continue;
3768 case PLUS_EXPR: case MINUS_EXPR:
3769 if (TREE_CODE (arg1) != INTEGER_CST)
3770 break;
3772 /* If EXP is signed, any overflow in the computation is undefined,
3773 so we don't worry about it so long as our computations on
3774 the bounds don't overflow. For unsigned, overflow is defined
3775 and this is exactly the right thing. */
3776 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3777 arg0_type, low, 0, arg1, 0);
3778 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3779 arg0_type, high, 1, arg1, 0);
3780 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3781 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3782 break;
3784 /* Check for an unsigned range which has wrapped around the maximum
3785 value thus making n_high < n_low, and normalize it. */
3786 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3788 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3789 integer_one_node, 0);
3790 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3791 integer_one_node, 0);
3793 /* If the range is of the form +/- [ x+1, x ], we won't
3794 be able to normalize it. But then, it represents the
3795 whole range or the empty set, so make it
3796 +/- [ -, - ]. */
3797 if (tree_int_cst_equal (n_low, low)
3798 && tree_int_cst_equal (n_high, high))
3799 low = high = 0;
3800 else
3801 in_p = ! in_p;
3803 else
3804 low = n_low, high = n_high;
3806 exp = arg0;
3807 continue;
3809 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3810 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3811 break;
3813 if (! INTEGRAL_TYPE_P (arg0_type)
3814 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3815 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3816 break;
3818 n_low = low, n_high = high;
3820 if (n_low != 0)
3821 n_low = fold_convert (arg0_type, n_low);
3823 if (n_high != 0)
3824 n_high = fold_convert (arg0_type, n_high);
3827 /* If we're converting arg0 from an unsigned type, to exp,
3828 a signed type, we will be doing the comparison as unsigned.
3829 The tests above have already verified that LOW and HIGH
3830 are both positive.
3832 So we have to ensure that we will handle large unsigned
3833 values the same way that the current signed bounds treat
3834 negative values. */
3836 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3838 tree high_positive;
3839 tree equiv_type = lang_hooks.types.type_for_mode
3840 (TYPE_MODE (arg0_type), 1);
3842 /* A range without an upper bound is, naturally, unbounded.
3843 Since convert would have cropped a very large value, use
3844 the max value for the destination type. */
3845 high_positive
3846 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3847 : TYPE_MAX_VALUE (arg0_type);
3849 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3850 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3851 fold_convert (arg0_type,
3852 high_positive),
3853 fold_convert (arg0_type,
3854 integer_one_node));
3856 /* If the low bound is specified, "and" the range with the
3857 range for which the original unsigned value will be
3858 positive. */
3859 if (low != 0)
3861 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3862 1, n_low, n_high, 1,
3863 fold_convert (arg0_type,
3864 integer_zero_node),
3865 high_positive))
3866 break;
3868 in_p = (n_in_p == in_p);
3870 else
3872 /* Otherwise, "or" the range with the range of the input
3873 that will be interpreted as negative. */
3874 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3875 0, n_low, n_high, 1,
3876 fold_convert (arg0_type,
3877 integer_zero_node),
3878 high_positive))
3879 break;
3881 in_p = (in_p != n_in_p);
3885 exp = arg0;
3886 low = n_low, high = n_high;
3887 continue;
3889 default:
3890 break;
3893 break;
3896 /* If EXP is a constant, we can evaluate whether this is true or false. */
3897 if (TREE_CODE (exp) == INTEGER_CST)
3899 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3900 exp, 0, low, 0))
3901 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3902 exp, 1, high, 1)));
3903 low = high = 0;
3904 exp = 0;
3907 *pin_p = in_p, *plow = low, *phigh = high;
3908 return exp;
3911 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3912 type, TYPE, return an expression to test if EXP is in (or out of, depending
3913 on IN_P) the range. Return 0 if the test couldn't be created. */
3915 static tree
3916 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3918 tree etype = TREE_TYPE (exp);
3919 tree value;
3921 if (! in_p)
3923 value = build_range_check (type, exp, 1, low, high);
3924 if (value != 0)
3925 return invert_truthvalue (value);
3927 return 0;
3930 if (low == 0 && high == 0)
3931 return fold_convert (type, integer_one_node);
3933 if (low == 0)
3934 return fold_build2 (LE_EXPR, type, exp, high);
3936 if (high == 0)
3937 return fold_build2 (GE_EXPR, type, exp, low);
3939 if (operand_equal_p (low, high, 0))
3940 return fold_build2 (EQ_EXPR, type, exp, low);
3942 if (integer_zerop (low))
3944 if (! TYPE_UNSIGNED (etype))
3946 etype = lang_hooks.types.unsigned_type (etype);
3947 high = fold_convert (etype, high);
3948 exp = fold_convert (etype, exp);
3950 return build_range_check (type, exp, 1, 0, high);
3953 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3954 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3956 unsigned HOST_WIDE_INT lo;
3957 HOST_WIDE_INT hi;
3958 int prec;
3960 prec = TYPE_PRECISION (etype);
3961 if (prec <= HOST_BITS_PER_WIDE_INT)
3963 hi = 0;
3964 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3966 else
3968 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3969 lo = (unsigned HOST_WIDE_INT) -1;
3972 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3974 if (TYPE_UNSIGNED (etype))
3976 etype = lang_hooks.types.signed_type (etype);
3977 exp = fold_convert (etype, exp);
3979 return fold_build2 (GT_EXPR, type, exp,
3980 fold_convert (etype, integer_zero_node));
3984 value = const_binop (MINUS_EXPR, high, low, 0);
3985 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3987 tree utype, minv, maxv;
3989 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3990 for the type in question, as we rely on this here. */
3991 switch (TREE_CODE (etype))
3993 case INTEGER_TYPE:
3994 case ENUMERAL_TYPE:
3995 case CHAR_TYPE:
3996 utype = lang_hooks.types.unsigned_type (etype);
3997 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3998 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3999 integer_one_node, 1);
4000 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4001 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4002 minv, 1, maxv, 1)))
4004 etype = utype;
4005 high = fold_convert (etype, high);
4006 low = fold_convert (etype, low);
4007 exp = fold_convert (etype, exp);
4008 value = const_binop (MINUS_EXPR, high, low, 0);
4010 break;
4011 default:
4012 break;
4016 if (value != 0 && ! TREE_OVERFLOW (value))
4017 return build_range_check (type,
4018 fold_build2 (MINUS_EXPR, etype, exp, low),
4019 1, fold_convert (etype, integer_zero_node),
4020 value);
4022 return 0;
4025 /* Given two ranges, see if we can merge them into one. Return 1 if we
4026 can, 0 if we can't. Set the output range into the specified parameters. */
4028 static int
4029 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4030 tree high0, int in1_p, tree low1, tree high1)
4032 int no_overlap;
4033 int subset;
4034 int temp;
4035 tree tem;
4036 int in_p;
4037 tree low, high;
4038 int lowequal = ((low0 == 0 && low1 == 0)
4039 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4040 low0, 0, low1, 0)));
4041 int highequal = ((high0 == 0 && high1 == 0)
4042 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4043 high0, 1, high1, 1)));
4045 /* Make range 0 be the range that starts first, or ends last if they
4046 start at the same value. Swap them if it isn't. */
4047 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4048 low0, 0, low1, 0))
4049 || (lowequal
4050 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4051 high1, 1, high0, 1))))
4053 temp = in0_p, in0_p = in1_p, in1_p = temp;
4054 tem = low0, low0 = low1, low1 = tem;
4055 tem = high0, high0 = high1, high1 = tem;
4058 /* Now flag two cases, whether the ranges are disjoint or whether the
4059 second range is totally subsumed in the first. Note that the tests
4060 below are simplified by the ones above. */
4061 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4062 high0, 1, low1, 0));
4063 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4064 high1, 1, high0, 1));
4066 /* We now have four cases, depending on whether we are including or
4067 excluding the two ranges. */
4068 if (in0_p && in1_p)
4070 /* If they don't overlap, the result is false. If the second range
4071 is a subset it is the result. Otherwise, the range is from the start
4072 of the second to the end of the first. */
4073 if (no_overlap)
4074 in_p = 0, low = high = 0;
4075 else if (subset)
4076 in_p = 1, low = low1, high = high1;
4077 else
4078 in_p = 1, low = low1, high = high0;
4081 else if (in0_p && ! in1_p)
4083 /* If they don't overlap, the result is the first range. If they are
4084 equal, the result is false. If the second range is a subset of the
4085 first, and the ranges begin at the same place, we go from just after
4086 the end of the first range to the end of the second. If the second
4087 range is not a subset of the first, or if it is a subset and both
4088 ranges end at the same place, the range starts at the start of the
4089 first range and ends just before the second range.
4090 Otherwise, we can't describe this as a single range. */
4091 if (no_overlap)
4092 in_p = 1, low = low0, high = high0;
4093 else if (lowequal && highequal)
4094 in_p = 0, low = high = 0;
4095 else if (subset && lowequal)
4097 in_p = 1, high = high0;
4098 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4099 integer_one_node, 0);
4101 else if (! subset || highequal)
4103 in_p = 1, low = low0;
4104 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4105 integer_one_node, 0);
4107 else
4108 return 0;
4111 else if (! in0_p && in1_p)
4113 /* If they don't overlap, the result is the second range. If the second
4114 is a subset of the first, the result is false. Otherwise,
4115 the range starts just after the first range and ends at the
4116 end of the second. */
4117 if (no_overlap)
4118 in_p = 1, low = low1, high = high1;
4119 else if (subset || highequal)
4120 in_p = 0, low = high = 0;
4121 else
4123 in_p = 1, high = high1;
4124 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4125 integer_one_node, 0);
4129 else
4131 /* The case where we are excluding both ranges. Here the complex case
4132 is if they don't overlap. In that case, the only time we have a
4133 range is if they are adjacent. If the second is a subset of the
4134 first, the result is the first. Otherwise, the range to exclude
4135 starts at the beginning of the first range and ends at the end of the
4136 second. */
4137 if (no_overlap)
4139 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4140 range_binop (PLUS_EXPR, NULL_TREE,
4141 high0, 1,
4142 integer_one_node, 1),
4143 1, low1, 0)))
4144 in_p = 0, low = low0, high = high1;
4145 else
4147 /* Canonicalize - [min, x] into - [-, x]. */
4148 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4149 switch (TREE_CODE (TREE_TYPE (low0)))
4151 case ENUMERAL_TYPE:
4152 if (TYPE_PRECISION (TREE_TYPE (low0))
4153 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4154 break;
4155 /* FALLTHROUGH */
4156 case INTEGER_TYPE:
4157 case CHAR_TYPE:
4158 if (tree_int_cst_equal (low0,
4159 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4160 low0 = 0;
4161 break;
4162 case POINTER_TYPE:
4163 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4164 && integer_zerop (low0))
4165 low0 = 0;
4166 break;
4167 default:
4168 break;
4171 /* Canonicalize - [x, max] into - [x, -]. */
4172 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4173 switch (TREE_CODE (TREE_TYPE (high1)))
4175 case ENUMERAL_TYPE:
4176 if (TYPE_PRECISION (TREE_TYPE (high1))
4177 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4178 break;
4179 /* FALLTHROUGH */
4180 case INTEGER_TYPE:
4181 case CHAR_TYPE:
4182 if (tree_int_cst_equal (high1,
4183 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4184 high1 = 0;
4185 break;
4186 case POINTER_TYPE:
4187 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4188 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4189 high1, 1,
4190 integer_one_node, 1)))
4191 high1 = 0;
4192 break;
4193 default:
4194 break;
4197 /* The ranges might be also adjacent between the maximum and
4198 minimum values of the given type. For
4199 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4200 return + [x + 1, y - 1]. */
4201 if (low0 == 0 && high1 == 0)
4203 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4204 integer_one_node, 1);
4205 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4206 integer_one_node, 0);
4207 if (low == 0 || high == 0)
4208 return 0;
4210 in_p = 1;
4212 else
4213 return 0;
4216 else if (subset)
4217 in_p = 0, low = low0, high = high0;
4218 else
4219 in_p = 0, low = low0, high = high1;
4222 *pin_p = in_p, *plow = low, *phigh = high;
4223 return 1;
4227 /* Subroutine of fold, looking inside expressions of the form
4228 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4229 of the COND_EXPR. This function is being used also to optimize
4230 A op B ? C : A, by reversing the comparison first.
4232 Return a folded expression whose code is not a COND_EXPR
4233 anymore, or NULL_TREE if no folding opportunity is found. */
4235 static tree
4236 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4238 enum tree_code comp_code = TREE_CODE (arg0);
4239 tree arg00 = TREE_OPERAND (arg0, 0);
4240 tree arg01 = TREE_OPERAND (arg0, 1);
4241 tree arg1_type = TREE_TYPE (arg1);
4242 tree tem;
4244 STRIP_NOPS (arg1);
4245 STRIP_NOPS (arg2);
4247 /* If we have A op 0 ? A : -A, consider applying the following
4248 transformations:
4250 A == 0? A : -A same as -A
4251 A != 0? A : -A same as A
4252 A >= 0? A : -A same as abs (A)
4253 A > 0? A : -A same as abs (A)
4254 A <= 0? A : -A same as -abs (A)
4255 A < 0? A : -A same as -abs (A)
4257 None of these transformations work for modes with signed
4258 zeros. If A is +/-0, the first two transformations will
4259 change the sign of the result (from +0 to -0, or vice
4260 versa). The last four will fix the sign of the result,
4261 even though the original expressions could be positive or
4262 negative, depending on the sign of A.
4264 Note that all these transformations are correct if A is
4265 NaN, since the two alternatives (A and -A) are also NaNs. */
4266 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4267 ? real_zerop (arg01)
4268 : integer_zerop (arg01))
4269 && ((TREE_CODE (arg2) == NEGATE_EXPR
4270 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4271 /* In the case that A is of the form X-Y, '-A' (arg2) may
4272 have already been folded to Y-X, check for that. */
4273 || (TREE_CODE (arg1) == MINUS_EXPR
4274 && TREE_CODE (arg2) == MINUS_EXPR
4275 && operand_equal_p (TREE_OPERAND (arg1, 0),
4276 TREE_OPERAND (arg2, 1), 0)
4277 && operand_equal_p (TREE_OPERAND (arg1, 1),
4278 TREE_OPERAND (arg2, 0), 0))))
4279 switch (comp_code)
4281 case EQ_EXPR:
4282 case UNEQ_EXPR:
4283 tem = fold_convert (arg1_type, arg1);
4284 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4285 case NE_EXPR:
4286 case LTGT_EXPR:
4287 return pedantic_non_lvalue (fold_convert (type, arg1));
4288 case UNGE_EXPR:
4289 case UNGT_EXPR:
4290 if (flag_trapping_math)
4291 break;
4292 /* Fall through. */
4293 case GE_EXPR:
4294 case GT_EXPR:
4295 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4296 arg1 = fold_convert (lang_hooks.types.signed_type
4297 (TREE_TYPE (arg1)), arg1);
4298 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4299 return pedantic_non_lvalue (fold_convert (type, tem));
4300 case UNLE_EXPR:
4301 case UNLT_EXPR:
4302 if (flag_trapping_math)
4303 break;
4304 case LE_EXPR:
4305 case LT_EXPR:
4306 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4307 arg1 = fold_convert (lang_hooks.types.signed_type
4308 (TREE_TYPE (arg1)), arg1);
4309 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4310 return negate_expr (fold_convert (type, tem));
4311 default:
4312 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4313 break;
4316 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4317 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4318 both transformations are correct when A is NaN: A != 0
4319 is then true, and A == 0 is false. */
4321 if (integer_zerop (arg01) && integer_zerop (arg2))
4323 if (comp_code == NE_EXPR)
4324 return pedantic_non_lvalue (fold_convert (type, arg1));
4325 else if (comp_code == EQ_EXPR)
4326 return fold_convert (type, integer_zero_node);
4329 /* Try some transformations of A op B ? A : B.
4331 A == B? A : B same as B
4332 A != B? A : B same as A
4333 A >= B? A : B same as max (A, B)
4334 A > B? A : B same as max (B, A)
4335 A <= B? A : B same as min (A, B)
4336 A < B? A : B same as min (B, A)
4338 As above, these transformations don't work in the presence
4339 of signed zeros. For example, if A and B are zeros of
4340 opposite sign, the first two transformations will change
4341 the sign of the result. In the last four, the original
4342 expressions give different results for (A=+0, B=-0) and
4343 (A=-0, B=+0), but the transformed expressions do not.
4345 The first two transformations are correct if either A or B
4346 is a NaN. In the first transformation, the condition will
4347 be false, and B will indeed be chosen. In the case of the
4348 second transformation, the condition A != B will be true,
4349 and A will be chosen.
4351 The conversions to max() and min() are not correct if B is
4352 a number and A is not. The conditions in the original
4353 expressions will be false, so all four give B. The min()
4354 and max() versions would give a NaN instead. */
4355 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4356 /* Avoid these transformations if the COND_EXPR may be used
4357 as an lvalue in the C++ front-end. PR c++/19199. */
4358 && (in_gimple_form
4359 || strcmp (lang_hooks.name, "GNU C++") != 0
4360 || ! maybe_lvalue_p (arg1)
4361 || ! maybe_lvalue_p (arg2)))
4363 tree comp_op0 = arg00;
4364 tree comp_op1 = arg01;
4365 tree comp_type = TREE_TYPE (comp_op0);
4367 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4368 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4370 comp_type = type;
4371 comp_op0 = arg1;
4372 comp_op1 = arg2;
4375 switch (comp_code)
4377 case EQ_EXPR:
4378 return pedantic_non_lvalue (fold_convert (type, arg2));
4379 case NE_EXPR:
4380 return pedantic_non_lvalue (fold_convert (type, arg1));
4381 case LE_EXPR:
4382 case LT_EXPR:
4383 case UNLE_EXPR:
4384 case UNLT_EXPR:
4385 /* In C++ a ?: expression can be an lvalue, so put the
4386 operand which will be used if they are equal first
4387 so that we can convert this back to the
4388 corresponding COND_EXPR. */
4389 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4391 comp_op0 = fold_convert (comp_type, comp_op0);
4392 comp_op1 = fold_convert (comp_type, comp_op1);
4393 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4394 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4395 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4396 return pedantic_non_lvalue (fold_convert (type, tem));
4398 break;
4399 case GE_EXPR:
4400 case GT_EXPR:
4401 case UNGE_EXPR:
4402 case UNGT_EXPR:
4403 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4405 comp_op0 = fold_convert (comp_type, comp_op0);
4406 comp_op1 = fold_convert (comp_type, comp_op1);
4407 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4408 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4409 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4410 return pedantic_non_lvalue (fold_convert (type, tem));
4412 break;
4413 case UNEQ_EXPR:
4414 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4415 return pedantic_non_lvalue (fold_convert (type, arg2));
4416 break;
4417 case LTGT_EXPR:
4418 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4419 return pedantic_non_lvalue (fold_convert (type, arg1));
4420 break;
4421 default:
4422 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4423 break;
4427 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4428 we might still be able to simplify this. For example,
4429 if C1 is one less or one more than C2, this might have started
4430 out as a MIN or MAX and been transformed by this function.
4431 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4433 if (INTEGRAL_TYPE_P (type)
4434 && TREE_CODE (arg01) == INTEGER_CST
4435 && TREE_CODE (arg2) == INTEGER_CST)
4436 switch (comp_code)
4438 case EQ_EXPR:
4439 /* We can replace A with C1 in this case. */
4440 arg1 = fold_convert (type, arg01);
4441 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4443 case LT_EXPR:
4444 /* If C1 is C2 + 1, this is min(A, C2). */
4445 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4446 OEP_ONLY_CONST)
4447 && operand_equal_p (arg01,
4448 const_binop (PLUS_EXPR, arg2,
4449 integer_one_node, 0),
4450 OEP_ONLY_CONST))
4451 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4452 type, arg1, arg2));
4453 break;
4455 case LE_EXPR:
4456 /* If C1 is C2 - 1, this is min(A, C2). */
4457 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4458 OEP_ONLY_CONST)
4459 && operand_equal_p (arg01,
4460 const_binop (MINUS_EXPR, arg2,
4461 integer_one_node, 0),
4462 OEP_ONLY_CONST))
4463 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4464 type, arg1, arg2));
4465 break;
4467 case GT_EXPR:
4468 /* If C1 is C2 - 1, this is max(A, C2). */
4469 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4470 OEP_ONLY_CONST)
4471 && operand_equal_p (arg01,
4472 const_binop (MINUS_EXPR, arg2,
4473 integer_one_node, 0),
4474 OEP_ONLY_CONST))
4475 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4476 type, arg1, arg2));
4477 break;
4479 case GE_EXPR:
4480 /* If C1 is C2 + 1, this is max(A, C2). */
4481 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4482 OEP_ONLY_CONST)
4483 && operand_equal_p (arg01,
4484 const_binop (PLUS_EXPR, arg2,
4485 integer_one_node, 0),
4486 OEP_ONLY_CONST))
4487 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4488 type, arg1, arg2));
4489 break;
4490 case NE_EXPR:
4491 break;
4492 default:
4493 gcc_unreachable ();
4496 return NULL_TREE;
4501 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4502 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4503 #endif
4505 /* EXP is some logical combination of boolean tests. See if we can
4506 merge it into some range test. Return the new tree if so. */
4508 static tree
4509 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4511 int or_op = (code == TRUTH_ORIF_EXPR
4512 || code == TRUTH_OR_EXPR);
4513 int in0_p, in1_p, in_p;
4514 tree low0, low1, low, high0, high1, high;
4515 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4516 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4517 tree tem;
4519 /* If this is an OR operation, invert both sides; we will invert
4520 again at the end. */
4521 if (or_op)
4522 in0_p = ! in0_p, in1_p = ! in1_p;
4524 /* If both expressions are the same, if we can merge the ranges, and we
4525 can build the range test, return it or it inverted. If one of the
4526 ranges is always true or always false, consider it to be the same
4527 expression as the other. */
4528 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4529 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4530 in1_p, low1, high1)
4531 && 0 != (tem = (build_range_check (type,
4532 lhs != 0 ? lhs
4533 : rhs != 0 ? rhs : integer_zero_node,
4534 in_p, low, high))))
4535 return or_op ? invert_truthvalue (tem) : tem;
4537 /* On machines where the branch cost is expensive, if this is a
4538 short-circuited branch and the underlying object on both sides
4539 is the same, make a non-short-circuit operation. */
4540 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4541 && lhs != 0 && rhs != 0
4542 && (code == TRUTH_ANDIF_EXPR
4543 || code == TRUTH_ORIF_EXPR)
4544 && operand_equal_p (lhs, rhs, 0))
4546 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4547 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4548 which cases we can't do this. */
4549 if (simple_operand_p (lhs))
4550 return build2 (code == TRUTH_ANDIF_EXPR
4551 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4552 type, op0, op1);
4554 else if (lang_hooks.decls.global_bindings_p () == 0
4555 && ! CONTAINS_PLACEHOLDER_P (lhs))
4557 tree common = save_expr (lhs);
4559 if (0 != (lhs = build_range_check (type, common,
4560 or_op ? ! in0_p : in0_p,
4561 low0, high0))
4562 && (0 != (rhs = build_range_check (type, common,
4563 or_op ? ! in1_p : in1_p,
4564 low1, high1))))
4565 return build2 (code == TRUTH_ANDIF_EXPR
4566 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4567 type, lhs, rhs);
4571 return 0;
4574 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4575 bit value. Arrange things so the extra bits will be set to zero if and
4576 only if C is signed-extended to its full width. If MASK is nonzero,
4577 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4579 static tree
4580 unextend (tree c, int p, int unsignedp, tree mask)
4582 tree type = TREE_TYPE (c);
4583 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4584 tree temp;
4586 if (p == modesize || unsignedp)
4587 return c;
4589 /* We work by getting just the sign bit into the low-order bit, then
4590 into the high-order bit, then sign-extend. We then XOR that value
4591 with C. */
4592 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4593 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4595 /* We must use a signed type in order to get an arithmetic right shift.
4596 However, we must also avoid introducing accidental overflows, so that
4597 a subsequent call to integer_zerop will work. Hence we must
4598 do the type conversion here. At this point, the constant is either
4599 zero or one, and the conversion to a signed type can never overflow.
4600 We could get an overflow if this conversion is done anywhere else. */
4601 if (TYPE_UNSIGNED (type))
4602 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4604 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4605 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4606 if (mask != 0)
4607 temp = const_binop (BIT_AND_EXPR, temp,
4608 fold_convert (TREE_TYPE (c), mask), 0);
4609 /* If necessary, convert the type back to match the type of C. */
4610 if (TYPE_UNSIGNED (type))
4611 temp = fold_convert (type, temp);
4613 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4616 /* Find ways of folding logical expressions of LHS and RHS:
4617 Try to merge two comparisons to the same innermost item.
4618 Look for range tests like "ch >= '0' && ch <= '9'".
4619 Look for combinations of simple terms on machines with expensive branches
4620 and evaluate the RHS unconditionally.
4622 For example, if we have p->a == 2 && p->b == 4 and we can make an
4623 object large enough to span both A and B, we can do this with a comparison
4624 against the object ANDed with the a mask.
4626 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4627 operations to do this with one comparison.
4629 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4630 function and the one above.
4632 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4633 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4635 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4636 two operands.
4638 We return the simplified tree or 0 if no optimization is possible. */
4640 static tree
4641 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4643 /* If this is the "or" of two comparisons, we can do something if
4644 the comparisons are NE_EXPR. If this is the "and", we can do something
4645 if the comparisons are EQ_EXPR. I.e.,
4646 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4648 WANTED_CODE is this operation code. For single bit fields, we can
4649 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4650 comparison for one-bit fields. */
4652 enum tree_code wanted_code;
4653 enum tree_code lcode, rcode;
4654 tree ll_arg, lr_arg, rl_arg, rr_arg;
4655 tree ll_inner, lr_inner, rl_inner, rr_inner;
4656 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4657 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4658 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4659 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4660 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4661 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4662 enum machine_mode lnmode, rnmode;
4663 tree ll_mask, lr_mask, rl_mask, rr_mask;
4664 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4665 tree l_const, r_const;
4666 tree lntype, rntype, result;
4667 int first_bit, end_bit;
4668 int volatilep;
4670 /* Start by getting the comparison codes. Fail if anything is volatile.
4671 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4672 it were surrounded with a NE_EXPR. */
4674 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4675 return 0;
4677 lcode = TREE_CODE (lhs);
4678 rcode = TREE_CODE (rhs);
4680 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4682 lhs = build2 (NE_EXPR, truth_type, lhs,
4683 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4684 lcode = NE_EXPR;
4687 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4689 rhs = build2 (NE_EXPR, truth_type, rhs,
4690 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4691 rcode = NE_EXPR;
4694 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4695 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4696 return 0;
4698 ll_arg = TREE_OPERAND (lhs, 0);
4699 lr_arg = TREE_OPERAND (lhs, 1);
4700 rl_arg = TREE_OPERAND (rhs, 0);
4701 rr_arg = TREE_OPERAND (rhs, 1);
4703 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4704 if (simple_operand_p (ll_arg)
4705 && simple_operand_p (lr_arg))
4707 tree result;
4708 if (operand_equal_p (ll_arg, rl_arg, 0)
4709 && operand_equal_p (lr_arg, rr_arg, 0))
4711 result = combine_comparisons (code, lcode, rcode,
4712 truth_type, ll_arg, lr_arg);
4713 if (result)
4714 return result;
4716 else if (operand_equal_p (ll_arg, rr_arg, 0)
4717 && operand_equal_p (lr_arg, rl_arg, 0))
4719 result = combine_comparisons (code, lcode,
4720 swap_tree_comparison (rcode),
4721 truth_type, ll_arg, lr_arg);
4722 if (result)
4723 return result;
4727 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4728 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4730 /* If the RHS can be evaluated unconditionally and its operands are
4731 simple, it wins to evaluate the RHS unconditionally on machines
4732 with expensive branches. In this case, this isn't a comparison
4733 that can be merged. Avoid doing this if the RHS is a floating-point
4734 comparison since those can trap. */
4736 if (BRANCH_COST >= 2
4737 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4738 && simple_operand_p (rl_arg)
4739 && simple_operand_p (rr_arg))
4741 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4742 if (code == TRUTH_OR_EXPR
4743 && lcode == NE_EXPR && integer_zerop (lr_arg)
4744 && rcode == NE_EXPR && integer_zerop (rr_arg)
4745 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4746 return build2 (NE_EXPR, truth_type,
4747 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4748 ll_arg, rl_arg),
4749 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4751 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4752 if (code == TRUTH_AND_EXPR
4753 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4754 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4755 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4756 return build2 (EQ_EXPR, truth_type,
4757 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4758 ll_arg, rl_arg),
4759 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4761 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4762 return build2 (code, truth_type, lhs, rhs);
4765 /* See if the comparisons can be merged. Then get all the parameters for
4766 each side. */
4768 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4769 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4770 return 0;
4772 volatilep = 0;
4773 ll_inner = decode_field_reference (ll_arg,
4774 &ll_bitsize, &ll_bitpos, &ll_mode,
4775 &ll_unsignedp, &volatilep, &ll_mask,
4776 &ll_and_mask);
4777 lr_inner = decode_field_reference (lr_arg,
4778 &lr_bitsize, &lr_bitpos, &lr_mode,
4779 &lr_unsignedp, &volatilep, &lr_mask,
4780 &lr_and_mask);
4781 rl_inner = decode_field_reference (rl_arg,
4782 &rl_bitsize, &rl_bitpos, &rl_mode,
4783 &rl_unsignedp, &volatilep, &rl_mask,
4784 &rl_and_mask);
4785 rr_inner = decode_field_reference (rr_arg,
4786 &rr_bitsize, &rr_bitpos, &rr_mode,
4787 &rr_unsignedp, &volatilep, &rr_mask,
4788 &rr_and_mask);
4790 /* It must be true that the inner operation on the lhs of each
4791 comparison must be the same if we are to be able to do anything.
4792 Then see if we have constants. If not, the same must be true for
4793 the rhs's. */
4794 if (volatilep || ll_inner == 0 || rl_inner == 0
4795 || ! operand_equal_p (ll_inner, rl_inner, 0))
4796 return 0;
4798 if (TREE_CODE (lr_arg) == INTEGER_CST
4799 && TREE_CODE (rr_arg) == INTEGER_CST)
4800 l_const = lr_arg, r_const = rr_arg;
4801 else if (lr_inner == 0 || rr_inner == 0
4802 || ! operand_equal_p (lr_inner, rr_inner, 0))
4803 return 0;
4804 else
4805 l_const = r_const = 0;
4807 /* If either comparison code is not correct for our logical operation,
4808 fail. However, we can convert a one-bit comparison against zero into
4809 the opposite comparison against that bit being set in the field. */
4811 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4812 if (lcode != wanted_code)
4814 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4816 /* Make the left operand unsigned, since we are only interested
4817 in the value of one bit. Otherwise we are doing the wrong
4818 thing below. */
4819 ll_unsignedp = 1;
4820 l_const = ll_mask;
4822 else
4823 return 0;
4826 /* This is analogous to the code for l_const above. */
4827 if (rcode != wanted_code)
4829 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4831 rl_unsignedp = 1;
4832 r_const = rl_mask;
4834 else
4835 return 0;
4838 /* After this point all optimizations will generate bit-field
4839 references, which we might not want. */
4840 if (! lang_hooks.can_use_bit_fields_p ())
4841 return 0;
4843 /* See if we can find a mode that contains both fields being compared on
4844 the left. If we can't, fail. Otherwise, update all constants and masks
4845 to be relative to a field of that size. */
4846 first_bit = MIN (ll_bitpos, rl_bitpos);
4847 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4848 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4849 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4850 volatilep);
4851 if (lnmode == VOIDmode)
4852 return 0;
4854 lnbitsize = GET_MODE_BITSIZE (lnmode);
4855 lnbitpos = first_bit & ~ (lnbitsize - 1);
4856 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4857 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4859 if (BYTES_BIG_ENDIAN)
4861 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4862 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4865 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4866 size_int (xll_bitpos), 0);
4867 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4868 size_int (xrl_bitpos), 0);
4870 if (l_const)
4872 l_const = fold_convert (lntype, l_const);
4873 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4874 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4875 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4876 fold_build1 (BIT_NOT_EXPR,
4877 lntype, ll_mask),
4878 0)))
4880 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4882 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4885 if (r_const)
4887 r_const = fold_convert (lntype, r_const);
4888 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4889 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4890 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4891 fold_build1 (BIT_NOT_EXPR,
4892 lntype, rl_mask),
4893 0)))
4895 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4897 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4901 /* If the right sides are not constant, do the same for it. Also,
4902 disallow this optimization if a size or signedness mismatch occurs
4903 between the left and right sides. */
4904 if (l_const == 0)
4906 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4907 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4908 /* Make sure the two fields on the right
4909 correspond to the left without being swapped. */
4910 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4911 return 0;
4913 first_bit = MIN (lr_bitpos, rr_bitpos);
4914 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4915 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4916 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4917 volatilep);
4918 if (rnmode == VOIDmode)
4919 return 0;
4921 rnbitsize = GET_MODE_BITSIZE (rnmode);
4922 rnbitpos = first_bit & ~ (rnbitsize - 1);
4923 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4924 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4926 if (BYTES_BIG_ENDIAN)
4928 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4929 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4932 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4933 size_int (xlr_bitpos), 0);
4934 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4935 size_int (xrr_bitpos), 0);
4937 /* Make a mask that corresponds to both fields being compared.
4938 Do this for both items being compared. If the operands are the
4939 same size and the bits being compared are in the same position
4940 then we can do this by masking both and comparing the masked
4941 results. */
4942 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4943 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4944 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4946 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4947 ll_unsignedp || rl_unsignedp);
4948 if (! all_ones_mask_p (ll_mask, lnbitsize))
4949 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4951 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4952 lr_unsignedp || rr_unsignedp);
4953 if (! all_ones_mask_p (lr_mask, rnbitsize))
4954 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4956 return build2 (wanted_code, truth_type, lhs, rhs);
4959 /* There is still another way we can do something: If both pairs of
4960 fields being compared are adjacent, we may be able to make a wider
4961 field containing them both.
4963 Note that we still must mask the lhs/rhs expressions. Furthermore,
4964 the mask must be shifted to account for the shift done by
4965 make_bit_field_ref. */
4966 if ((ll_bitsize + ll_bitpos == rl_bitpos
4967 && lr_bitsize + lr_bitpos == rr_bitpos)
4968 || (ll_bitpos == rl_bitpos + rl_bitsize
4969 && lr_bitpos == rr_bitpos + rr_bitsize))
4971 tree type;
4973 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4974 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4975 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4976 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4978 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4979 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4980 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4981 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4983 /* Convert to the smaller type before masking out unwanted bits. */
4984 type = lntype;
4985 if (lntype != rntype)
4987 if (lnbitsize > rnbitsize)
4989 lhs = fold_convert (rntype, lhs);
4990 ll_mask = fold_convert (rntype, ll_mask);
4991 type = rntype;
4993 else if (lnbitsize < rnbitsize)
4995 rhs = fold_convert (lntype, rhs);
4996 lr_mask = fold_convert (lntype, lr_mask);
4997 type = lntype;
5001 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5002 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5004 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5005 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5007 return build2 (wanted_code, truth_type, lhs, rhs);
5010 return 0;
5013 /* Handle the case of comparisons with constants. If there is something in
5014 common between the masks, those bits of the constants must be the same.
5015 If not, the condition is always false. Test for this to avoid generating
5016 incorrect code below. */
5017 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5018 if (! integer_zerop (result)
5019 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5020 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5022 if (wanted_code == NE_EXPR)
5024 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5025 return constant_boolean_node (true, truth_type);
5027 else
5029 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5030 return constant_boolean_node (false, truth_type);
5034 /* Construct the expression we will return. First get the component
5035 reference we will make. Unless the mask is all ones the width of
5036 that field, perform the mask operation. Then compare with the
5037 merged constant. */
5038 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5039 ll_unsignedp || rl_unsignedp);
5041 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5042 if (! all_ones_mask_p (ll_mask, lnbitsize))
5043 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5045 return build2 (wanted_code, truth_type, result,
5046 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5049 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5050 constant. */
5052 static tree
5053 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5055 tree arg0 = op0;
5056 enum tree_code op_code;
5057 tree comp_const = op1;
5058 tree minmax_const;
5059 int consts_equal, consts_lt;
5060 tree inner;
5062 STRIP_SIGN_NOPS (arg0);
5064 op_code = TREE_CODE (arg0);
5065 minmax_const = TREE_OPERAND (arg0, 1);
5066 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5067 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5068 inner = TREE_OPERAND (arg0, 0);
5070 /* If something does not permit us to optimize, return the original tree. */
5071 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5072 || TREE_CODE (comp_const) != INTEGER_CST
5073 || TREE_CONSTANT_OVERFLOW (comp_const)
5074 || TREE_CODE (minmax_const) != INTEGER_CST
5075 || TREE_CONSTANT_OVERFLOW (minmax_const))
5076 return NULL_TREE;
5078 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5079 and GT_EXPR, doing the rest with recursive calls using logical
5080 simplifications. */
5081 switch (code)
5083 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5085 /* FIXME: We should be able to invert code without building a
5086 scratch tree node, but doing so would require us to
5087 duplicate a part of invert_truthvalue here. */
5088 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5089 tem = optimize_minmax_comparison (TREE_CODE (tem),
5090 TREE_TYPE (tem),
5091 TREE_OPERAND (tem, 0),
5092 TREE_OPERAND (tem, 1));
5093 return invert_truthvalue (tem);
5096 case GE_EXPR:
5097 return
5098 fold_build2 (TRUTH_ORIF_EXPR, type,
5099 optimize_minmax_comparison
5100 (EQ_EXPR, type, arg0, comp_const),
5101 optimize_minmax_comparison
5102 (GT_EXPR, type, arg0, comp_const));
5104 case EQ_EXPR:
5105 if (op_code == MAX_EXPR && consts_equal)
5106 /* MAX (X, 0) == 0 -> X <= 0 */
5107 return fold_build2 (LE_EXPR, type, inner, comp_const);
5109 else if (op_code == MAX_EXPR && consts_lt)
5110 /* MAX (X, 0) == 5 -> X == 5 */
5111 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5113 else if (op_code == MAX_EXPR)
5114 /* MAX (X, 0) == -1 -> false */
5115 return omit_one_operand (type, integer_zero_node, inner);
5117 else if (consts_equal)
5118 /* MIN (X, 0) == 0 -> X >= 0 */
5119 return fold_build2 (GE_EXPR, type, inner, comp_const);
5121 else if (consts_lt)
5122 /* MIN (X, 0) == 5 -> false */
5123 return omit_one_operand (type, integer_zero_node, inner);
5125 else
5126 /* MIN (X, 0) == -1 -> X == -1 */
5127 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5129 case GT_EXPR:
5130 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5131 /* MAX (X, 0) > 0 -> X > 0
5132 MAX (X, 0) > 5 -> X > 5 */
5133 return fold_build2 (GT_EXPR, type, inner, comp_const);
5135 else if (op_code == MAX_EXPR)
5136 /* MAX (X, 0) > -1 -> true */
5137 return omit_one_operand (type, integer_one_node, inner);
5139 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5140 /* MIN (X, 0) > 0 -> false
5141 MIN (X, 0) > 5 -> false */
5142 return omit_one_operand (type, integer_zero_node, inner);
5144 else
5145 /* MIN (X, 0) > -1 -> X > -1 */
5146 return fold_build2 (GT_EXPR, type, inner, comp_const);
5148 default:
5149 return NULL_TREE;
5153 /* T is an integer expression that is being multiplied, divided, or taken a
5154 modulus (CODE says which and what kind of divide or modulus) by a
5155 constant C. See if we can eliminate that operation by folding it with
5156 other operations already in T. WIDE_TYPE, if non-null, is a type that
5157 should be used for the computation if wider than our type.
5159 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5160 (X * 2) + (Y * 4). We must, however, be assured that either the original
5161 expression would not overflow or that overflow is undefined for the type
5162 in the language in question.
5164 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5165 the machine has a multiply-accumulate insn or that this is part of an
5166 addressing calculation.
5168 If we return a non-null expression, it is an equivalent form of the
5169 original computation, but need not be in the original type. */
5171 static tree
5172 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5174 /* To avoid exponential search depth, refuse to allow recursion past
5175 three levels. Beyond that (1) it's highly unlikely that we'll find
5176 something interesting and (2) we've probably processed it before
5177 when we built the inner expression. */
5179 static int depth;
5180 tree ret;
5182 if (depth > 3)
5183 return NULL;
5185 depth++;
5186 ret = extract_muldiv_1 (t, c, code, wide_type);
5187 depth--;
5189 return ret;
5192 static tree
5193 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5195 tree type = TREE_TYPE (t);
5196 enum tree_code tcode = TREE_CODE (t);
5197 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5198 > GET_MODE_SIZE (TYPE_MODE (type)))
5199 ? wide_type : type);
5200 tree t1, t2;
5201 int same_p = tcode == code;
5202 tree op0 = NULL_TREE, op1 = NULL_TREE;
5204 /* Don't deal with constants of zero here; they confuse the code below. */
5205 if (integer_zerop (c))
5206 return NULL_TREE;
5208 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5209 op0 = TREE_OPERAND (t, 0);
5211 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5212 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5214 /* Note that we need not handle conditional operations here since fold
5215 already handles those cases. So just do arithmetic here. */
5216 switch (tcode)
5218 case INTEGER_CST:
5219 /* For a constant, we can always simplify if we are a multiply
5220 or (for divide and modulus) if it is a multiple of our constant. */
5221 if (code == MULT_EXPR
5222 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5223 return const_binop (code, fold_convert (ctype, t),
5224 fold_convert (ctype, c), 0);
5225 break;
5227 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5228 /* If op0 is an expression ... */
5229 if ((COMPARISON_CLASS_P (op0)
5230 || UNARY_CLASS_P (op0)
5231 || BINARY_CLASS_P (op0)
5232 || EXPRESSION_CLASS_P (op0))
5233 /* ... and is unsigned, and its type is smaller than ctype,
5234 then we cannot pass through as widening. */
5235 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5236 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5237 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5238 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5239 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5240 /* ... or this is a truncation (t is narrower than op0),
5241 then we cannot pass through this narrowing. */
5242 || (GET_MODE_SIZE (TYPE_MODE (type))
5243 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5244 /* ... or signedness changes for division or modulus,
5245 then we cannot pass through this conversion. */
5246 || (code != MULT_EXPR
5247 && (TYPE_UNSIGNED (ctype)
5248 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5249 break;
5251 /* Pass the constant down and see if we can make a simplification. If
5252 we can, replace this expression with the inner simplification for
5253 possible later conversion to our or some other type. */
5254 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5255 && TREE_CODE (t2) == INTEGER_CST
5256 && ! TREE_CONSTANT_OVERFLOW (t2)
5257 && (0 != (t1 = extract_muldiv (op0, t2, code,
5258 code == MULT_EXPR
5259 ? ctype : NULL_TREE))))
5260 return t1;
5261 break;
5263 case ABS_EXPR:
5264 /* If widening the type changes it from signed to unsigned, then we
5265 must avoid building ABS_EXPR itself as unsigned. */
5266 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5268 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5269 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5271 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5272 return fold_convert (ctype, t1);
5274 break;
5276 /* FALLTHROUGH */
5277 case NEGATE_EXPR:
5278 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5279 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5280 break;
5282 case MIN_EXPR: case MAX_EXPR:
5283 /* If widening the type changes the signedness, then we can't perform
5284 this optimization as that changes the result. */
5285 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5286 break;
5288 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5289 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5290 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5292 if (tree_int_cst_sgn (c) < 0)
5293 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5295 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5296 fold_convert (ctype, t2));
5298 break;
5300 case LSHIFT_EXPR: case RSHIFT_EXPR:
5301 /* If the second operand is constant, this is a multiplication
5302 or floor division, by a power of two, so we can treat it that
5303 way unless the multiplier or divisor overflows. Signed
5304 left-shift overflow is implementation-defined rather than
5305 undefined in C90, so do not convert signed left shift into
5306 multiplication. */
5307 if (TREE_CODE (op1) == INTEGER_CST
5308 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5309 /* const_binop may not detect overflow correctly,
5310 so check for it explicitly here. */
5311 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5312 && TREE_INT_CST_HIGH (op1) == 0
5313 && 0 != (t1 = fold_convert (ctype,
5314 const_binop (LSHIFT_EXPR,
5315 size_one_node,
5316 op1, 0)))
5317 && ! TREE_OVERFLOW (t1))
5318 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5319 ? MULT_EXPR : FLOOR_DIV_EXPR,
5320 ctype, fold_convert (ctype, op0), t1),
5321 c, code, wide_type);
5322 break;
5324 case PLUS_EXPR: case MINUS_EXPR:
5325 /* See if we can eliminate the operation on both sides. If we can, we
5326 can return a new PLUS or MINUS. If we can't, the only remaining
5327 cases where we can do anything are if the second operand is a
5328 constant. */
5329 t1 = extract_muldiv (op0, c, code, wide_type);
5330 t2 = extract_muldiv (op1, c, code, wide_type);
5331 if (t1 != 0 && t2 != 0
5332 && (code == MULT_EXPR
5333 /* If not multiplication, we can only do this if both operands
5334 are divisible by c. */
5335 || (multiple_of_p (ctype, op0, c)
5336 && multiple_of_p (ctype, op1, c))))
5337 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5338 fold_convert (ctype, t2));
5340 /* If this was a subtraction, negate OP1 and set it to be an addition.
5341 This simplifies the logic below. */
5342 if (tcode == MINUS_EXPR)
5343 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5345 if (TREE_CODE (op1) != INTEGER_CST)
5346 break;
5348 /* If either OP1 or C are negative, this optimization is not safe for
5349 some of the division and remainder types while for others we need
5350 to change the code. */
5351 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5353 if (code == CEIL_DIV_EXPR)
5354 code = FLOOR_DIV_EXPR;
5355 else if (code == FLOOR_DIV_EXPR)
5356 code = CEIL_DIV_EXPR;
5357 else if (code != MULT_EXPR
5358 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5359 break;
5362 /* If it's a multiply or a division/modulus operation of a multiple
5363 of our constant, do the operation and verify it doesn't overflow. */
5364 if (code == MULT_EXPR
5365 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5367 op1 = const_binop (code, fold_convert (ctype, op1),
5368 fold_convert (ctype, c), 0);
5369 /* We allow the constant to overflow with wrapping semantics. */
5370 if (op1 == 0
5371 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5372 break;
5374 else
5375 break;
5377 /* If we have an unsigned type is not a sizetype, we cannot widen
5378 the operation since it will change the result if the original
5379 computation overflowed. */
5380 if (TYPE_UNSIGNED (ctype)
5381 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5382 && ctype != type)
5383 break;
5385 /* If we were able to eliminate our operation from the first side,
5386 apply our operation to the second side and reform the PLUS. */
5387 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5388 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5390 /* The last case is if we are a multiply. In that case, we can
5391 apply the distributive law to commute the multiply and addition
5392 if the multiplication of the constants doesn't overflow. */
5393 if (code == MULT_EXPR)
5394 return fold_build2 (tcode, ctype,
5395 fold_build2 (code, ctype,
5396 fold_convert (ctype, op0),
5397 fold_convert (ctype, c)),
5398 op1);
5400 break;
5402 case MULT_EXPR:
5403 /* We have a special case here if we are doing something like
5404 (C * 8) % 4 since we know that's zero. */
5405 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5406 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5407 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5408 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5409 return omit_one_operand (type, integer_zero_node, op0);
5411 /* ... fall through ... */
5413 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5414 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5415 /* If we can extract our operation from the LHS, do so and return a
5416 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5417 do something only if the second operand is a constant. */
5418 if (same_p
5419 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5420 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5421 fold_convert (ctype, op1));
5422 else if (tcode == MULT_EXPR && code == MULT_EXPR
5423 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5424 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5425 fold_convert (ctype, t1));
5426 else if (TREE_CODE (op1) != INTEGER_CST)
5427 return 0;
5429 /* If these are the same operation types, we can associate them
5430 assuming no overflow. */
5431 if (tcode == code
5432 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5433 fold_convert (ctype, c), 0))
5434 && ! TREE_OVERFLOW (t1))
5435 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5437 /* If these operations "cancel" each other, we have the main
5438 optimizations of this pass, which occur when either constant is a
5439 multiple of the other, in which case we replace this with either an
5440 operation or CODE or TCODE.
5442 If we have an unsigned type that is not a sizetype, we cannot do
5443 this since it will change the result if the original computation
5444 overflowed. */
5445 if ((! TYPE_UNSIGNED (ctype)
5446 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5447 && ! flag_wrapv
5448 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5449 || (tcode == MULT_EXPR
5450 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5451 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5453 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5454 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5455 fold_convert (ctype,
5456 const_binop (TRUNC_DIV_EXPR,
5457 op1, c, 0)));
5458 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5459 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5460 fold_convert (ctype,
5461 const_binop (TRUNC_DIV_EXPR,
5462 c, op1, 0)));
5464 break;
5466 default:
5467 break;
5470 return 0;
5473 /* Return a node which has the indicated constant VALUE (either 0 or
5474 1), and is of the indicated TYPE. */
5476 tree
5477 constant_boolean_node (int value, tree type)
5479 if (type == integer_type_node)
5480 return value ? integer_one_node : integer_zero_node;
5481 else if (type == boolean_type_node)
5482 return value ? boolean_true_node : boolean_false_node;
5483 else
5484 return build_int_cst (type, value);
5488 /* Return true if expr looks like an ARRAY_REF and set base and
5489 offset to the appropriate trees. If there is no offset,
5490 offset is set to NULL_TREE. Base will be canonicalized to
5491 something you can get the element type from using
5492 TREE_TYPE (TREE_TYPE (base)). */
5494 static bool
5495 extract_array_ref (tree expr, tree *base, tree *offset)
5497 /* One canonical form is a PLUS_EXPR with the first
5498 argument being an ADDR_EXPR with a possible NOP_EXPR
5499 attached. */
5500 if (TREE_CODE (expr) == PLUS_EXPR)
5502 tree op0 = TREE_OPERAND (expr, 0);
5503 tree inner_base, dummy1;
5504 /* Strip NOP_EXPRs here because the C frontends and/or
5505 folders present us (int *)&x.a + 4B possibly. */
5506 STRIP_NOPS (op0);
5507 if (extract_array_ref (op0, &inner_base, &dummy1))
5509 *base = inner_base;
5510 if (dummy1 == NULL_TREE)
5511 *offset = TREE_OPERAND (expr, 1);
5512 else
5513 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5514 dummy1, TREE_OPERAND (expr, 1));
5515 return true;
5518 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5519 which we transform into an ADDR_EXPR with appropriate
5520 offset. For other arguments to the ADDR_EXPR we assume
5521 zero offset and as such do not care about the ADDR_EXPR
5522 type and strip possible nops from it. */
5523 else if (TREE_CODE (expr) == ADDR_EXPR)
5525 tree op0 = TREE_OPERAND (expr, 0);
5526 if (TREE_CODE (op0) == ARRAY_REF)
5528 *base = TREE_OPERAND (op0, 0);
5529 *offset = TREE_OPERAND (op0, 1);
5531 else
5533 /* Handle array-to-pointer decay as &a. */
5534 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5535 *base = TREE_OPERAND (expr, 0);
5536 else
5537 *base = expr;
5538 *offset = NULL_TREE;
5540 return true;
5542 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5543 else if (SSA_VAR_P (expr)
5544 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5546 *base = expr;
5547 *offset = NULL_TREE;
5548 return true;
5551 return false;
5555 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5556 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5557 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5558 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5559 COND is the first argument to CODE; otherwise (as in the example
5560 given here), it is the second argument. TYPE is the type of the
5561 original expression. Return NULL_TREE if no simplification is
5562 possible. */
5564 static tree
5565 fold_binary_op_with_conditional_arg (enum tree_code code,
5566 tree type, tree op0, tree op1,
5567 tree cond, tree arg, int cond_first_p)
5569 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5570 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5571 tree test, true_value, false_value;
5572 tree lhs = NULL_TREE;
5573 tree rhs = NULL_TREE;
5575 /* This transformation is only worthwhile if we don't have to wrap
5576 arg in a SAVE_EXPR, and the operation can be simplified on at least
5577 one of the branches once its pushed inside the COND_EXPR. */
5578 if (!TREE_CONSTANT (arg))
5579 return NULL_TREE;
5581 if (TREE_CODE (cond) == COND_EXPR)
5583 test = TREE_OPERAND (cond, 0);
5584 true_value = TREE_OPERAND (cond, 1);
5585 false_value = TREE_OPERAND (cond, 2);
5586 /* If this operand throws an expression, then it does not make
5587 sense to try to perform a logical or arithmetic operation
5588 involving it. */
5589 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5590 lhs = true_value;
5591 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5592 rhs = false_value;
5594 else
5596 tree testtype = TREE_TYPE (cond);
5597 test = cond;
5598 true_value = constant_boolean_node (true, testtype);
5599 false_value = constant_boolean_node (false, testtype);
5602 arg = fold_convert (arg_type, arg);
5603 if (lhs == 0)
5605 true_value = fold_convert (cond_type, true_value);
5606 if (cond_first_p)
5607 lhs = fold_build2 (code, type, true_value, arg);
5608 else
5609 lhs = fold_build2 (code, type, arg, true_value);
5611 if (rhs == 0)
5613 false_value = fold_convert (cond_type, false_value);
5614 if (cond_first_p)
5615 rhs = fold_build2 (code, type, false_value, arg);
5616 else
5617 rhs = fold_build2 (code, type, arg, false_value);
5620 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5621 return fold_convert (type, test);
5625 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5627 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5628 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5629 ADDEND is the same as X.
5631 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5632 and finite. The problematic cases are when X is zero, and its mode
5633 has signed zeros. In the case of rounding towards -infinity,
5634 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5635 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5637 static bool
5638 fold_real_zero_addition_p (tree type, tree addend, int negate)
5640 if (!real_zerop (addend))
5641 return false;
5643 /* Don't allow the fold with -fsignaling-nans. */
5644 if (HONOR_SNANS (TYPE_MODE (type)))
5645 return false;
5647 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5648 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5649 return true;
5651 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5652 if (TREE_CODE (addend) == REAL_CST
5653 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5654 negate = !negate;
5656 /* The mode has signed zeros, and we have to honor their sign.
5657 In this situation, there is only one case we can return true for.
5658 X - 0 is the same as X unless rounding towards -infinity is
5659 supported. */
5660 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5663 /* Subroutine of fold() that checks comparisons of built-in math
5664 functions against real constants.
5666 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5667 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5668 is the type of the result and ARG0 and ARG1 are the operands of the
5669 comparison. ARG1 must be a TREE_REAL_CST.
5671 The function returns the constant folded tree if a simplification
5672 can be made, and NULL_TREE otherwise. */
5674 static tree
5675 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5676 tree type, tree arg0, tree arg1)
5678 REAL_VALUE_TYPE c;
5680 if (BUILTIN_SQRT_P (fcode))
5682 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5683 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5685 c = TREE_REAL_CST (arg1);
5686 if (REAL_VALUE_NEGATIVE (c))
5688 /* sqrt(x) < y is always false, if y is negative. */
5689 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5690 return omit_one_operand (type, integer_zero_node, arg);
5692 /* sqrt(x) > y is always true, if y is negative and we
5693 don't care about NaNs, i.e. negative values of x. */
5694 if (code == NE_EXPR || !HONOR_NANS (mode))
5695 return omit_one_operand (type, integer_one_node, arg);
5697 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5698 return fold_build2 (GE_EXPR, type, arg,
5699 build_real (TREE_TYPE (arg), dconst0));
5701 else if (code == GT_EXPR || code == GE_EXPR)
5703 REAL_VALUE_TYPE c2;
5705 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5706 real_convert (&c2, mode, &c2);
5708 if (REAL_VALUE_ISINF (c2))
5710 /* sqrt(x) > y is x == +Inf, when y is very large. */
5711 if (HONOR_INFINITIES (mode))
5712 return fold_build2 (EQ_EXPR, type, arg,
5713 build_real (TREE_TYPE (arg), c2));
5715 /* sqrt(x) > y is always false, when y is very large
5716 and we don't care about infinities. */
5717 return omit_one_operand (type, integer_zero_node, arg);
5720 /* sqrt(x) > c is the same as x > c*c. */
5721 return fold_build2 (code, type, arg,
5722 build_real (TREE_TYPE (arg), c2));
5724 else if (code == LT_EXPR || code == LE_EXPR)
5726 REAL_VALUE_TYPE c2;
5728 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5729 real_convert (&c2, mode, &c2);
5731 if (REAL_VALUE_ISINF (c2))
5733 /* sqrt(x) < y is always true, when y is a very large
5734 value and we don't care about NaNs or Infinities. */
5735 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5736 return omit_one_operand (type, integer_one_node, arg);
5738 /* sqrt(x) < y is x != +Inf when y is very large and we
5739 don't care about NaNs. */
5740 if (! HONOR_NANS (mode))
5741 return fold_build2 (NE_EXPR, type, arg,
5742 build_real (TREE_TYPE (arg), c2));
5744 /* sqrt(x) < y is x >= 0 when y is very large and we
5745 don't care about Infinities. */
5746 if (! HONOR_INFINITIES (mode))
5747 return fold_build2 (GE_EXPR, type, arg,
5748 build_real (TREE_TYPE (arg), dconst0));
5750 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5751 if (lang_hooks.decls.global_bindings_p () != 0
5752 || CONTAINS_PLACEHOLDER_P (arg))
5753 return NULL_TREE;
5755 arg = save_expr (arg);
5756 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5757 fold_build2 (GE_EXPR, type, arg,
5758 build_real (TREE_TYPE (arg),
5759 dconst0)),
5760 fold_build2 (NE_EXPR, type, arg,
5761 build_real (TREE_TYPE (arg),
5762 c2)));
5765 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5766 if (! HONOR_NANS (mode))
5767 return fold_build2 (code, type, arg,
5768 build_real (TREE_TYPE (arg), c2));
5770 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5771 if (lang_hooks.decls.global_bindings_p () == 0
5772 && ! CONTAINS_PLACEHOLDER_P (arg))
5774 arg = save_expr (arg);
5775 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5776 fold_build2 (GE_EXPR, type, arg,
5777 build_real (TREE_TYPE (arg),
5778 dconst0)),
5779 fold_build2 (code, type, arg,
5780 build_real (TREE_TYPE (arg),
5781 c2)));
5786 return NULL_TREE;
5789 /* Subroutine of fold() that optimizes comparisons against Infinities,
5790 either +Inf or -Inf.
5792 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5793 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5794 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5796 The function returns the constant folded tree if a simplification
5797 can be made, and NULL_TREE otherwise. */
5799 static tree
5800 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5802 enum machine_mode mode;
5803 REAL_VALUE_TYPE max;
5804 tree temp;
5805 bool neg;
5807 mode = TYPE_MODE (TREE_TYPE (arg0));
5809 /* For negative infinity swap the sense of the comparison. */
5810 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5811 if (neg)
5812 code = swap_tree_comparison (code);
5814 switch (code)
5816 case GT_EXPR:
5817 /* x > +Inf is always false, if with ignore sNANs. */
5818 if (HONOR_SNANS (mode))
5819 return NULL_TREE;
5820 return omit_one_operand (type, integer_zero_node, arg0);
5822 case LE_EXPR:
5823 /* x <= +Inf is always true, if we don't case about NaNs. */
5824 if (! HONOR_NANS (mode))
5825 return omit_one_operand (type, integer_one_node, arg0);
5827 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5828 if (lang_hooks.decls.global_bindings_p () == 0
5829 && ! CONTAINS_PLACEHOLDER_P (arg0))
5831 arg0 = save_expr (arg0);
5832 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5834 break;
5836 case EQ_EXPR:
5837 case GE_EXPR:
5838 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5839 real_maxval (&max, neg, mode);
5840 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5841 arg0, build_real (TREE_TYPE (arg0), max));
5843 case LT_EXPR:
5844 /* x < +Inf is always equal to x <= DBL_MAX. */
5845 real_maxval (&max, neg, mode);
5846 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5847 arg0, build_real (TREE_TYPE (arg0), max));
5849 case NE_EXPR:
5850 /* x != +Inf is always equal to !(x > DBL_MAX). */
5851 real_maxval (&max, neg, mode);
5852 if (! HONOR_NANS (mode))
5853 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5854 arg0, build_real (TREE_TYPE (arg0), max));
5856 /* The transformation below creates non-gimple code and thus is
5857 not appropriate if we are in gimple form. */
5858 if (in_gimple_form)
5859 return NULL_TREE;
5861 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5862 arg0, build_real (TREE_TYPE (arg0), max));
5863 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5865 default:
5866 break;
5869 return NULL_TREE;
5872 /* Subroutine of fold() that optimizes comparisons of a division by
5873 a nonzero integer constant against an integer constant, i.e.
5874 X/C1 op C2.
5876 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5877 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5878 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5880 The function returns the constant folded tree if a simplification
5881 can be made, and NULL_TREE otherwise. */
5883 static tree
5884 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5886 tree prod, tmp, hi, lo;
5887 tree arg00 = TREE_OPERAND (arg0, 0);
5888 tree arg01 = TREE_OPERAND (arg0, 1);
5889 unsigned HOST_WIDE_INT lpart;
5890 HOST_WIDE_INT hpart;
5891 int overflow;
5893 /* We have to do this the hard way to detect unsigned overflow.
5894 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5895 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5896 TREE_INT_CST_HIGH (arg01),
5897 TREE_INT_CST_LOW (arg1),
5898 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5899 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5900 prod = force_fit_type (prod, -1, overflow, false);
5902 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5904 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5905 lo = prod;
5907 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5908 overflow = add_double (TREE_INT_CST_LOW (prod),
5909 TREE_INT_CST_HIGH (prod),
5910 TREE_INT_CST_LOW (tmp),
5911 TREE_INT_CST_HIGH (tmp),
5912 &lpart, &hpart);
5913 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5914 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5915 TREE_CONSTANT_OVERFLOW (prod));
5917 else if (tree_int_cst_sgn (arg01) >= 0)
5919 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5920 switch (tree_int_cst_sgn (arg1))
5922 case -1:
5923 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5924 hi = prod;
5925 break;
5927 case 0:
5928 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5929 hi = tmp;
5930 break;
5932 case 1:
5933 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5934 lo = prod;
5935 break;
5937 default:
5938 gcc_unreachable ();
5941 else
5943 /* A negative divisor reverses the relational operators. */
5944 code = swap_tree_comparison (code);
5946 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5947 switch (tree_int_cst_sgn (arg1))
5949 case -1:
5950 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5951 lo = prod;
5952 break;
5954 case 0:
5955 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5956 lo = tmp;
5957 break;
5959 case 1:
5960 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5961 hi = prod;
5962 break;
5964 default:
5965 gcc_unreachable ();
5969 switch (code)
5971 case EQ_EXPR:
5972 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5973 return omit_one_operand (type, integer_zero_node, arg00);
5974 if (TREE_OVERFLOW (hi))
5975 return fold_build2 (GE_EXPR, type, arg00, lo);
5976 if (TREE_OVERFLOW (lo))
5977 return fold_build2 (LE_EXPR, type, arg00, hi);
5978 return build_range_check (type, arg00, 1, lo, hi);
5980 case NE_EXPR:
5981 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5982 return omit_one_operand (type, integer_one_node, arg00);
5983 if (TREE_OVERFLOW (hi))
5984 return fold_build2 (LT_EXPR, type, arg00, lo);
5985 if (TREE_OVERFLOW (lo))
5986 return fold_build2 (GT_EXPR, type, arg00, hi);
5987 return build_range_check (type, arg00, 0, lo, hi);
5989 case LT_EXPR:
5990 if (TREE_OVERFLOW (lo))
5991 return omit_one_operand (type, integer_zero_node, arg00);
5992 return fold_build2 (LT_EXPR, type, arg00, lo);
5994 case LE_EXPR:
5995 if (TREE_OVERFLOW (hi))
5996 return omit_one_operand (type, integer_one_node, arg00);
5997 return fold_build2 (LE_EXPR, type, arg00, hi);
5999 case GT_EXPR:
6000 if (TREE_OVERFLOW (hi))
6001 return omit_one_operand (type, integer_zero_node, arg00);
6002 return fold_build2 (GT_EXPR, type, arg00, hi);
6004 case GE_EXPR:
6005 if (TREE_OVERFLOW (lo))
6006 return omit_one_operand (type, integer_one_node, arg00);
6007 return fold_build2 (GE_EXPR, type, arg00, lo);
6009 default:
6010 break;
6013 return NULL_TREE;
6017 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6018 equality/inequality test, then return a simplified form of the test
6019 using a sign testing. Otherwise return NULL. TYPE is the desired
6020 result type. */
6022 static tree
6023 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6024 tree result_type)
6026 /* If this is testing a single bit, we can optimize the test. */
6027 if ((code == NE_EXPR || code == EQ_EXPR)
6028 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6029 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6031 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6032 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6033 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6035 if (arg00 != NULL_TREE
6036 /* This is only a win if casting to a signed type is cheap,
6037 i.e. when arg00's type is not a partial mode. */
6038 && TYPE_PRECISION (TREE_TYPE (arg00))
6039 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6041 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6042 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6043 result_type, fold_convert (stype, arg00),
6044 fold_convert (stype, integer_zero_node));
6048 return NULL_TREE;
6051 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6052 equality/inequality test, then return a simplified form of
6053 the test using shifts and logical operations. Otherwise return
6054 NULL. TYPE is the desired result type. */
6056 tree
6057 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6058 tree result_type)
6060 /* If this is testing a single bit, we can optimize the test. */
6061 if ((code == NE_EXPR || code == EQ_EXPR)
6062 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6063 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6065 tree inner = TREE_OPERAND (arg0, 0);
6066 tree type = TREE_TYPE (arg0);
6067 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6068 enum machine_mode operand_mode = TYPE_MODE (type);
6069 int ops_unsigned;
6070 tree signed_type, unsigned_type, intermediate_type;
6071 tree tem;
6073 /* First, see if we can fold the single bit test into a sign-bit
6074 test. */
6075 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6076 result_type);
6077 if (tem)
6078 return tem;
6080 /* Otherwise we have (A & C) != 0 where C is a single bit,
6081 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6082 Similarly for (A & C) == 0. */
6084 /* If INNER is a right shift of a constant and it plus BITNUM does
6085 not overflow, adjust BITNUM and INNER. */
6086 if (TREE_CODE (inner) == RSHIFT_EXPR
6087 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6088 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6089 && bitnum < TYPE_PRECISION (type)
6090 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6091 bitnum - TYPE_PRECISION (type)))
6093 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6094 inner = TREE_OPERAND (inner, 0);
6097 /* If we are going to be able to omit the AND below, we must do our
6098 operations as unsigned. If we must use the AND, we have a choice.
6099 Normally unsigned is faster, but for some machines signed is. */
6100 #ifdef LOAD_EXTEND_OP
6101 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6102 && !flag_syntax_only) ? 0 : 1;
6103 #else
6104 ops_unsigned = 1;
6105 #endif
6107 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6108 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6109 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6110 inner = fold_convert (intermediate_type, inner);
6112 if (bitnum != 0)
6113 inner = build2 (RSHIFT_EXPR, intermediate_type,
6114 inner, size_int (bitnum));
6116 if (code == EQ_EXPR)
6117 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6118 inner, integer_one_node);
6120 /* Put the AND last so it can combine with more things. */
6121 inner = build2 (BIT_AND_EXPR, intermediate_type,
6122 inner, integer_one_node);
6124 /* Make sure to return the proper type. */
6125 inner = fold_convert (result_type, inner);
6127 return inner;
6129 return NULL_TREE;
6132 /* Check whether we are allowed to reorder operands arg0 and arg1,
6133 such that the evaluation of arg1 occurs before arg0. */
6135 static bool
6136 reorder_operands_p (tree arg0, tree arg1)
6138 if (! flag_evaluation_order)
6139 return true;
6140 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6141 return true;
6142 return ! TREE_SIDE_EFFECTS (arg0)
6143 && ! TREE_SIDE_EFFECTS (arg1);
6146 /* Test whether it is preferable two swap two operands, ARG0 and
6147 ARG1, for example because ARG0 is an integer constant and ARG1
6148 isn't. If REORDER is true, only recommend swapping if we can
6149 evaluate the operands in reverse order. */
6151 bool
6152 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6154 STRIP_SIGN_NOPS (arg0);
6155 STRIP_SIGN_NOPS (arg1);
6157 if (TREE_CODE (arg1) == INTEGER_CST)
6158 return 0;
6159 if (TREE_CODE (arg0) == INTEGER_CST)
6160 return 1;
6162 if (TREE_CODE (arg1) == REAL_CST)
6163 return 0;
6164 if (TREE_CODE (arg0) == REAL_CST)
6165 return 1;
6167 if (TREE_CODE (arg1) == COMPLEX_CST)
6168 return 0;
6169 if (TREE_CODE (arg0) == COMPLEX_CST)
6170 return 1;
6172 if (TREE_CONSTANT (arg1))
6173 return 0;
6174 if (TREE_CONSTANT (arg0))
6175 return 1;
6177 if (optimize_size)
6178 return 0;
6180 if (reorder && flag_evaluation_order
6181 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6182 return 0;
6184 if (DECL_P (arg1))
6185 return 0;
6186 if (DECL_P (arg0))
6187 return 1;
6189 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6190 for commutative and comparison operators. Ensuring a canonical
6191 form allows the optimizers to find additional redundancies without
6192 having to explicitly check for both orderings. */
6193 if (TREE_CODE (arg0) == SSA_NAME
6194 && TREE_CODE (arg1) == SSA_NAME
6195 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6196 return 1;
6198 return 0;
6201 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6202 ARG0 is extended to a wider type. */
6204 static tree
6205 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6207 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6208 tree arg1_unw;
6209 tree shorter_type, outer_type;
6210 tree min, max;
6211 bool above, below;
6213 if (arg0_unw == arg0)
6214 return NULL_TREE;
6215 shorter_type = TREE_TYPE (arg0_unw);
6217 #ifdef HAVE_canonicalize_funcptr_for_compare
6218 /* Disable this optimization if we're casting a function pointer
6219 type on targets that require function pointer canonicalization. */
6220 if (HAVE_canonicalize_funcptr_for_compare
6221 && TREE_CODE (shorter_type) == POINTER_TYPE
6222 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6223 return NULL_TREE;
6224 #endif
6226 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6227 return NULL_TREE;
6229 arg1_unw = get_unwidened (arg1, shorter_type);
6230 if (!arg1_unw)
6231 return NULL_TREE;
6233 /* If possible, express the comparison in the shorter mode. */
6234 if ((code == EQ_EXPR || code == NE_EXPR
6235 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6236 && (TREE_TYPE (arg1_unw) == shorter_type
6237 || (TREE_CODE (arg1_unw) == INTEGER_CST
6238 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6239 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6240 && int_fits_type_p (arg1_unw, shorter_type))))
6241 return fold_build2 (code, type, arg0_unw,
6242 fold_convert (shorter_type, arg1_unw));
6244 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6245 return NULL_TREE;
6247 /* If we are comparing with the integer that does not fit into the range
6248 of the shorter type, the result is known. */
6249 outer_type = TREE_TYPE (arg1_unw);
6250 min = lower_bound_in_type (outer_type, shorter_type);
6251 max = upper_bound_in_type (outer_type, shorter_type);
6253 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6254 max, arg1_unw));
6255 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6256 arg1_unw, min));
6258 switch (code)
6260 case EQ_EXPR:
6261 if (above || below)
6262 return omit_one_operand (type, integer_zero_node, arg0);
6263 break;
6265 case NE_EXPR:
6266 if (above || below)
6267 return omit_one_operand (type, integer_one_node, arg0);
6268 break;
6270 case LT_EXPR:
6271 case LE_EXPR:
6272 if (above)
6273 return omit_one_operand (type, integer_one_node, arg0);
6274 else if (below)
6275 return omit_one_operand (type, integer_zero_node, arg0);
6277 case GT_EXPR:
6278 case GE_EXPR:
6279 if (above)
6280 return omit_one_operand (type, integer_zero_node, arg0);
6281 else if (below)
6282 return omit_one_operand (type, integer_one_node, arg0);
6284 default:
6285 break;
6288 return NULL_TREE;
6291 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6292 ARG0 just the signedness is changed. */
6294 static tree
6295 fold_sign_changed_comparison (enum tree_code code, tree type,
6296 tree arg0, tree arg1)
6298 tree arg0_inner, tmp;
6299 tree inner_type, outer_type;
6301 if (TREE_CODE (arg0) != NOP_EXPR
6302 && TREE_CODE (arg0) != CONVERT_EXPR)
6303 return NULL_TREE;
6305 outer_type = TREE_TYPE (arg0);
6306 arg0_inner = TREE_OPERAND (arg0, 0);
6307 inner_type = TREE_TYPE (arg0_inner);
6309 #ifdef HAVE_canonicalize_funcptr_for_compare
6310 /* Disable this optimization if we're casting a function pointer
6311 type on targets that require function pointer canonicalization. */
6312 if (HAVE_canonicalize_funcptr_for_compare
6313 && TREE_CODE (inner_type) == POINTER_TYPE
6314 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6315 return NULL_TREE;
6316 #endif
6318 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6319 return NULL_TREE;
6321 if (TREE_CODE (arg1) != INTEGER_CST
6322 && !((TREE_CODE (arg1) == NOP_EXPR
6323 || TREE_CODE (arg1) == CONVERT_EXPR)
6324 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6325 return NULL_TREE;
6327 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6328 && code != NE_EXPR
6329 && code != EQ_EXPR)
6330 return NULL_TREE;
6332 if (TREE_CODE (arg1) == INTEGER_CST)
6334 tmp = build_int_cst_wide (inner_type,
6335 TREE_INT_CST_LOW (arg1),
6336 TREE_INT_CST_HIGH (arg1));
6337 arg1 = force_fit_type (tmp, 0,
6338 TREE_OVERFLOW (arg1),
6339 TREE_CONSTANT_OVERFLOW (arg1));
6341 else
6342 arg1 = fold_convert (inner_type, arg1);
6344 return fold_build2 (code, type, arg0_inner, arg1);
6347 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6348 step of the array. Reconstructs s and delta in the case of s * delta
6349 being an integer constant (and thus already folded).
6350 ADDR is the address. MULT is the multiplicative expression.
6351 If the function succeeds, the new address expression is returned. Otherwise
6352 NULL_TREE is returned. */
6354 static tree
6355 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6357 tree s, delta, step;
6358 tree ref = TREE_OPERAND (addr, 0), pref;
6359 tree ret, pos;
6360 tree itype;
6362 /* Canonicalize op1 into a possibly non-constant delta
6363 and an INTEGER_CST s. */
6364 if (TREE_CODE (op1) == MULT_EXPR)
6366 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6368 STRIP_NOPS (arg0);
6369 STRIP_NOPS (arg1);
6371 if (TREE_CODE (arg0) == INTEGER_CST)
6373 s = arg0;
6374 delta = arg1;
6376 else if (TREE_CODE (arg1) == INTEGER_CST)
6378 s = arg1;
6379 delta = arg0;
6381 else
6382 return NULL_TREE;
6384 else if (TREE_CODE (op1) == INTEGER_CST)
6386 delta = op1;
6387 s = NULL_TREE;
6389 else
6391 /* Simulate we are delta * 1. */
6392 delta = op1;
6393 s = integer_one_node;
6396 for (;; ref = TREE_OPERAND (ref, 0))
6398 if (TREE_CODE (ref) == ARRAY_REF)
6400 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6401 if (! itype)
6402 continue;
6404 step = array_ref_element_size (ref);
6405 if (TREE_CODE (step) != INTEGER_CST)
6406 continue;
6408 if (s)
6410 if (! tree_int_cst_equal (step, s))
6411 continue;
6413 else
6415 /* Try if delta is a multiple of step. */
6416 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6417 if (! tmp)
6418 continue;
6419 delta = tmp;
6422 break;
6425 if (!handled_component_p (ref))
6426 return NULL_TREE;
6429 /* We found the suitable array reference. So copy everything up to it,
6430 and replace the index. */
6432 pref = TREE_OPERAND (addr, 0);
6433 ret = copy_node (pref);
6434 pos = ret;
6436 while (pref != ref)
6438 pref = TREE_OPERAND (pref, 0);
6439 TREE_OPERAND (pos, 0) = copy_node (pref);
6440 pos = TREE_OPERAND (pos, 0);
6443 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6444 fold_convert (itype,
6445 TREE_OPERAND (pos, 1)),
6446 fold_convert (itype, delta));
6448 return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6452 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6453 means A >= Y && A != MAX, but in this case we know that
6454 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6456 static tree
6457 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6459 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6461 if (TREE_CODE (bound) == LT_EXPR)
6462 a = TREE_OPERAND (bound, 0);
6463 else if (TREE_CODE (bound) == GT_EXPR)
6464 a = TREE_OPERAND (bound, 1);
6465 else
6466 return NULL_TREE;
6468 typea = TREE_TYPE (a);
6469 if (!INTEGRAL_TYPE_P (typea)
6470 && !POINTER_TYPE_P (typea))
6471 return NULL_TREE;
6473 if (TREE_CODE (ineq) == LT_EXPR)
6475 a1 = TREE_OPERAND (ineq, 1);
6476 y = TREE_OPERAND (ineq, 0);
6478 else if (TREE_CODE (ineq) == GT_EXPR)
6480 a1 = TREE_OPERAND (ineq, 0);
6481 y = TREE_OPERAND (ineq, 1);
6483 else
6484 return NULL_TREE;
6486 if (TREE_TYPE (a1) != typea)
6487 return NULL_TREE;
6489 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6490 if (!integer_onep (diff))
6491 return NULL_TREE;
6493 return fold_build2 (GE_EXPR, type, a, y);
6496 /* Fold complex addition when both components are accessible by parts.
6497 Return non-null if successful. CODE should be PLUS_EXPR for addition,
6498 or MINUS_EXPR for subtraction. */
6500 static tree
6501 fold_complex_add (tree type, tree ac, tree bc, enum tree_code code)
6503 tree ar, ai, br, bi, rr, ri, inner_type;
6505 if (TREE_CODE (ac) == COMPLEX_EXPR)
6506 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6507 else if (TREE_CODE (ac) == COMPLEX_CST)
6508 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6509 else
6510 return NULL;
6512 if (TREE_CODE (bc) == COMPLEX_EXPR)
6513 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6514 else if (TREE_CODE (bc) == COMPLEX_CST)
6515 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6516 else
6517 return NULL;
6519 inner_type = TREE_TYPE (type);
6521 rr = fold_build2 (code, inner_type, ar, br);
6522 ri = fold_build2 (code, inner_type, ai, bi);
6524 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6527 /* Perform some simplifications of complex multiplication when one or more
6528 of the components are constants or zeros. Return non-null if successful. */
6530 tree
6531 fold_complex_mult_parts (tree type, tree ar, tree ai, tree br, tree bi)
6533 tree rr, ri, inner_type, zero;
6534 bool ar0, ai0, br0, bi0, bi1;
6536 inner_type = TREE_TYPE (type);
6537 zero = NULL;
6539 if (SCALAR_FLOAT_TYPE_P (inner_type))
6541 ar0 = ai0 = br0 = bi0 = bi1 = false;
6543 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6545 if (TREE_CODE (ar) == REAL_CST
6546 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6547 ar0 = true, zero = ar;
6549 if (TREE_CODE (ai) == REAL_CST
6550 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6551 ai0 = true, zero = ai;
6553 if (TREE_CODE (br) == REAL_CST
6554 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6555 br0 = true, zero = br;
6557 if (TREE_CODE (bi) == REAL_CST)
6559 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6560 bi0 = true, zero = bi;
6561 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6562 bi1 = true;
6565 else
6567 ar0 = integer_zerop (ar);
6568 if (ar0)
6569 zero = ar;
6570 ai0 = integer_zerop (ai);
6571 if (ai0)
6572 zero = ai;
6573 br0 = integer_zerop (br);
6574 if (br0)
6575 zero = br;
6576 bi0 = integer_zerop (bi);
6577 if (bi0)
6579 zero = bi;
6580 bi1 = false;
6582 else
6583 bi1 = integer_onep (bi);
6586 /* We won't optimize anything below unless something is zero. */
6587 if (zero == NULL)
6588 return NULL;
6590 if (ai0 && br0 && bi1)
6592 rr = zero;
6593 ri = ar;
6595 else if (ai0 && bi0)
6597 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6598 ri = zero;
6600 else if (ai0 && br0)
6602 rr = zero;
6603 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6605 else if (ar0 && bi0)
6607 rr = zero;
6608 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6610 else if (ar0 && br0)
6612 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6613 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6614 ri = zero;
6616 else if (bi0)
6618 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6619 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6621 else if (ai0)
6623 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6624 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6626 else if (br0)
6628 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6629 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6630 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6632 else if (ar0)
6634 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6635 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6636 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6638 else
6639 return NULL;
6641 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6644 static tree
6645 fold_complex_mult (tree type, tree ac, tree bc)
6647 tree ar, ai, br, bi;
6649 if (TREE_CODE (ac) == COMPLEX_EXPR)
6650 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6651 else if (TREE_CODE (ac) == COMPLEX_CST)
6652 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6653 else
6654 return NULL;
6656 if (TREE_CODE (bc) == COMPLEX_EXPR)
6657 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6658 else if (TREE_CODE (bc) == COMPLEX_CST)
6659 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6660 else
6661 return NULL;
6663 return fold_complex_mult_parts (type, ar, ai, br, bi);
6666 /* Perform some simplifications of complex division when one or more of
6667 the components are constants or zeros. Return non-null if successful. */
6669 tree
6670 fold_complex_div_parts (tree type, tree ar, tree ai, tree br, tree bi,
6671 enum tree_code code)
6673 tree rr, ri, inner_type, zero;
6674 bool ar0, ai0, br0, bi0, bi1;
6676 inner_type = TREE_TYPE (type);
6677 zero = NULL;
6679 if (SCALAR_FLOAT_TYPE_P (inner_type))
6681 ar0 = ai0 = br0 = bi0 = bi1 = false;
6683 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6685 if (TREE_CODE (ar) == REAL_CST
6686 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6687 ar0 = true, zero = ar;
6689 if (TREE_CODE (ai) == REAL_CST
6690 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6691 ai0 = true, zero = ai;
6693 if (TREE_CODE (br) == REAL_CST
6694 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6695 br0 = true, zero = br;
6697 if (TREE_CODE (bi) == REAL_CST)
6699 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6700 bi0 = true, zero = bi;
6701 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6702 bi1 = true;
6705 else
6707 ar0 = integer_zerop (ar);
6708 if (ar0)
6709 zero = ar;
6710 ai0 = integer_zerop (ai);
6711 if (ai0)
6712 zero = ai;
6713 br0 = integer_zerop (br);
6714 if (br0)
6715 zero = br;
6716 bi0 = integer_zerop (bi);
6717 if (bi0)
6719 zero = bi;
6720 bi1 = false;
6722 else
6723 bi1 = integer_onep (bi);
6726 /* We won't optimize anything below unless something is zero. */
6727 if (zero == NULL)
6728 return NULL;
6730 if (ai0 && bi0)
6732 rr = fold_build2 (code, inner_type, ar, br);
6733 ri = zero;
6735 else if (ai0 && br0)
6737 rr = zero;
6738 ri = fold_build2 (code, inner_type, ar, bi);
6739 ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
6741 else if (ar0 && bi0)
6743 rr = zero;
6744 ri = fold_build2 (code, inner_type, ai, br);
6746 else if (ar0 && br0)
6748 rr = fold_build2 (code, inner_type, ai, bi);
6749 ri = zero;
6751 else if (bi0)
6753 rr = fold_build2 (code, inner_type, ar, br);
6754 ri = fold_build2 (code, inner_type, ai, br);
6756 else if (br0)
6758 rr = fold_build2 (code, inner_type, ai, bi);
6759 ri = fold_build2 (code, inner_type, ar, bi);
6760 ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
6762 else
6763 return NULL;
6765 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6768 static tree
6769 fold_complex_div (tree type, tree ac, tree bc, enum tree_code code)
6771 tree ar, ai, br, bi;
6773 if (TREE_CODE (ac) == COMPLEX_EXPR)
6774 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6775 else if (TREE_CODE (ac) == COMPLEX_CST)
6776 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6777 else
6778 return NULL;
6780 if (TREE_CODE (bc) == COMPLEX_EXPR)
6781 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6782 else if (TREE_CODE (bc) == COMPLEX_CST)
6783 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6784 else
6785 return NULL;
6787 return fold_complex_div_parts (type, ar, ai, br, bi, code);
6790 /* Fold a unary expression of code CODE and type TYPE with operand
6791 OP0. Return the folded expression if folding is successful.
6792 Otherwise, return NULL_TREE. */
6794 tree
6795 fold_unary (enum tree_code code, tree type, tree op0)
6797 tree tem;
6798 tree arg0;
6799 enum tree_code_class kind = TREE_CODE_CLASS (code);
6801 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6802 && TREE_CODE_LENGTH (code) == 1);
6804 arg0 = op0;
6805 if (arg0)
6807 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6809 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6810 STRIP_SIGN_NOPS (arg0);
6812 else
6814 /* Strip any conversions that don't change the mode. This
6815 is safe for every expression, except for a comparison
6816 expression because its signedness is derived from its
6817 operands.
6819 Note that this is done as an internal manipulation within
6820 the constant folder, in order to find the simplest
6821 representation of the arguments so that their form can be
6822 studied. In any cases, the appropriate type conversions
6823 should be put back in the tree that will get out of the
6824 constant folder. */
6825 STRIP_NOPS (arg0);
6829 if (TREE_CODE_CLASS (code) == tcc_unary)
6831 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6832 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6833 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6834 else if (TREE_CODE (arg0) == COND_EXPR)
6836 tree arg01 = TREE_OPERAND (arg0, 1);
6837 tree arg02 = TREE_OPERAND (arg0, 2);
6838 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6839 arg01 = fold_build1 (code, type, arg01);
6840 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6841 arg02 = fold_build1 (code, type, arg02);
6842 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6843 arg01, arg02);
6845 /* If this was a conversion, and all we did was to move into
6846 inside the COND_EXPR, bring it back out. But leave it if
6847 it is a conversion from integer to integer and the
6848 result precision is no wider than a word since such a
6849 conversion is cheap and may be optimized away by combine,
6850 while it couldn't if it were outside the COND_EXPR. Then return
6851 so we don't get into an infinite recursion loop taking the
6852 conversion out and then back in. */
6854 if ((code == NOP_EXPR || code == CONVERT_EXPR
6855 || code == NON_LVALUE_EXPR)
6856 && TREE_CODE (tem) == COND_EXPR
6857 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6858 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6859 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6860 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6861 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6862 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6863 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6864 && (INTEGRAL_TYPE_P
6865 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6866 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6867 || flag_syntax_only))
6868 tem = build1 (code, type,
6869 build3 (COND_EXPR,
6870 TREE_TYPE (TREE_OPERAND
6871 (TREE_OPERAND (tem, 1), 0)),
6872 TREE_OPERAND (tem, 0),
6873 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6874 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6875 return tem;
6877 else if (COMPARISON_CLASS_P (arg0))
6879 if (TREE_CODE (type) == BOOLEAN_TYPE)
6881 arg0 = copy_node (arg0);
6882 TREE_TYPE (arg0) = type;
6883 return arg0;
6885 else if (TREE_CODE (type) != INTEGER_TYPE)
6886 return fold_build3 (COND_EXPR, type, arg0,
6887 fold_build1 (code, type,
6888 integer_one_node),
6889 fold_build1 (code, type,
6890 integer_zero_node));
6894 switch (code)
6896 case NOP_EXPR:
6897 case FLOAT_EXPR:
6898 case CONVERT_EXPR:
6899 case FIX_TRUNC_EXPR:
6900 case FIX_CEIL_EXPR:
6901 case FIX_FLOOR_EXPR:
6902 case FIX_ROUND_EXPR:
6903 if (TREE_TYPE (op0) == type)
6904 return op0;
6906 /* Handle cases of two conversions in a row. */
6907 if (TREE_CODE (op0) == NOP_EXPR
6908 || TREE_CODE (op0) == CONVERT_EXPR)
6910 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6911 tree inter_type = TREE_TYPE (op0);
6912 int inside_int = INTEGRAL_TYPE_P (inside_type);
6913 int inside_ptr = POINTER_TYPE_P (inside_type);
6914 int inside_float = FLOAT_TYPE_P (inside_type);
6915 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6916 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6917 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6918 int inter_int = INTEGRAL_TYPE_P (inter_type);
6919 int inter_ptr = POINTER_TYPE_P (inter_type);
6920 int inter_float = FLOAT_TYPE_P (inter_type);
6921 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6922 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6923 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6924 int final_int = INTEGRAL_TYPE_P (type);
6925 int final_ptr = POINTER_TYPE_P (type);
6926 int final_float = FLOAT_TYPE_P (type);
6927 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6928 unsigned int final_prec = TYPE_PRECISION (type);
6929 int final_unsignedp = TYPE_UNSIGNED (type);
6931 /* In addition to the cases of two conversions in a row
6932 handled below, if we are converting something to its own
6933 type via an object of identical or wider precision, neither
6934 conversion is needed. */
6935 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6936 && ((inter_int && final_int) || (inter_float && final_float))
6937 && inter_prec >= final_prec)
6938 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6940 /* Likewise, if the intermediate and final types are either both
6941 float or both integer, we don't need the middle conversion if
6942 it is wider than the final type and doesn't change the signedness
6943 (for integers). Avoid this if the final type is a pointer
6944 since then we sometimes need the inner conversion. Likewise if
6945 the outer has a precision not equal to the size of its mode. */
6946 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6947 || (inter_float && inside_float)
6948 || (inter_vec && inside_vec))
6949 && inter_prec >= inside_prec
6950 && (inter_float || inter_vec
6951 || inter_unsignedp == inside_unsignedp)
6952 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6953 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6954 && ! final_ptr
6955 && (! final_vec || inter_prec == inside_prec))
6956 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6958 /* If we have a sign-extension of a zero-extended value, we can
6959 replace that by a single zero-extension. */
6960 if (inside_int && inter_int && final_int
6961 && inside_prec < inter_prec && inter_prec < final_prec
6962 && inside_unsignedp && !inter_unsignedp)
6963 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6965 /* Two conversions in a row are not needed unless:
6966 - some conversion is floating-point (overstrict for now), or
6967 - some conversion is a vector (overstrict for now), or
6968 - the intermediate type is narrower than both initial and
6969 final, or
6970 - the intermediate type and innermost type differ in signedness,
6971 and the outermost type is wider than the intermediate, or
6972 - the initial type is a pointer type and the precisions of the
6973 intermediate and final types differ, or
6974 - the final type is a pointer type and the precisions of the
6975 initial and intermediate types differ. */
6976 if (! inside_float && ! inter_float && ! final_float
6977 && ! inside_vec && ! inter_vec && ! final_vec
6978 && (inter_prec > inside_prec || inter_prec > final_prec)
6979 && ! (inside_int && inter_int
6980 && inter_unsignedp != inside_unsignedp
6981 && inter_prec < final_prec)
6982 && ((inter_unsignedp && inter_prec > inside_prec)
6983 == (final_unsignedp && final_prec > inter_prec))
6984 && ! (inside_ptr && inter_prec != final_prec)
6985 && ! (final_ptr && inside_prec != inter_prec)
6986 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6987 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6988 && ! final_ptr)
6989 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6992 if (TREE_CODE (op0) == MODIFY_EXPR
6993 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6994 /* Detect assigning a bitfield. */
6995 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6996 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6998 /* Don't leave an assignment inside a conversion
6999 unless assigning a bitfield. */
7000 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7001 /* First do the assignment, then return converted constant. */
7002 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7003 TREE_NO_WARNING (tem) = 1;
7004 TREE_USED (tem) = 1;
7005 return tem;
7008 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7009 constants (if x has signed type, the sign bit cannot be set
7010 in c). This folds extension into the BIT_AND_EXPR. */
7011 if (INTEGRAL_TYPE_P (type)
7012 && TREE_CODE (type) != BOOLEAN_TYPE
7013 && TREE_CODE (op0) == BIT_AND_EXPR
7014 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7016 tree and = op0;
7017 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7018 int change = 0;
7020 if (TYPE_UNSIGNED (TREE_TYPE (and))
7021 || (TYPE_PRECISION (type)
7022 <= TYPE_PRECISION (TREE_TYPE (and))))
7023 change = 1;
7024 else if (TYPE_PRECISION (TREE_TYPE (and1))
7025 <= HOST_BITS_PER_WIDE_INT
7026 && host_integerp (and1, 1))
7028 unsigned HOST_WIDE_INT cst;
7030 cst = tree_low_cst (and1, 1);
7031 cst &= (HOST_WIDE_INT) -1
7032 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7033 change = (cst == 0);
7034 #ifdef LOAD_EXTEND_OP
7035 if (change
7036 && !flag_syntax_only
7037 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7038 == ZERO_EXTEND))
7040 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7041 and0 = fold_convert (uns, and0);
7042 and1 = fold_convert (uns, and1);
7044 #endif
7046 if (change)
7048 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7049 TREE_INT_CST_HIGH (and1));
7050 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7051 TREE_CONSTANT_OVERFLOW (and1));
7052 return fold_build2 (BIT_AND_EXPR, type,
7053 fold_convert (type, and0), tem);
7057 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7058 T2 being pointers to types of the same size. */
7059 if (POINTER_TYPE_P (type)
7060 && BINARY_CLASS_P (arg0)
7061 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7062 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7064 tree arg00 = TREE_OPERAND (arg0, 0);
7065 tree t0 = type;
7066 tree t1 = TREE_TYPE (arg00);
7067 tree tt0 = TREE_TYPE (t0);
7068 tree tt1 = TREE_TYPE (t1);
7069 tree s0 = TYPE_SIZE (tt0);
7070 tree s1 = TYPE_SIZE (tt1);
7072 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7073 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7074 TREE_OPERAND (arg0, 1));
7077 tem = fold_convert_const (code, type, arg0);
7078 return tem ? tem : NULL_TREE;
7080 case VIEW_CONVERT_EXPR:
7081 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7082 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7083 return NULL_TREE;
7085 case NEGATE_EXPR:
7086 if (negate_expr_p (arg0))
7087 return fold_convert (type, negate_expr (arg0));
7088 /* Convert - (~A) to A + 1. */
7089 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
7090 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
7091 build_int_cst (type, 1));
7092 return NULL_TREE;
7094 case ABS_EXPR:
7095 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7096 return fold_abs_const (arg0, type);
7097 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7098 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7099 /* Convert fabs((double)float) into (double)fabsf(float). */
7100 else if (TREE_CODE (arg0) == NOP_EXPR
7101 && TREE_CODE (type) == REAL_TYPE)
7103 tree targ0 = strip_float_extensions (arg0);
7104 if (targ0 != arg0)
7105 return fold_convert (type, fold_build1 (ABS_EXPR,
7106 TREE_TYPE (targ0),
7107 targ0));
7109 else if (tree_expr_nonnegative_p (arg0))
7110 return arg0;
7112 /* Strip sign ops from argument. */
7113 if (TREE_CODE (type) == REAL_TYPE)
7115 tem = fold_strip_sign_ops (arg0);
7116 if (tem)
7117 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7119 return NULL_TREE;
7121 case CONJ_EXPR:
7122 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7123 return fold_convert (type, arg0);
7124 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7125 return build2 (COMPLEX_EXPR, type,
7126 TREE_OPERAND (arg0, 0),
7127 negate_expr (TREE_OPERAND (arg0, 1)));
7128 else if (TREE_CODE (arg0) == COMPLEX_CST)
7129 return build_complex (type, TREE_REALPART (arg0),
7130 negate_expr (TREE_IMAGPART (arg0)));
7131 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7132 return fold_build2 (TREE_CODE (arg0), type,
7133 fold_build1 (CONJ_EXPR, type,
7134 TREE_OPERAND (arg0, 0)),
7135 fold_build1 (CONJ_EXPR, type,
7136 TREE_OPERAND (arg0, 1)));
7137 else if (TREE_CODE (arg0) == CONJ_EXPR)
7138 return TREE_OPERAND (arg0, 0);
7139 return NULL_TREE;
7141 case BIT_NOT_EXPR:
7142 if (TREE_CODE (arg0) == INTEGER_CST)
7143 return fold_not_const (arg0, type);
7144 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7145 return TREE_OPERAND (arg0, 0);
7146 /* Convert ~ (-A) to A - 1. */
7147 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7148 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7149 build_int_cst (type, 1));
7150 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7151 else if (INTEGRAL_TYPE_P (type)
7152 && ((TREE_CODE (arg0) == MINUS_EXPR
7153 && integer_onep (TREE_OPERAND (arg0, 1)))
7154 || (TREE_CODE (arg0) == PLUS_EXPR
7155 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7156 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7157 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7158 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7159 && (tem = fold_unary (BIT_NOT_EXPR, type,
7160 fold_convert (type,
7161 TREE_OPERAND (arg0, 0)))))
7162 return fold_build2 (BIT_XOR_EXPR, type, tem,
7163 fold_convert (type, TREE_OPERAND (arg0, 1)));
7164 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7165 && (tem = fold_unary (BIT_NOT_EXPR, type,
7166 fold_convert (type,
7167 TREE_OPERAND (arg0, 1)))))
7168 return fold_build2 (BIT_XOR_EXPR, type,
7169 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7171 return NULL_TREE;
7173 case TRUTH_NOT_EXPR:
7174 /* The argument to invert_truthvalue must have Boolean type. */
7175 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7176 arg0 = fold_convert (boolean_type_node, arg0);
7178 /* Note that the operand of this must be an int
7179 and its values must be 0 or 1.
7180 ("true" is a fixed value perhaps depending on the language,
7181 but we don't handle values other than 1 correctly yet.) */
7182 tem = invert_truthvalue (arg0);
7183 /* Avoid infinite recursion. */
7184 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7185 return NULL_TREE;
7186 return fold_convert (type, tem);
7188 case REALPART_EXPR:
7189 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7190 return NULL_TREE;
7191 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7192 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7193 TREE_OPERAND (arg0, 1));
7194 else if (TREE_CODE (arg0) == COMPLEX_CST)
7195 return TREE_REALPART (arg0);
7196 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7197 return fold_build2 (TREE_CODE (arg0), type,
7198 fold_build1 (REALPART_EXPR, type,
7199 TREE_OPERAND (arg0, 0)),
7200 fold_build1 (REALPART_EXPR, type,
7201 TREE_OPERAND (arg0, 1)));
7202 return NULL_TREE;
7204 case IMAGPART_EXPR:
7205 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7206 return fold_convert (type, integer_zero_node);
7207 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7208 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7209 TREE_OPERAND (arg0, 0));
7210 else if (TREE_CODE (arg0) == COMPLEX_CST)
7211 return TREE_IMAGPART (arg0);
7212 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7213 return fold_build2 (TREE_CODE (arg0), type,
7214 fold_build1 (IMAGPART_EXPR, type,
7215 TREE_OPERAND (arg0, 0)),
7216 fold_build1 (IMAGPART_EXPR, type,
7217 TREE_OPERAND (arg0, 1)));
7218 return NULL_TREE;
7220 default:
7221 return NULL_TREE;
7222 } /* switch (code) */
7225 /* Fold a binary expression of code CODE and type TYPE with operands
7226 OP0 and OP1. Return the folded expression if folding is
7227 successful. Otherwise, return NULL_TREE. */
7229 tree
7230 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7232 tree t1 = NULL_TREE;
7233 tree tem;
7234 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7235 enum tree_code_class kind = TREE_CODE_CLASS (code);
7237 /* WINS will be nonzero when the switch is done
7238 if all operands are constant. */
7239 int wins = 1;
7241 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7242 && TREE_CODE_LENGTH (code) == 2);
7244 arg0 = op0;
7245 arg1 = op1;
7247 if (arg0)
7249 tree subop;
7251 /* Strip any conversions that don't change the mode. This is
7252 safe for every expression, except for a comparison expression
7253 because its signedness is derived from its operands. So, in
7254 the latter case, only strip conversions that don't change the
7255 signedness.
7257 Note that this is done as an internal manipulation within the
7258 constant folder, in order to find the simplest representation
7259 of the arguments so that their form can be studied. In any
7260 cases, the appropriate type conversions should be put back in
7261 the tree that will get out of the constant folder. */
7262 if (kind == tcc_comparison)
7263 STRIP_SIGN_NOPS (arg0);
7264 else
7265 STRIP_NOPS (arg0);
7267 if (TREE_CODE (arg0) == COMPLEX_CST)
7268 subop = TREE_REALPART (arg0);
7269 else
7270 subop = arg0;
7272 if (TREE_CODE (subop) != INTEGER_CST
7273 && TREE_CODE (subop) != REAL_CST)
7274 /* Note that TREE_CONSTANT isn't enough:
7275 static var addresses are constant but we can't
7276 do arithmetic on them. */
7277 wins = 0;
7280 if (arg1)
7282 tree subop;
7284 /* Strip any conversions that don't change the mode. This is
7285 safe for every expression, except for a comparison expression
7286 because its signedness is derived from its operands. So, in
7287 the latter case, only strip conversions that don't change the
7288 signedness.
7290 Note that this is done as an internal manipulation within the
7291 constant folder, in order to find the simplest representation
7292 of the arguments so that their form can be studied. In any
7293 cases, the appropriate type conversions should be put back in
7294 the tree that will get out of the constant folder. */
7295 if (kind == tcc_comparison)
7296 STRIP_SIGN_NOPS (arg1);
7297 else
7298 STRIP_NOPS (arg1);
7300 if (TREE_CODE (arg1) == COMPLEX_CST)
7301 subop = TREE_REALPART (arg1);
7302 else
7303 subop = arg1;
7305 if (TREE_CODE (subop) != INTEGER_CST
7306 && TREE_CODE (subop) != REAL_CST)
7307 /* Note that TREE_CONSTANT isn't enough:
7308 static var addresses are constant but we can't
7309 do arithmetic on them. */
7310 wins = 0;
7313 /* If this is a commutative operation, and ARG0 is a constant, move it
7314 to ARG1 to reduce the number of tests below. */
7315 if (commutative_tree_code (code)
7316 && tree_swap_operands_p (arg0, arg1, true))
7317 return fold_build2 (code, type, op1, op0);
7319 /* Now WINS is set as described above,
7320 ARG0 is the first operand of EXPR,
7321 and ARG1 is the second operand (if it has more than one operand).
7323 First check for cases where an arithmetic operation is applied to a
7324 compound, conditional, or comparison operation. Push the arithmetic
7325 operation inside the compound or conditional to see if any folding
7326 can then be done. Convert comparison to conditional for this purpose.
7327 The also optimizes non-constant cases that used to be done in
7328 expand_expr.
7330 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7331 one of the operands is a comparison and the other is a comparison, a
7332 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
7333 code below would make the expression more complex. Change it to a
7334 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7335 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7337 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7338 || code == EQ_EXPR || code == NE_EXPR)
7339 && ((truth_value_p (TREE_CODE (arg0))
7340 && (truth_value_p (TREE_CODE (arg1))
7341 || (TREE_CODE (arg1) == BIT_AND_EXPR
7342 && integer_onep (TREE_OPERAND (arg1, 1)))))
7343 || (truth_value_p (TREE_CODE (arg1))
7344 && (truth_value_p (TREE_CODE (arg0))
7345 || (TREE_CODE (arg0) == BIT_AND_EXPR
7346 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7348 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7349 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7350 : TRUTH_XOR_EXPR,
7351 boolean_type_node,
7352 fold_convert (boolean_type_node, arg0),
7353 fold_convert (boolean_type_node, arg1));
7355 if (code == EQ_EXPR)
7356 tem = invert_truthvalue (tem);
7358 return fold_convert (type, tem);
7361 if (TREE_CODE_CLASS (code) == tcc_comparison
7362 && TREE_CODE (arg0) == COMPOUND_EXPR)
7363 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7364 fold_build2 (code, type, TREE_OPERAND (arg0, 1), arg1));
7365 else if (TREE_CODE_CLASS (code) == tcc_comparison
7366 && TREE_CODE (arg1) == COMPOUND_EXPR)
7367 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7368 fold_build2 (code, type, arg0, TREE_OPERAND (arg1, 1)));
7369 else if (TREE_CODE_CLASS (code) == tcc_binary
7370 || TREE_CODE_CLASS (code) == tcc_comparison)
7372 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7373 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7374 fold_build2 (code, type, TREE_OPERAND (arg0, 1),
7375 arg1));
7376 if (TREE_CODE (arg1) == COMPOUND_EXPR
7377 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7378 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7379 fold_build2 (code, type,
7380 arg0, TREE_OPERAND (arg1, 1)));
7382 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7384 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7385 arg0, arg1,
7386 /*cond_first_p=*/1);
7387 if (tem != NULL_TREE)
7388 return tem;
7391 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7393 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7394 arg1, arg0,
7395 /*cond_first_p=*/0);
7396 if (tem != NULL_TREE)
7397 return tem;
7401 switch (code)
7403 case PLUS_EXPR:
7404 /* A + (-B) -> A - B */
7405 if (TREE_CODE (arg1) == NEGATE_EXPR)
7406 return fold_build2 (MINUS_EXPR, type,
7407 fold_convert (type, arg0),
7408 fold_convert (type, TREE_OPERAND (arg1, 0)));
7409 /* (-A) + B -> B - A */
7410 if (TREE_CODE (arg0) == NEGATE_EXPR
7411 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7412 return fold_build2 (MINUS_EXPR, type,
7413 fold_convert (type, arg1),
7414 fold_convert (type, TREE_OPERAND (arg0, 0)));
7415 /* Convert ~A + 1 to -A. */
7416 if (INTEGRAL_TYPE_P (type)
7417 && TREE_CODE (arg0) == BIT_NOT_EXPR
7418 && integer_onep (arg1))
7419 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7421 if (TREE_CODE (type) == COMPLEX_TYPE)
7423 tem = fold_complex_add (type, arg0, arg1, PLUS_EXPR);
7424 if (tem)
7425 return tem;
7428 if (! FLOAT_TYPE_P (type))
7430 if (integer_zerop (arg1))
7431 return non_lvalue (fold_convert (type, arg0));
7433 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7434 with a constant, and the two constants have no bits in common,
7435 we should treat this as a BIT_IOR_EXPR since this may produce more
7436 simplifications. */
7437 if (TREE_CODE (arg0) == BIT_AND_EXPR
7438 && TREE_CODE (arg1) == BIT_AND_EXPR
7439 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7440 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7441 && integer_zerop (const_binop (BIT_AND_EXPR,
7442 TREE_OPERAND (arg0, 1),
7443 TREE_OPERAND (arg1, 1), 0)))
7445 code = BIT_IOR_EXPR;
7446 goto bit_ior;
7449 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7450 (plus (plus (mult) (mult)) (foo)) so that we can
7451 take advantage of the factoring cases below. */
7452 if (((TREE_CODE (arg0) == PLUS_EXPR
7453 || TREE_CODE (arg0) == MINUS_EXPR)
7454 && TREE_CODE (arg1) == MULT_EXPR)
7455 || ((TREE_CODE (arg1) == PLUS_EXPR
7456 || TREE_CODE (arg1) == MINUS_EXPR)
7457 && TREE_CODE (arg0) == MULT_EXPR))
7459 tree parg0, parg1, parg, marg;
7460 enum tree_code pcode;
7462 if (TREE_CODE (arg1) == MULT_EXPR)
7463 parg = arg0, marg = arg1;
7464 else
7465 parg = arg1, marg = arg0;
7466 pcode = TREE_CODE (parg);
7467 parg0 = TREE_OPERAND (parg, 0);
7468 parg1 = TREE_OPERAND (parg, 1);
7469 STRIP_NOPS (parg0);
7470 STRIP_NOPS (parg1);
7472 if (TREE_CODE (parg0) == MULT_EXPR
7473 && TREE_CODE (parg1) != MULT_EXPR)
7474 return fold_build2 (pcode, type,
7475 fold_build2 (PLUS_EXPR, type,
7476 fold_convert (type, parg0),
7477 fold_convert (type, marg)),
7478 fold_convert (type, parg1));
7479 if (TREE_CODE (parg0) != MULT_EXPR
7480 && TREE_CODE (parg1) == MULT_EXPR)
7481 return fold_build2 (PLUS_EXPR, type,
7482 fold_convert (type, parg0),
7483 fold_build2 (pcode, type,
7484 fold_convert (type, marg),
7485 fold_convert (type,
7486 parg1)));
7489 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7491 tree arg00, arg01, arg10, arg11;
7492 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7494 /* (A * C) + (B * C) -> (A+B) * C.
7495 We are most concerned about the case where C is a constant,
7496 but other combinations show up during loop reduction. Since
7497 it is not difficult, try all four possibilities. */
7499 arg00 = TREE_OPERAND (arg0, 0);
7500 arg01 = TREE_OPERAND (arg0, 1);
7501 arg10 = TREE_OPERAND (arg1, 0);
7502 arg11 = TREE_OPERAND (arg1, 1);
7503 same = NULL_TREE;
7505 if (operand_equal_p (arg01, arg11, 0))
7506 same = arg01, alt0 = arg00, alt1 = arg10;
7507 else if (operand_equal_p (arg00, arg10, 0))
7508 same = arg00, alt0 = arg01, alt1 = arg11;
7509 else if (operand_equal_p (arg00, arg11, 0))
7510 same = arg00, alt0 = arg01, alt1 = arg10;
7511 else if (operand_equal_p (arg01, arg10, 0))
7512 same = arg01, alt0 = arg00, alt1 = arg11;
7514 /* No identical multiplicands; see if we can find a common
7515 power-of-two factor in non-power-of-two multiplies. This
7516 can help in multi-dimensional array access. */
7517 else if (TREE_CODE (arg01) == INTEGER_CST
7518 && TREE_CODE (arg11) == INTEGER_CST
7519 && TREE_INT_CST_HIGH (arg01) == 0
7520 && TREE_INT_CST_HIGH (arg11) == 0)
7522 HOST_WIDE_INT int01, int11, tmp;
7523 int01 = TREE_INT_CST_LOW (arg01);
7524 int11 = TREE_INT_CST_LOW (arg11);
7526 /* Move min of absolute values to int11. */
7527 if ((int01 >= 0 ? int01 : -int01)
7528 < (int11 >= 0 ? int11 : -int11))
7530 tmp = int01, int01 = int11, int11 = tmp;
7531 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7532 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7535 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7537 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7538 build_int_cst (NULL_TREE,
7539 int01 / int11));
7540 alt1 = arg10;
7541 same = arg11;
7545 if (same)
7546 return fold_build2 (MULT_EXPR, type,
7547 fold_build2 (PLUS_EXPR, type,
7548 fold_convert (type, alt0),
7549 fold_convert (type, alt1)),
7550 fold_convert (type, same));
7553 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
7554 of the array. Loop optimizer sometimes produce this type of
7555 expressions. */
7556 if (TREE_CODE (arg0) == ADDR_EXPR)
7558 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7559 if (tem)
7560 return fold_convert (type, fold (tem));
7562 else if (TREE_CODE (arg1) == ADDR_EXPR)
7564 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7565 if (tem)
7566 return fold_convert (type, fold (tem));
7569 else
7571 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7572 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7573 return non_lvalue (fold_convert (type, arg0));
7575 /* Likewise if the operands are reversed. */
7576 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7577 return non_lvalue (fold_convert (type, arg1));
7579 /* Convert X + -C into X - C. */
7580 if (TREE_CODE (arg1) == REAL_CST
7581 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7583 tem = fold_negate_const (arg1, type);
7584 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7585 return fold_build2 (MINUS_EXPR, type,
7586 fold_convert (type, arg0),
7587 fold_convert (type, tem));
7590 if (flag_unsafe_math_optimizations
7591 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7592 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7593 && (tem = distribute_real_division (code, type, arg0, arg1)))
7594 return tem;
7596 /* Convert x+x into x*2.0. */
7597 if (operand_equal_p (arg0, arg1, 0)
7598 && SCALAR_FLOAT_TYPE_P (type))
7599 return fold_build2 (MULT_EXPR, type, arg0,
7600 build_real (type, dconst2));
7602 /* Convert x*c+x into x*(c+1). */
7603 if (flag_unsafe_math_optimizations
7604 && TREE_CODE (arg0) == MULT_EXPR
7605 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7606 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7607 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7609 REAL_VALUE_TYPE c;
7611 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7612 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7613 return fold_build2 (MULT_EXPR, type, arg1,
7614 build_real (type, c));
7617 /* Convert x+x*c into x*(c+1). */
7618 if (flag_unsafe_math_optimizations
7619 && TREE_CODE (arg1) == MULT_EXPR
7620 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7621 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7622 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7624 REAL_VALUE_TYPE c;
7626 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7627 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7628 return fold_build2 (MULT_EXPR, type, arg0,
7629 build_real (type, c));
7632 /* Convert x*c1+x*c2 into x*(c1+c2). */
7633 if (flag_unsafe_math_optimizations
7634 && TREE_CODE (arg0) == MULT_EXPR
7635 && TREE_CODE (arg1) == MULT_EXPR
7636 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7637 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7638 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7639 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7640 && operand_equal_p (TREE_OPERAND (arg0, 0),
7641 TREE_OPERAND (arg1, 0), 0))
7643 REAL_VALUE_TYPE c1, c2;
7645 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7646 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7647 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7648 return fold_build2 (MULT_EXPR, type,
7649 TREE_OPERAND (arg0, 0),
7650 build_real (type, c1));
7652 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7653 if (flag_unsafe_math_optimizations
7654 && TREE_CODE (arg1) == PLUS_EXPR
7655 && TREE_CODE (arg0) != MULT_EXPR)
7657 tree tree10 = TREE_OPERAND (arg1, 0);
7658 tree tree11 = TREE_OPERAND (arg1, 1);
7659 if (TREE_CODE (tree11) == MULT_EXPR
7660 && TREE_CODE (tree10) == MULT_EXPR)
7662 tree tree0;
7663 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7664 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7667 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7668 if (flag_unsafe_math_optimizations
7669 && TREE_CODE (arg0) == PLUS_EXPR
7670 && TREE_CODE (arg1) != MULT_EXPR)
7672 tree tree00 = TREE_OPERAND (arg0, 0);
7673 tree tree01 = TREE_OPERAND (arg0, 1);
7674 if (TREE_CODE (tree01) == MULT_EXPR
7675 && TREE_CODE (tree00) == MULT_EXPR)
7677 tree tree0;
7678 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7679 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7684 bit_rotate:
7685 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7686 is a rotate of A by C1 bits. */
7687 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7688 is a rotate of A by B bits. */
7690 enum tree_code code0, code1;
7691 code0 = TREE_CODE (arg0);
7692 code1 = TREE_CODE (arg1);
7693 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7694 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7695 && operand_equal_p (TREE_OPERAND (arg0, 0),
7696 TREE_OPERAND (arg1, 0), 0)
7697 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7699 tree tree01, tree11;
7700 enum tree_code code01, code11;
7702 tree01 = TREE_OPERAND (arg0, 1);
7703 tree11 = TREE_OPERAND (arg1, 1);
7704 STRIP_NOPS (tree01);
7705 STRIP_NOPS (tree11);
7706 code01 = TREE_CODE (tree01);
7707 code11 = TREE_CODE (tree11);
7708 if (code01 == INTEGER_CST
7709 && code11 == INTEGER_CST
7710 && TREE_INT_CST_HIGH (tree01) == 0
7711 && TREE_INT_CST_HIGH (tree11) == 0
7712 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7713 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7714 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7715 code0 == LSHIFT_EXPR ? tree01 : tree11);
7716 else if (code11 == MINUS_EXPR)
7718 tree tree110, tree111;
7719 tree110 = TREE_OPERAND (tree11, 0);
7720 tree111 = TREE_OPERAND (tree11, 1);
7721 STRIP_NOPS (tree110);
7722 STRIP_NOPS (tree111);
7723 if (TREE_CODE (tree110) == INTEGER_CST
7724 && 0 == compare_tree_int (tree110,
7725 TYPE_PRECISION
7726 (TREE_TYPE (TREE_OPERAND
7727 (arg0, 0))))
7728 && operand_equal_p (tree01, tree111, 0))
7729 return build2 ((code0 == LSHIFT_EXPR
7730 ? LROTATE_EXPR
7731 : RROTATE_EXPR),
7732 type, TREE_OPERAND (arg0, 0), tree01);
7734 else if (code01 == MINUS_EXPR)
7736 tree tree010, tree011;
7737 tree010 = TREE_OPERAND (tree01, 0);
7738 tree011 = TREE_OPERAND (tree01, 1);
7739 STRIP_NOPS (tree010);
7740 STRIP_NOPS (tree011);
7741 if (TREE_CODE (tree010) == INTEGER_CST
7742 && 0 == compare_tree_int (tree010,
7743 TYPE_PRECISION
7744 (TREE_TYPE (TREE_OPERAND
7745 (arg0, 0))))
7746 && operand_equal_p (tree11, tree011, 0))
7747 return build2 ((code0 != LSHIFT_EXPR
7748 ? LROTATE_EXPR
7749 : RROTATE_EXPR),
7750 type, TREE_OPERAND (arg0, 0), tree11);
7755 associate:
7756 /* In most languages, can't associate operations on floats through
7757 parentheses. Rather than remember where the parentheses were, we
7758 don't associate floats at all, unless the user has specified
7759 -funsafe-math-optimizations. */
7761 if (! wins
7762 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7764 tree var0, con0, lit0, minus_lit0;
7765 tree var1, con1, lit1, minus_lit1;
7767 /* Split both trees into variables, constants, and literals. Then
7768 associate each group together, the constants with literals,
7769 then the result with variables. This increases the chances of
7770 literals being recombined later and of generating relocatable
7771 expressions for the sum of a constant and literal. */
7772 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7773 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7774 code == MINUS_EXPR);
7776 /* Only do something if we found more than two objects. Otherwise,
7777 nothing has changed and we risk infinite recursion. */
7778 if (2 < ((var0 != 0) + (var1 != 0)
7779 + (con0 != 0) + (con1 != 0)
7780 + (lit0 != 0) + (lit1 != 0)
7781 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7783 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7784 if (code == MINUS_EXPR)
7785 code = PLUS_EXPR;
7787 var0 = associate_trees (var0, var1, code, type);
7788 con0 = associate_trees (con0, con1, code, type);
7789 lit0 = associate_trees (lit0, lit1, code, type);
7790 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7792 /* Preserve the MINUS_EXPR if the negative part of the literal is
7793 greater than the positive part. Otherwise, the multiplicative
7794 folding code (i.e extract_muldiv) may be fooled in case
7795 unsigned constants are subtracted, like in the following
7796 example: ((X*2 + 4) - 8U)/2. */
7797 if (minus_lit0 && lit0)
7799 if (TREE_CODE (lit0) == INTEGER_CST
7800 && TREE_CODE (minus_lit0) == INTEGER_CST
7801 && tree_int_cst_lt (lit0, minus_lit0))
7803 minus_lit0 = associate_trees (minus_lit0, lit0,
7804 MINUS_EXPR, type);
7805 lit0 = 0;
7807 else
7809 lit0 = associate_trees (lit0, minus_lit0,
7810 MINUS_EXPR, type);
7811 minus_lit0 = 0;
7814 if (minus_lit0)
7816 if (con0 == 0)
7817 return fold_convert (type,
7818 associate_trees (var0, minus_lit0,
7819 MINUS_EXPR, type));
7820 else
7822 con0 = associate_trees (con0, minus_lit0,
7823 MINUS_EXPR, type);
7824 return fold_convert (type,
7825 associate_trees (var0, con0,
7826 PLUS_EXPR, type));
7830 con0 = associate_trees (con0, lit0, code, type);
7831 return fold_convert (type, associate_trees (var0, con0,
7832 code, type));
7836 binary:
7837 if (wins)
7838 t1 = const_binop (code, arg0, arg1, 0);
7839 if (t1 != NULL_TREE)
7841 /* The return value should always have
7842 the same type as the original expression. */
7843 if (TREE_TYPE (t1) != type)
7844 t1 = fold_convert (type, t1);
7846 return t1;
7848 return NULL_TREE;
7850 case MINUS_EXPR:
7851 /* A - (-B) -> A + B */
7852 if (TREE_CODE (arg1) == NEGATE_EXPR)
7853 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7854 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7855 if (TREE_CODE (arg0) == NEGATE_EXPR
7856 && (FLOAT_TYPE_P (type)
7857 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7858 && negate_expr_p (arg1)
7859 && reorder_operands_p (arg0, arg1))
7860 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7861 TREE_OPERAND (arg0, 0));
7862 /* Convert -A - 1 to ~A. */
7863 if (INTEGRAL_TYPE_P (type)
7864 && TREE_CODE (arg0) == NEGATE_EXPR
7865 && integer_onep (arg1))
7866 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7868 /* Convert -1 - A to ~A. */
7869 if (INTEGRAL_TYPE_P (type)
7870 && integer_all_onesp (arg0))
7871 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7873 if (TREE_CODE (type) == COMPLEX_TYPE)
7875 tem = fold_complex_add (type, arg0, arg1, MINUS_EXPR);
7876 if (tem)
7877 return tem;
7880 if (! FLOAT_TYPE_P (type))
7882 if (! wins && integer_zerop (arg0))
7883 return negate_expr (fold_convert (type, arg1));
7884 if (integer_zerop (arg1))
7885 return non_lvalue (fold_convert (type, arg0));
7887 /* Fold A - (A & B) into ~B & A. */
7888 if (!TREE_SIDE_EFFECTS (arg0)
7889 && TREE_CODE (arg1) == BIT_AND_EXPR)
7891 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7892 return fold_build2 (BIT_AND_EXPR, type,
7893 fold_build1 (BIT_NOT_EXPR, type,
7894 TREE_OPERAND (arg1, 0)),
7895 arg0);
7896 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7897 return fold_build2 (BIT_AND_EXPR, type,
7898 fold_build1 (BIT_NOT_EXPR, type,
7899 TREE_OPERAND (arg1, 1)),
7900 arg0);
7903 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7904 any power of 2 minus 1. */
7905 if (TREE_CODE (arg0) == BIT_AND_EXPR
7906 && TREE_CODE (arg1) == BIT_AND_EXPR
7907 && operand_equal_p (TREE_OPERAND (arg0, 0),
7908 TREE_OPERAND (arg1, 0), 0))
7910 tree mask0 = TREE_OPERAND (arg0, 1);
7911 tree mask1 = TREE_OPERAND (arg1, 1);
7912 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7914 if (operand_equal_p (tem, mask1, 0))
7916 tem = fold_build2 (BIT_XOR_EXPR, type,
7917 TREE_OPERAND (arg0, 0), mask1);
7918 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7923 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7924 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7925 return non_lvalue (fold_convert (type, arg0));
7927 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7928 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7929 (-ARG1 + ARG0) reduces to -ARG1. */
7930 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7931 return negate_expr (fold_convert (type, arg1));
7933 /* Fold &x - &x. This can happen from &x.foo - &x.
7934 This is unsafe for certain floats even in non-IEEE formats.
7935 In IEEE, it is unsafe because it does wrong for NaNs.
7936 Also note that operand_equal_p is always false if an operand
7937 is volatile. */
7939 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7940 && operand_equal_p (arg0, arg1, 0))
7941 return fold_convert (type, integer_zero_node);
7943 /* A - B -> A + (-B) if B is easily negatable. */
7944 if (!wins && negate_expr_p (arg1)
7945 && ((FLOAT_TYPE_P (type)
7946 /* Avoid this transformation if B is a positive REAL_CST. */
7947 && (TREE_CODE (arg1) != REAL_CST
7948 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7949 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7950 return fold_build2 (PLUS_EXPR, type,
7951 fold_convert (type, arg0),
7952 fold_convert (type, negate_expr (arg1)));
7954 /* Try folding difference of addresses. */
7956 HOST_WIDE_INT diff;
7958 if ((TREE_CODE (arg0) == ADDR_EXPR
7959 || TREE_CODE (arg1) == ADDR_EXPR)
7960 && ptr_difference_const (arg0, arg1, &diff))
7961 return build_int_cst_type (type, diff);
7964 /* Fold &a[i] - &a[j] to i-j. */
7965 if (TREE_CODE (arg0) == ADDR_EXPR
7966 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7967 && TREE_CODE (arg1) == ADDR_EXPR
7968 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7970 tree aref0 = TREE_OPERAND (arg0, 0);
7971 tree aref1 = TREE_OPERAND (arg1, 0);
7972 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7973 TREE_OPERAND (aref1, 0), 0))
7975 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7976 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7977 tree esz = array_ref_element_size (aref0);
7978 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7979 return fold_build2 (MULT_EXPR, type, diff,
7980 fold_convert (type, esz));
7985 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
7986 of the array. Loop optimizer sometimes produce this type of
7987 expressions. */
7988 if (TREE_CODE (arg0) == ADDR_EXPR)
7990 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7991 if (tem)
7992 return fold_convert (type, fold (tem));
7995 if (flag_unsafe_math_optimizations
7996 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7997 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7998 && (tem = distribute_real_division (code, type, arg0, arg1)))
7999 return tem;
8001 if (TREE_CODE (arg0) == MULT_EXPR
8002 && TREE_CODE (arg1) == MULT_EXPR
8003 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8005 /* (A * C) - (B * C) -> (A-B) * C. */
8006 if (operand_equal_p (TREE_OPERAND (arg0, 1),
8007 TREE_OPERAND (arg1, 1), 0))
8008 return fold_build2 (MULT_EXPR, type,
8009 fold_build2 (MINUS_EXPR, type,
8010 TREE_OPERAND (arg0, 0),
8011 TREE_OPERAND (arg1, 0)),
8012 TREE_OPERAND (arg0, 1));
8013 /* (A * C1) - (A * C2) -> A * (C1-C2). */
8014 if (operand_equal_p (TREE_OPERAND (arg0, 0),
8015 TREE_OPERAND (arg1, 0), 0))
8016 return fold_build2 (MULT_EXPR, type,
8017 TREE_OPERAND (arg0, 0),
8018 fold_build2 (MINUS_EXPR, type,
8019 TREE_OPERAND (arg0, 1),
8020 TREE_OPERAND (arg1, 1)));
8023 goto associate;
8025 case MULT_EXPR:
8026 /* (-A) * (-B) -> A * B */
8027 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8028 return fold_build2 (MULT_EXPR, type,
8029 TREE_OPERAND (arg0, 0),
8030 negate_expr (arg1));
8031 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8032 return fold_build2 (MULT_EXPR, type,
8033 negate_expr (arg0),
8034 TREE_OPERAND (arg1, 0));
8036 if (TREE_CODE (type) == COMPLEX_TYPE)
8038 tem = fold_complex_mult (type, arg0, arg1);
8039 if (tem)
8040 return tem;
8043 if (! FLOAT_TYPE_P (type))
8045 if (integer_zerop (arg1))
8046 return omit_one_operand (type, arg1, arg0);
8047 if (integer_onep (arg1))
8048 return non_lvalue (fold_convert (type, arg0));
8049 /* Transform x * -1 into -x. */
8050 if (integer_all_onesp (arg1))
8051 return fold_convert (type, negate_expr (arg0));
8053 /* (a * (1 << b)) is (a << b) */
8054 if (TREE_CODE (arg1) == LSHIFT_EXPR
8055 && integer_onep (TREE_OPERAND (arg1, 0)))
8056 return fold_build2 (LSHIFT_EXPR, type, arg0,
8057 TREE_OPERAND (arg1, 1));
8058 if (TREE_CODE (arg0) == LSHIFT_EXPR
8059 && integer_onep (TREE_OPERAND (arg0, 0)))
8060 return fold_build2 (LSHIFT_EXPR, type, arg1,
8061 TREE_OPERAND (arg0, 1));
8063 if (TREE_CODE (arg1) == INTEGER_CST
8064 && 0 != (tem = extract_muldiv (op0,
8065 fold_convert (type, arg1),
8066 code, NULL_TREE)))
8067 return fold_convert (type, tem);
8070 else
8072 /* Maybe fold x * 0 to 0. The expressions aren't the same
8073 when x is NaN, since x * 0 is also NaN. Nor are they the
8074 same in modes with signed zeros, since multiplying a
8075 negative value by 0 gives -0, not +0. */
8076 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8077 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8078 && real_zerop (arg1))
8079 return omit_one_operand (type, arg1, arg0);
8080 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
8081 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8082 && real_onep (arg1))
8083 return non_lvalue (fold_convert (type, arg0));
8085 /* Transform x * -1.0 into -x. */
8086 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8087 && real_minus_onep (arg1))
8088 return fold_convert (type, negate_expr (arg0));
8090 /* Convert (C1/X)*C2 into (C1*C2)/X. */
8091 if (flag_unsafe_math_optimizations
8092 && TREE_CODE (arg0) == RDIV_EXPR
8093 && TREE_CODE (arg1) == REAL_CST
8094 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
8096 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
8097 arg1, 0);
8098 if (tem)
8099 return fold_build2 (RDIV_EXPR, type, tem,
8100 TREE_OPERAND (arg0, 1));
8103 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
8104 if (operand_equal_p (arg0, arg1, 0))
8106 tree tem = fold_strip_sign_ops (arg0);
8107 if (tem != NULL_TREE)
8109 tem = fold_convert (type, tem);
8110 return fold_build2 (MULT_EXPR, type, tem, tem);
8114 if (flag_unsafe_math_optimizations)
8116 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8117 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8119 /* Optimizations of root(...)*root(...). */
8120 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
8122 tree rootfn, arg, arglist;
8123 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8124 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8126 /* Optimize sqrt(x)*sqrt(x) as x. */
8127 if (BUILTIN_SQRT_P (fcode0)
8128 && operand_equal_p (arg00, arg10, 0)
8129 && ! HONOR_SNANS (TYPE_MODE (type)))
8130 return arg00;
8132 /* Optimize root(x)*root(y) as root(x*y). */
8133 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8134 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8135 arglist = build_tree_list (NULL_TREE, arg);
8136 return build_function_call_expr (rootfn, arglist);
8139 /* Optimize expN(x)*expN(y) as expN(x+y). */
8140 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
8142 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8143 tree arg = fold_build2 (PLUS_EXPR, type,
8144 TREE_VALUE (TREE_OPERAND (arg0, 1)),
8145 TREE_VALUE (TREE_OPERAND (arg1, 1)));
8146 tree arglist = build_tree_list (NULL_TREE, arg);
8147 return build_function_call_expr (expfn, arglist);
8150 /* Optimizations of pow(...)*pow(...). */
8151 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
8152 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
8153 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
8155 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8156 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8157 1)));
8158 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8159 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8160 1)));
8162 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
8163 if (operand_equal_p (arg01, arg11, 0))
8165 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8166 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8167 tree arglist = tree_cons (NULL_TREE, arg,
8168 build_tree_list (NULL_TREE,
8169 arg01));
8170 return build_function_call_expr (powfn, arglist);
8173 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
8174 if (operand_equal_p (arg00, arg10, 0))
8176 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8177 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
8178 tree arglist = tree_cons (NULL_TREE, arg00,
8179 build_tree_list (NULL_TREE,
8180 arg));
8181 return build_function_call_expr (powfn, arglist);
8185 /* Optimize tan(x)*cos(x) as sin(x). */
8186 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
8187 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
8188 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
8189 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
8190 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
8191 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
8192 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8193 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8195 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
8197 if (sinfn != NULL_TREE)
8198 return build_function_call_expr (sinfn,
8199 TREE_OPERAND (arg0, 1));
8202 /* Optimize x*pow(x,c) as pow(x,c+1). */
8203 if (fcode1 == BUILT_IN_POW
8204 || fcode1 == BUILT_IN_POWF
8205 || fcode1 == BUILT_IN_POWL)
8207 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8208 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8209 1)));
8210 if (TREE_CODE (arg11) == REAL_CST
8211 && ! TREE_CONSTANT_OVERFLOW (arg11)
8212 && operand_equal_p (arg0, arg10, 0))
8214 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8215 REAL_VALUE_TYPE c;
8216 tree arg, arglist;
8218 c = TREE_REAL_CST (arg11);
8219 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8220 arg = build_real (type, c);
8221 arglist = build_tree_list (NULL_TREE, arg);
8222 arglist = tree_cons (NULL_TREE, arg0, arglist);
8223 return build_function_call_expr (powfn, arglist);
8227 /* Optimize pow(x,c)*x as pow(x,c+1). */
8228 if (fcode0 == BUILT_IN_POW
8229 || fcode0 == BUILT_IN_POWF
8230 || fcode0 == BUILT_IN_POWL)
8232 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8233 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8234 1)));
8235 if (TREE_CODE (arg01) == REAL_CST
8236 && ! TREE_CONSTANT_OVERFLOW (arg01)
8237 && operand_equal_p (arg1, arg00, 0))
8239 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8240 REAL_VALUE_TYPE c;
8241 tree arg, arglist;
8243 c = TREE_REAL_CST (arg01);
8244 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8245 arg = build_real (type, c);
8246 arglist = build_tree_list (NULL_TREE, arg);
8247 arglist = tree_cons (NULL_TREE, arg1, arglist);
8248 return build_function_call_expr (powfn, arglist);
8252 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8253 if (! optimize_size
8254 && operand_equal_p (arg0, arg1, 0))
8256 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8258 if (powfn)
8260 tree arg = build_real (type, dconst2);
8261 tree arglist = build_tree_list (NULL_TREE, arg);
8262 arglist = tree_cons (NULL_TREE, arg0, arglist);
8263 return build_function_call_expr (powfn, arglist);
8268 goto associate;
8270 case BIT_IOR_EXPR:
8271 bit_ior:
8272 if (integer_all_onesp (arg1))
8273 return omit_one_operand (type, arg1, arg0);
8274 if (integer_zerop (arg1))
8275 return non_lvalue (fold_convert (type, arg0));
8276 if (operand_equal_p (arg0, arg1, 0))
8277 return non_lvalue (fold_convert (type, arg0));
8279 /* ~X | X is -1. */
8280 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8281 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8283 t1 = build_int_cst (type, -1);
8284 t1 = force_fit_type (t1, 0, false, false);
8285 return omit_one_operand (type, t1, arg1);
8288 /* X | ~X is -1. */
8289 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8290 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8292 t1 = build_int_cst (type, -1);
8293 t1 = force_fit_type (t1, 0, false, false);
8294 return omit_one_operand (type, t1, arg0);
8297 t1 = distribute_bit_expr (code, type, arg0, arg1);
8298 if (t1 != NULL_TREE)
8299 return t1;
8301 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8303 This results in more efficient code for machines without a NAND
8304 instruction. Combine will canonicalize to the first form
8305 which will allow use of NAND instructions provided by the
8306 backend if they exist. */
8307 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8308 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8310 return fold_build1 (BIT_NOT_EXPR, type,
8311 build2 (BIT_AND_EXPR, type,
8312 TREE_OPERAND (arg0, 0),
8313 TREE_OPERAND (arg1, 0)));
8316 /* See if this can be simplified into a rotate first. If that
8317 is unsuccessful continue in the association code. */
8318 goto bit_rotate;
8320 case BIT_XOR_EXPR:
8321 if (integer_zerop (arg1))
8322 return non_lvalue (fold_convert (type, arg0));
8323 if (integer_all_onesp (arg1))
8324 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8325 if (operand_equal_p (arg0, arg1, 0))
8326 return omit_one_operand (type, integer_zero_node, arg0);
8328 /* ~X ^ X is -1. */
8329 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8330 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8332 t1 = build_int_cst (type, -1);
8333 t1 = force_fit_type (t1, 0, false, false);
8334 return omit_one_operand (type, t1, arg1);
8337 /* X ^ ~X is -1. */
8338 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8339 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8341 t1 = build_int_cst (type, -1);
8342 t1 = force_fit_type (t1, 0, false, false);
8343 return omit_one_operand (type, t1, arg0);
8346 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8347 with a constant, and the two constants have no bits in common,
8348 we should treat this as a BIT_IOR_EXPR since this may produce more
8349 simplifications. */
8350 if (TREE_CODE (arg0) == BIT_AND_EXPR
8351 && TREE_CODE (arg1) == BIT_AND_EXPR
8352 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8353 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8354 && integer_zerop (const_binop (BIT_AND_EXPR,
8355 TREE_OPERAND (arg0, 1),
8356 TREE_OPERAND (arg1, 1), 0)))
8358 code = BIT_IOR_EXPR;
8359 goto bit_ior;
8362 /* Convert ~X ^ ~Y to X ^ Y. */
8363 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8364 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8365 return fold_build2 (code, type,
8366 fold_convert (type, TREE_OPERAND (arg0, 0)),
8367 fold_convert (type, TREE_OPERAND (arg1, 0)));
8369 /* See if this can be simplified into a rotate first. If that
8370 is unsuccessful continue in the association code. */
8371 goto bit_rotate;
8373 case BIT_AND_EXPR:
8374 if (integer_all_onesp (arg1))
8375 return non_lvalue (fold_convert (type, arg0));
8376 if (integer_zerop (arg1))
8377 return omit_one_operand (type, arg1, arg0);
8378 if (operand_equal_p (arg0, arg1, 0))
8379 return non_lvalue (fold_convert (type, arg0));
8381 /* ~X & X is always zero. */
8382 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8383 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8384 return omit_one_operand (type, integer_zero_node, arg1);
8386 /* X & ~X is always zero. */
8387 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8388 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8389 return omit_one_operand (type, integer_zero_node, arg0);
8391 t1 = distribute_bit_expr (code, type, arg0, arg1);
8392 if (t1 != NULL_TREE)
8393 return t1;
8394 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8395 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8396 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8398 unsigned int prec
8399 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8401 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8402 && (~TREE_INT_CST_LOW (arg1)
8403 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8404 return fold_convert (type, TREE_OPERAND (arg0, 0));
8407 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8409 This results in more efficient code for machines without a NOR
8410 instruction. Combine will canonicalize to the first form
8411 which will allow use of NOR instructions provided by the
8412 backend if they exist. */
8413 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8414 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8416 return fold_build1 (BIT_NOT_EXPR, type,
8417 build2 (BIT_IOR_EXPR, type,
8418 TREE_OPERAND (arg0, 0),
8419 TREE_OPERAND (arg1, 0)));
8422 goto associate;
8424 case RDIV_EXPR:
8425 /* Don't touch a floating-point divide by zero unless the mode
8426 of the constant can represent infinity. */
8427 if (TREE_CODE (arg1) == REAL_CST
8428 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8429 && real_zerop (arg1))
8430 return NULL_TREE;
8432 /* (-A) / (-B) -> A / B */
8433 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8434 return fold_build2 (RDIV_EXPR, type,
8435 TREE_OPERAND (arg0, 0),
8436 negate_expr (arg1));
8437 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8438 return fold_build2 (RDIV_EXPR, type,
8439 negate_expr (arg0),
8440 TREE_OPERAND (arg1, 0));
8442 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8443 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8444 && real_onep (arg1))
8445 return non_lvalue (fold_convert (type, arg0));
8447 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8448 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8449 && real_minus_onep (arg1))
8450 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8452 /* If ARG1 is a constant, we can convert this to a multiply by the
8453 reciprocal. This does not have the same rounding properties,
8454 so only do this if -funsafe-math-optimizations. We can actually
8455 always safely do it if ARG1 is a power of two, but it's hard to
8456 tell if it is or not in a portable manner. */
8457 if (TREE_CODE (arg1) == REAL_CST)
8459 if (flag_unsafe_math_optimizations
8460 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8461 arg1, 0)))
8462 return fold_build2 (MULT_EXPR, type, arg0, tem);
8463 /* Find the reciprocal if optimizing and the result is exact. */
8464 if (optimize)
8466 REAL_VALUE_TYPE r;
8467 r = TREE_REAL_CST (arg1);
8468 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8470 tem = build_real (type, r);
8471 return fold_build2 (MULT_EXPR, type,
8472 fold_convert (type, arg0), tem);
8476 /* Convert A/B/C to A/(B*C). */
8477 if (flag_unsafe_math_optimizations
8478 && TREE_CODE (arg0) == RDIV_EXPR)
8479 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8480 fold_build2 (MULT_EXPR, type,
8481 TREE_OPERAND (arg0, 1), arg1));
8483 /* Convert A/(B/C) to (A/B)*C. */
8484 if (flag_unsafe_math_optimizations
8485 && TREE_CODE (arg1) == RDIV_EXPR)
8486 return fold_build2 (MULT_EXPR, type,
8487 fold_build2 (RDIV_EXPR, type, arg0,
8488 TREE_OPERAND (arg1, 0)),
8489 TREE_OPERAND (arg1, 1));
8491 /* Convert C1/(X*C2) into (C1/C2)/X. */
8492 if (flag_unsafe_math_optimizations
8493 && TREE_CODE (arg1) == MULT_EXPR
8494 && TREE_CODE (arg0) == REAL_CST
8495 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8497 tree tem = const_binop (RDIV_EXPR, arg0,
8498 TREE_OPERAND (arg1, 1), 0);
8499 if (tem)
8500 return fold_build2 (RDIV_EXPR, type, tem,
8501 TREE_OPERAND (arg1, 0));
8504 if (TREE_CODE (type) == COMPLEX_TYPE)
8506 tem = fold_complex_div (type, arg0, arg1, code);
8507 if (tem)
8508 return tem;
8511 if (flag_unsafe_math_optimizations)
8513 enum built_in_function fcode = builtin_mathfn_code (arg1);
8514 /* Optimize x/expN(y) into x*expN(-y). */
8515 if (BUILTIN_EXPONENT_P (fcode))
8517 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8518 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8519 tree arglist = build_tree_list (NULL_TREE,
8520 fold_convert (type, arg));
8521 arg1 = build_function_call_expr (expfn, arglist);
8522 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8525 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8526 if (fcode == BUILT_IN_POW
8527 || fcode == BUILT_IN_POWF
8528 || fcode == BUILT_IN_POWL)
8530 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8531 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8532 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8533 tree neg11 = fold_convert (type, negate_expr (arg11));
8534 tree arglist = tree_cons(NULL_TREE, arg10,
8535 build_tree_list (NULL_TREE, neg11));
8536 arg1 = build_function_call_expr (powfn, arglist);
8537 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8541 if (flag_unsafe_math_optimizations)
8543 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8544 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8546 /* Optimize sin(x)/cos(x) as tan(x). */
8547 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8548 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8549 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8550 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8551 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8553 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8555 if (tanfn != NULL_TREE)
8556 return build_function_call_expr (tanfn,
8557 TREE_OPERAND (arg0, 1));
8560 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8561 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8562 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8563 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8564 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8565 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8567 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8569 if (tanfn != NULL_TREE)
8571 tree tmp = TREE_OPERAND (arg0, 1);
8572 tmp = build_function_call_expr (tanfn, tmp);
8573 return fold_build2 (RDIV_EXPR, type,
8574 build_real (type, dconst1), tmp);
8578 /* Optimize pow(x,c)/x as pow(x,c-1). */
8579 if (fcode0 == BUILT_IN_POW
8580 || fcode0 == BUILT_IN_POWF
8581 || fcode0 == BUILT_IN_POWL)
8583 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8584 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8585 if (TREE_CODE (arg01) == REAL_CST
8586 && ! TREE_CONSTANT_OVERFLOW (arg01)
8587 && operand_equal_p (arg1, arg00, 0))
8589 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8590 REAL_VALUE_TYPE c;
8591 tree arg, arglist;
8593 c = TREE_REAL_CST (arg01);
8594 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8595 arg = build_real (type, c);
8596 arglist = build_tree_list (NULL_TREE, arg);
8597 arglist = tree_cons (NULL_TREE, arg1, arglist);
8598 return build_function_call_expr (powfn, arglist);
8602 goto binary;
8604 case TRUNC_DIV_EXPR:
8605 case ROUND_DIV_EXPR:
8606 case FLOOR_DIV_EXPR:
8607 case CEIL_DIV_EXPR:
8608 case EXACT_DIV_EXPR:
8609 if (integer_onep (arg1))
8610 return non_lvalue (fold_convert (type, arg0));
8611 if (integer_zerop (arg1))
8612 return NULL_TREE;
8613 /* X / -1 is -X. */
8614 if (!TYPE_UNSIGNED (type)
8615 && TREE_CODE (arg1) == INTEGER_CST
8616 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8617 && TREE_INT_CST_HIGH (arg1) == -1)
8618 return fold_convert (type, negate_expr (arg0));
8620 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8621 operation, EXACT_DIV_EXPR.
8623 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8624 At one time others generated faster code, it's not clear if they do
8625 after the last round to changes to the DIV code in expmed.c. */
8626 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8627 && multiple_of_p (type, arg0, arg1))
8628 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8630 if (TREE_CODE (arg1) == INTEGER_CST
8631 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8632 return fold_convert (type, tem);
8634 if (TREE_CODE (type) == COMPLEX_TYPE)
8636 tem = fold_complex_div (type, arg0, arg1, code);
8637 if (tem)
8638 return tem;
8640 goto binary;
8642 case CEIL_MOD_EXPR:
8643 case FLOOR_MOD_EXPR:
8644 case ROUND_MOD_EXPR:
8645 case TRUNC_MOD_EXPR:
8646 /* X % 1 is always zero, but be sure to preserve any side
8647 effects in X. */
8648 if (integer_onep (arg1))
8649 return omit_one_operand (type, integer_zero_node, arg0);
8651 /* X % 0, return X % 0 unchanged so that we can get the
8652 proper warnings and errors. */
8653 if (integer_zerop (arg1))
8654 return NULL_TREE;
8656 /* 0 % X is always zero, but be sure to preserve any side
8657 effects in X. Place this after checking for X == 0. */
8658 if (integer_zerop (arg0))
8659 return omit_one_operand (type, integer_zero_node, arg1);
8661 /* X % -1 is zero. */
8662 if (!TYPE_UNSIGNED (type)
8663 && TREE_CODE (arg1) == INTEGER_CST
8664 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8665 && TREE_INT_CST_HIGH (arg1) == -1)
8666 return omit_one_operand (type, integer_zero_node, arg0);
8668 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
8669 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
8670 if (code == TRUNC_MOD_EXPR
8671 && TYPE_UNSIGNED (type)
8672 && integer_pow2p (arg1))
8674 unsigned HOST_WIDE_INT high, low;
8675 tree mask;
8676 int l;
8678 l = tree_log2 (arg1);
8679 if (l >= HOST_BITS_PER_WIDE_INT)
8681 high = ((unsigned HOST_WIDE_INT) 1
8682 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8683 low = -1;
8685 else
8687 high = 0;
8688 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8691 mask = build_int_cst_wide (type, low, high);
8692 return fold_build2 (BIT_AND_EXPR, type,
8693 fold_convert (type, arg0), mask);
8696 /* X % -C is the same as X % C. */
8697 if (code == TRUNC_MOD_EXPR
8698 && !TYPE_UNSIGNED (type)
8699 && TREE_CODE (arg1) == INTEGER_CST
8700 && !TREE_CONSTANT_OVERFLOW (arg1)
8701 && TREE_INT_CST_HIGH (arg1) < 0
8702 && !flag_trapv
8703 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8704 && !sign_bit_p (arg1, arg1))
8705 return fold_build2 (code, type, fold_convert (type, arg0),
8706 fold_convert (type, negate_expr (arg1)));
8708 /* X % -Y is the same as X % Y. */
8709 if (code == TRUNC_MOD_EXPR
8710 && !TYPE_UNSIGNED (type)
8711 && TREE_CODE (arg1) == NEGATE_EXPR
8712 && !flag_trapv)
8713 return fold_build2 (code, type, fold_convert (type, arg0),
8714 fold_convert (type, TREE_OPERAND (arg1, 0)));
8716 if (TREE_CODE (arg1) == INTEGER_CST
8717 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8718 return fold_convert (type, tem);
8720 goto binary;
8722 case LROTATE_EXPR:
8723 case RROTATE_EXPR:
8724 if (integer_all_onesp (arg0))
8725 return omit_one_operand (type, arg0, arg1);
8726 goto shift;
8728 case RSHIFT_EXPR:
8729 /* Optimize -1 >> x for arithmetic right shifts. */
8730 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8731 return omit_one_operand (type, arg0, arg1);
8732 /* ... fall through ... */
8734 case LSHIFT_EXPR:
8735 shift:
8736 if (integer_zerop (arg1))
8737 return non_lvalue (fold_convert (type, arg0));
8738 if (integer_zerop (arg0))
8739 return omit_one_operand (type, arg0, arg1);
8741 /* Since negative shift count is not well-defined,
8742 don't try to compute it in the compiler. */
8743 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8744 return NULL_TREE;
8745 /* Rewrite an LROTATE_EXPR by a constant into an
8746 RROTATE_EXPR by a new constant. */
8747 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8749 tree tem = build_int_cst (NULL_TREE,
8750 GET_MODE_BITSIZE (TYPE_MODE (type)));
8751 tem = fold_convert (TREE_TYPE (arg1), tem);
8752 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8753 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8756 /* If we have a rotate of a bit operation with the rotate count and
8757 the second operand of the bit operation both constant,
8758 permute the two operations. */
8759 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8760 && (TREE_CODE (arg0) == BIT_AND_EXPR
8761 || TREE_CODE (arg0) == BIT_IOR_EXPR
8762 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8763 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8764 return fold_build2 (TREE_CODE (arg0), type,
8765 fold_build2 (code, type,
8766 TREE_OPERAND (arg0, 0), arg1),
8767 fold_build2 (code, type,
8768 TREE_OPERAND (arg0, 1), arg1));
8770 /* Two consecutive rotates adding up to the width of the mode can
8771 be ignored. */
8772 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8773 && TREE_CODE (arg0) == RROTATE_EXPR
8774 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8775 && TREE_INT_CST_HIGH (arg1) == 0
8776 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8777 && ((TREE_INT_CST_LOW (arg1)
8778 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8779 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8780 return TREE_OPERAND (arg0, 0);
8782 goto binary;
8784 case MIN_EXPR:
8785 if (operand_equal_p (arg0, arg1, 0))
8786 return omit_one_operand (type, arg0, arg1);
8787 if (INTEGRAL_TYPE_P (type)
8788 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8789 return omit_one_operand (type, arg1, arg0);
8790 goto associate;
8792 case MAX_EXPR:
8793 if (operand_equal_p (arg0, arg1, 0))
8794 return omit_one_operand (type, arg0, arg1);
8795 if (INTEGRAL_TYPE_P (type)
8796 && TYPE_MAX_VALUE (type)
8797 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8798 return omit_one_operand (type, arg1, arg0);
8799 goto associate;
8801 case TRUTH_ANDIF_EXPR:
8802 /* Note that the operands of this must be ints
8803 and their values must be 0 or 1.
8804 ("true" is a fixed value perhaps depending on the language.) */
8805 /* If first arg is constant zero, return it. */
8806 if (integer_zerop (arg0))
8807 return fold_convert (type, arg0);
8808 case TRUTH_AND_EXPR:
8809 /* If either arg is constant true, drop it. */
8810 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8811 return non_lvalue (fold_convert (type, arg1));
8812 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8813 /* Preserve sequence points. */
8814 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8815 return non_lvalue (fold_convert (type, arg0));
8816 /* If second arg is constant zero, result is zero, but first arg
8817 must be evaluated. */
8818 if (integer_zerop (arg1))
8819 return omit_one_operand (type, arg1, arg0);
8820 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8821 case will be handled here. */
8822 if (integer_zerop (arg0))
8823 return omit_one_operand (type, arg0, arg1);
8825 /* !X && X is always false. */
8826 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8827 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8828 return omit_one_operand (type, integer_zero_node, arg1);
8829 /* X && !X is always false. */
8830 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8831 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8832 return omit_one_operand (type, integer_zero_node, arg0);
8834 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8835 means A >= Y && A != MAX, but in this case we know that
8836 A < X <= MAX. */
8838 if (!TREE_SIDE_EFFECTS (arg0)
8839 && !TREE_SIDE_EFFECTS (arg1))
8841 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8842 if (tem)
8843 return fold_build2 (code, type, tem, arg1);
8845 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8846 if (tem)
8847 return fold_build2 (code, type, arg0, tem);
8850 truth_andor:
8851 /* We only do these simplifications if we are optimizing. */
8852 if (!optimize)
8853 return NULL_TREE;
8855 /* Check for things like (A || B) && (A || C). We can convert this
8856 to A || (B && C). Note that either operator can be any of the four
8857 truth and/or operations and the transformation will still be
8858 valid. Also note that we only care about order for the
8859 ANDIF and ORIF operators. If B contains side effects, this
8860 might change the truth-value of A. */
8861 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8862 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8863 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8864 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8865 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8866 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8868 tree a00 = TREE_OPERAND (arg0, 0);
8869 tree a01 = TREE_OPERAND (arg0, 1);
8870 tree a10 = TREE_OPERAND (arg1, 0);
8871 tree a11 = TREE_OPERAND (arg1, 1);
8872 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8873 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8874 && (code == TRUTH_AND_EXPR
8875 || code == TRUTH_OR_EXPR));
8877 if (operand_equal_p (a00, a10, 0))
8878 return fold_build2 (TREE_CODE (arg0), type, a00,
8879 fold_build2 (code, type, a01, a11));
8880 else if (commutative && operand_equal_p (a00, a11, 0))
8881 return fold_build2 (TREE_CODE (arg0), type, a00,
8882 fold_build2 (code, type, a01, a10));
8883 else if (commutative && operand_equal_p (a01, a10, 0))
8884 return fold_build2 (TREE_CODE (arg0), type, a01,
8885 fold_build2 (code, type, a00, a11));
8887 /* This case if tricky because we must either have commutative
8888 operators or else A10 must not have side-effects. */
8890 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8891 && operand_equal_p (a01, a11, 0))
8892 return fold_build2 (TREE_CODE (arg0), type,
8893 fold_build2 (code, type, a00, a10),
8894 a01);
8897 /* See if we can build a range comparison. */
8898 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8899 return tem;
8901 /* Check for the possibility of merging component references. If our
8902 lhs is another similar operation, try to merge its rhs with our
8903 rhs. Then try to merge our lhs and rhs. */
8904 if (TREE_CODE (arg0) == code
8905 && 0 != (tem = fold_truthop (code, type,
8906 TREE_OPERAND (arg0, 1), arg1)))
8907 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8909 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8910 return tem;
8912 return NULL_TREE;
8914 case TRUTH_ORIF_EXPR:
8915 /* Note that the operands of this must be ints
8916 and their values must be 0 or true.
8917 ("true" is a fixed value perhaps depending on the language.) */
8918 /* If first arg is constant true, return it. */
8919 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8920 return fold_convert (type, arg0);
8921 case TRUTH_OR_EXPR:
8922 /* If either arg is constant zero, drop it. */
8923 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8924 return non_lvalue (fold_convert (type, arg1));
8925 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8926 /* Preserve sequence points. */
8927 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8928 return non_lvalue (fold_convert (type, arg0));
8929 /* If second arg is constant true, result is true, but we must
8930 evaluate first arg. */
8931 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8932 return omit_one_operand (type, arg1, arg0);
8933 /* Likewise for first arg, but note this only occurs here for
8934 TRUTH_OR_EXPR. */
8935 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8936 return omit_one_operand (type, arg0, arg1);
8938 /* !X || X is always true. */
8939 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8940 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8941 return omit_one_operand (type, integer_one_node, arg1);
8942 /* X || !X is always true. */
8943 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8944 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8945 return omit_one_operand (type, integer_one_node, arg0);
8947 goto truth_andor;
8949 case TRUTH_XOR_EXPR:
8950 /* If the second arg is constant zero, drop it. */
8951 if (integer_zerop (arg1))
8952 return non_lvalue (fold_convert (type, arg0));
8953 /* If the second arg is constant true, this is a logical inversion. */
8954 if (integer_onep (arg1))
8956 /* Only call invert_truthvalue if operand is a truth value. */
8957 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8958 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8959 else
8960 tem = invert_truthvalue (arg0);
8961 return non_lvalue (fold_convert (type, tem));
8963 /* Identical arguments cancel to zero. */
8964 if (operand_equal_p (arg0, arg1, 0))
8965 return omit_one_operand (type, integer_zero_node, arg0);
8967 /* !X ^ X is always true. */
8968 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8969 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8970 return omit_one_operand (type, integer_one_node, arg1);
8972 /* X ^ !X is always true. */
8973 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8974 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8975 return omit_one_operand (type, integer_one_node, arg0);
8977 return NULL_TREE;
8979 case EQ_EXPR:
8980 case NE_EXPR:
8981 case LT_EXPR:
8982 case GT_EXPR:
8983 case LE_EXPR:
8984 case GE_EXPR:
8985 /* If one arg is a real or integer constant, put it last. */
8986 if (tree_swap_operands_p (arg0, arg1, true))
8987 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8989 /* bool_var != 0 becomes bool_var. */
8990 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8991 && code == NE_EXPR)
8992 return non_lvalue (fold_convert (type, arg0));
8994 /* bool_var == 1 becomes bool_var. */
8995 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8996 && code == EQ_EXPR)
8997 return non_lvalue (fold_convert (type, arg0));
8999 /* If this is an equality comparison of the address of a non-weak
9000 object against zero, then we know the result. */
9001 if ((code == EQ_EXPR || code == NE_EXPR)
9002 && TREE_CODE (arg0) == ADDR_EXPR
9003 && DECL_P (TREE_OPERAND (arg0, 0))
9004 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
9005 && integer_zerop (arg1))
9006 return constant_boolean_node (code != EQ_EXPR, type);
9008 /* If this is an equality comparison of the address of two non-weak,
9009 unaliased symbols neither of which are extern (since we do not
9010 have access to attributes for externs), then we know the result. */
9011 if ((code == EQ_EXPR || code == NE_EXPR)
9012 && TREE_CODE (arg0) == ADDR_EXPR
9013 && DECL_P (TREE_OPERAND (arg0, 0))
9014 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
9015 && ! lookup_attribute ("alias",
9016 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
9017 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
9018 && TREE_CODE (arg1) == ADDR_EXPR
9019 && DECL_P (TREE_OPERAND (arg1, 0))
9020 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
9021 && ! lookup_attribute ("alias",
9022 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
9023 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
9024 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
9025 ? code == EQ_EXPR : code != EQ_EXPR,
9026 type);
9028 /* If this is a comparison of two exprs that look like an
9029 ARRAY_REF of the same object, then we can fold this to a
9030 comparison of the two offsets. */
9031 if (TREE_CODE_CLASS (code) == tcc_comparison)
9033 tree base0, offset0, base1, offset1;
9035 if (extract_array_ref (arg0, &base0, &offset0)
9036 && extract_array_ref (arg1, &base1, &offset1)
9037 && operand_equal_p (base0, base1, 0))
9039 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))
9040 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))))
9041 offset0 = NULL_TREE;
9042 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))
9043 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))))
9044 offset1 = NULL_TREE;
9045 if (offset0 == NULL_TREE
9046 && offset1 == NULL_TREE)
9048 offset0 = integer_zero_node;
9049 offset1 = integer_zero_node;
9051 else if (offset0 == NULL_TREE)
9052 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
9053 else if (offset1 == NULL_TREE)
9054 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
9056 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
9057 return fold_build2 (code, type, offset0, offset1);
9061 /* Transform comparisons of the form X +- C CMP X. */
9062 if ((code != EQ_EXPR && code != NE_EXPR)
9063 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9064 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9065 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9066 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
9067 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9068 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9069 && !(flag_wrapv || flag_trapv))))
9071 tree arg01 = TREE_OPERAND (arg0, 1);
9072 enum tree_code code0 = TREE_CODE (arg0);
9073 int is_positive;
9075 if (TREE_CODE (arg01) == REAL_CST)
9076 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
9077 else
9078 is_positive = tree_int_cst_sgn (arg01);
9080 /* (X - c) > X becomes false. */
9081 if (code == GT_EXPR
9082 && ((code0 == MINUS_EXPR && is_positive >= 0)
9083 || (code0 == PLUS_EXPR && is_positive <= 0)))
9084 return constant_boolean_node (0, type);
9086 /* Likewise (X + c) < X becomes false. */
9087 if (code == LT_EXPR
9088 && ((code0 == PLUS_EXPR && is_positive >= 0)
9089 || (code0 == MINUS_EXPR && is_positive <= 0)))
9090 return constant_boolean_node (0, type);
9092 /* Convert (X - c) <= X to true. */
9093 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9094 && code == LE_EXPR
9095 && ((code0 == MINUS_EXPR && is_positive >= 0)
9096 || (code0 == PLUS_EXPR && is_positive <= 0)))
9097 return constant_boolean_node (1, type);
9099 /* Convert (X + c) >= X to true. */
9100 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9101 && code == GE_EXPR
9102 && ((code0 == PLUS_EXPR && is_positive >= 0)
9103 || (code0 == MINUS_EXPR && is_positive <= 0)))
9104 return constant_boolean_node (1, type);
9106 if (TREE_CODE (arg01) == INTEGER_CST)
9108 /* Convert X + c > X and X - c < X to true for integers. */
9109 if (code == GT_EXPR
9110 && ((code0 == PLUS_EXPR && is_positive > 0)
9111 || (code0 == MINUS_EXPR && is_positive < 0)))
9112 return constant_boolean_node (1, type);
9114 if (code == LT_EXPR
9115 && ((code0 == MINUS_EXPR && is_positive > 0)
9116 || (code0 == PLUS_EXPR && is_positive < 0)))
9117 return constant_boolean_node (1, type);
9119 /* Convert X + c <= X and X - c >= X to false for integers. */
9120 if (code == LE_EXPR
9121 && ((code0 == PLUS_EXPR && is_positive > 0)
9122 || (code0 == MINUS_EXPR && is_positive < 0)))
9123 return constant_boolean_node (0, type);
9125 if (code == GE_EXPR
9126 && ((code0 == MINUS_EXPR && is_positive > 0)
9127 || (code0 == PLUS_EXPR && is_positive < 0)))
9128 return constant_boolean_node (0, type);
9132 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9134 tree targ0 = strip_float_extensions (arg0);
9135 tree targ1 = strip_float_extensions (arg1);
9136 tree newtype = TREE_TYPE (targ0);
9138 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9139 newtype = TREE_TYPE (targ1);
9141 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9142 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9143 return fold_build2 (code, type, fold_convert (newtype, targ0),
9144 fold_convert (newtype, targ1));
9146 /* (-a) CMP (-b) -> b CMP a */
9147 if (TREE_CODE (arg0) == NEGATE_EXPR
9148 && TREE_CODE (arg1) == NEGATE_EXPR)
9149 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9150 TREE_OPERAND (arg0, 0));
9152 if (TREE_CODE (arg1) == REAL_CST)
9154 REAL_VALUE_TYPE cst;
9155 cst = TREE_REAL_CST (arg1);
9157 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9158 if (TREE_CODE (arg0) == NEGATE_EXPR)
9159 return
9160 fold_build2 (swap_tree_comparison (code), type,
9161 TREE_OPERAND (arg0, 0),
9162 build_real (TREE_TYPE (arg1),
9163 REAL_VALUE_NEGATE (cst)));
9165 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9166 /* a CMP (-0) -> a CMP 0 */
9167 if (REAL_VALUE_MINUS_ZERO (cst))
9168 return fold_build2 (code, type, arg0,
9169 build_real (TREE_TYPE (arg1), dconst0));
9171 /* x != NaN is always true, other ops are always false. */
9172 if (REAL_VALUE_ISNAN (cst)
9173 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9175 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9176 return omit_one_operand (type, tem, arg0);
9179 /* Fold comparisons against infinity. */
9180 if (REAL_VALUE_ISINF (cst))
9182 tem = fold_inf_compare (code, type, arg0, arg1);
9183 if (tem != NULL_TREE)
9184 return tem;
9188 /* If this is a comparison of a real constant with a PLUS_EXPR
9189 or a MINUS_EXPR of a real constant, we can convert it into a
9190 comparison with a revised real constant as long as no overflow
9191 occurs when unsafe_math_optimizations are enabled. */
9192 if (flag_unsafe_math_optimizations
9193 && TREE_CODE (arg1) == REAL_CST
9194 && (TREE_CODE (arg0) == PLUS_EXPR
9195 || TREE_CODE (arg0) == MINUS_EXPR)
9196 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9197 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9198 ? MINUS_EXPR : PLUS_EXPR,
9199 arg1, TREE_OPERAND (arg0, 1), 0))
9200 && ! TREE_CONSTANT_OVERFLOW (tem))
9201 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9203 /* Likewise, we can simplify a comparison of a real constant with
9204 a MINUS_EXPR whose first operand is also a real constant, i.e.
9205 (c1 - x) < c2 becomes x > c1-c2. */
9206 if (flag_unsafe_math_optimizations
9207 && TREE_CODE (arg1) == REAL_CST
9208 && TREE_CODE (arg0) == MINUS_EXPR
9209 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9210 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9211 arg1, 0))
9212 && ! TREE_CONSTANT_OVERFLOW (tem))
9213 return fold_build2 (swap_tree_comparison (code), type,
9214 TREE_OPERAND (arg0, 1), tem);
9216 /* Fold comparisons against built-in math functions. */
9217 if (TREE_CODE (arg1) == REAL_CST
9218 && flag_unsafe_math_optimizations
9219 && ! flag_errno_math)
9221 enum built_in_function fcode = builtin_mathfn_code (arg0);
9223 if (fcode != END_BUILTINS)
9225 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9226 if (tem != NULL_TREE)
9227 return tem;
9232 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9233 if (TREE_CONSTANT (arg1)
9234 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9235 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9236 /* This optimization is invalid for ordered comparisons
9237 if CONST+INCR overflows or if foo+incr might overflow.
9238 This optimization is invalid for floating point due to rounding.
9239 For pointer types we assume overflow doesn't happen. */
9240 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9241 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9242 && (code == EQ_EXPR || code == NE_EXPR))))
9244 tree varop, newconst;
9246 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9248 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9249 arg1, TREE_OPERAND (arg0, 1));
9250 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9251 TREE_OPERAND (arg0, 0),
9252 TREE_OPERAND (arg0, 1));
9254 else
9256 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9257 arg1, TREE_OPERAND (arg0, 1));
9258 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9259 TREE_OPERAND (arg0, 0),
9260 TREE_OPERAND (arg0, 1));
9264 /* If VAROP is a reference to a bitfield, we must mask
9265 the constant by the width of the field. */
9266 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9267 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9268 && host_integerp (DECL_SIZE (TREE_OPERAND
9269 (TREE_OPERAND (varop, 0), 1)), 1))
9271 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9272 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9273 tree folded_compare, shift;
9275 /* First check whether the comparison would come out
9276 always the same. If we don't do that we would
9277 change the meaning with the masking. */
9278 folded_compare = fold_build2 (code, type,
9279 TREE_OPERAND (varop, 0), arg1);
9280 if (integer_zerop (folded_compare)
9281 || integer_onep (folded_compare))
9282 return omit_one_operand (type, folded_compare, varop);
9284 shift = build_int_cst (NULL_TREE,
9285 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9286 shift = fold_convert (TREE_TYPE (varop), shift);
9287 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9288 newconst, shift);
9289 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9290 newconst, shift);
9293 return fold_build2 (code, type, varop, newconst);
9296 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9297 This transformation affects the cases which are handled in later
9298 optimizations involving comparisons with non-negative constants. */
9299 if (TREE_CODE (arg1) == INTEGER_CST
9300 && TREE_CODE (arg0) != INTEGER_CST
9301 && tree_int_cst_sgn (arg1) > 0)
9303 switch (code)
9305 case GE_EXPR:
9306 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9307 return fold_build2 (GT_EXPR, type, arg0, arg1);
9309 case LT_EXPR:
9310 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9311 return fold_build2 (LE_EXPR, type, arg0, arg1);
9313 default:
9314 break;
9318 /* Comparisons with the highest or lowest possible integer of
9319 the specified size will have known values. */
9321 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9323 if (TREE_CODE (arg1) == INTEGER_CST
9324 && ! TREE_CONSTANT_OVERFLOW (arg1)
9325 && width <= 2 * HOST_BITS_PER_WIDE_INT
9326 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9327 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9329 HOST_WIDE_INT signed_max_hi;
9330 unsigned HOST_WIDE_INT signed_max_lo;
9331 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9333 if (width <= HOST_BITS_PER_WIDE_INT)
9335 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9336 - 1;
9337 signed_max_hi = 0;
9338 max_hi = 0;
9340 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9342 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9343 min_lo = 0;
9344 min_hi = 0;
9346 else
9348 max_lo = signed_max_lo;
9349 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9350 min_hi = -1;
9353 else
9355 width -= HOST_BITS_PER_WIDE_INT;
9356 signed_max_lo = -1;
9357 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9358 - 1;
9359 max_lo = -1;
9360 min_lo = 0;
9362 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9364 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9365 min_hi = 0;
9367 else
9369 max_hi = signed_max_hi;
9370 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9374 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9375 && TREE_INT_CST_LOW (arg1) == max_lo)
9376 switch (code)
9378 case GT_EXPR:
9379 return omit_one_operand (type, integer_zero_node, arg0);
9381 case GE_EXPR:
9382 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9384 case LE_EXPR:
9385 return omit_one_operand (type, integer_one_node, arg0);
9387 case LT_EXPR:
9388 return fold_build2 (NE_EXPR, type, arg0, arg1);
9390 /* The GE_EXPR and LT_EXPR cases above are not normally
9391 reached because of previous transformations. */
9393 default:
9394 break;
9396 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9397 == max_hi
9398 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9399 switch (code)
9401 case GT_EXPR:
9402 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9403 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9404 case LE_EXPR:
9405 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9406 return fold_build2 (NE_EXPR, type, arg0, arg1);
9407 default:
9408 break;
9410 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9411 == min_hi
9412 && TREE_INT_CST_LOW (arg1) == min_lo)
9413 switch (code)
9415 case LT_EXPR:
9416 return omit_one_operand (type, integer_zero_node, arg0);
9418 case LE_EXPR:
9419 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9421 case GE_EXPR:
9422 return omit_one_operand (type, integer_one_node, arg0);
9424 case GT_EXPR:
9425 return fold_build2 (NE_EXPR, type, arg0, arg1);
9427 default:
9428 break;
9430 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9431 == min_hi
9432 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9433 switch (code)
9435 case GE_EXPR:
9436 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9437 return fold_build2 (NE_EXPR, type, arg0, arg1);
9438 case LT_EXPR:
9439 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9440 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9441 default:
9442 break;
9445 else if (!in_gimple_form
9446 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9447 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9448 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9449 /* signed_type does not work on pointer types. */
9450 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9452 /* The following case also applies to X < signed_max+1
9453 and X >= signed_max+1 because previous transformations. */
9454 if (code == LE_EXPR || code == GT_EXPR)
9456 tree st0, st1;
9457 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9458 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9459 return fold
9460 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9461 type, fold_convert (st0, arg0),
9462 fold_convert (st1, integer_zero_node)));
9468 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9469 a MINUS_EXPR of a constant, we can convert it into a comparison with
9470 a revised constant as long as no overflow occurs. */
9471 if ((code == EQ_EXPR || code == NE_EXPR)
9472 && TREE_CODE (arg1) == INTEGER_CST
9473 && (TREE_CODE (arg0) == PLUS_EXPR
9474 || TREE_CODE (arg0) == MINUS_EXPR)
9475 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9476 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9477 ? MINUS_EXPR : PLUS_EXPR,
9478 arg1, TREE_OPERAND (arg0, 1), 0))
9479 && ! TREE_CONSTANT_OVERFLOW (tem))
9480 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9482 /* Similarly for a NEGATE_EXPR. */
9483 else if ((code == EQ_EXPR || code == NE_EXPR)
9484 && TREE_CODE (arg0) == NEGATE_EXPR
9485 && TREE_CODE (arg1) == INTEGER_CST
9486 && 0 != (tem = negate_expr (arg1))
9487 && TREE_CODE (tem) == INTEGER_CST
9488 && ! TREE_CONSTANT_OVERFLOW (tem))
9489 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9491 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9492 for !=. Don't do this for ordered comparisons due to overflow. */
9493 else if ((code == NE_EXPR || code == EQ_EXPR)
9494 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9495 return fold_build2 (code, type,
9496 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9498 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9499 && (TREE_CODE (arg0) == NOP_EXPR
9500 || TREE_CODE (arg0) == CONVERT_EXPR))
9502 /* If we are widening one operand of an integer comparison,
9503 see if the other operand is similarly being widened. Perhaps we
9504 can do the comparison in the narrower type. */
9505 tem = fold_widened_comparison (code, type, arg0, arg1);
9506 if (tem)
9507 return tem;
9509 /* Or if we are changing signedness. */
9510 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9511 if (tem)
9512 return tem;
9515 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9516 constant, we can simplify it. */
9517 else if (TREE_CODE (arg1) == INTEGER_CST
9518 && (TREE_CODE (arg0) == MIN_EXPR
9519 || TREE_CODE (arg0) == MAX_EXPR)
9520 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9522 tem = optimize_minmax_comparison (code, type, op0, op1);
9523 if (tem)
9524 return tem;
9526 return NULL_TREE;
9529 /* If we are comparing an ABS_EXPR with a constant, we can
9530 convert all the cases into explicit comparisons, but they may
9531 well not be faster than doing the ABS and one comparison.
9532 But ABS (X) <= C is a range comparison, which becomes a subtraction
9533 and a comparison, and is probably faster. */
9534 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9535 && TREE_CODE (arg0) == ABS_EXPR
9536 && ! TREE_SIDE_EFFECTS (arg0)
9537 && (0 != (tem = negate_expr (arg1)))
9538 && TREE_CODE (tem) == INTEGER_CST
9539 && ! TREE_CONSTANT_OVERFLOW (tem))
9540 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9541 build2 (GE_EXPR, type,
9542 TREE_OPERAND (arg0, 0), tem),
9543 build2 (LE_EXPR, type,
9544 TREE_OPERAND (arg0, 0), arg1));
9546 /* Convert ABS_EXPR<x> >= 0 to true. */
9547 else if (code == GE_EXPR
9548 && tree_expr_nonnegative_p (arg0)
9549 && (integer_zerop (arg1)
9550 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9551 && real_zerop (arg1))))
9552 return omit_one_operand (type, integer_one_node, arg0);
9554 /* Convert ABS_EXPR<x> < 0 to false. */
9555 else if (code == LT_EXPR
9556 && tree_expr_nonnegative_p (arg0)
9557 && (integer_zerop (arg1) || real_zerop (arg1)))
9558 return omit_one_operand (type, integer_zero_node, arg0);
9560 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9561 else if ((code == EQ_EXPR || code == NE_EXPR)
9562 && TREE_CODE (arg0) == ABS_EXPR
9563 && (integer_zerop (arg1) || real_zerop (arg1)))
9564 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9566 /* If this is an EQ or NE comparison with zero and ARG0 is
9567 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9568 two operations, but the latter can be done in one less insn
9569 on machines that have only two-operand insns or on which a
9570 constant cannot be the first operand. */
9571 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9572 && TREE_CODE (arg0) == BIT_AND_EXPR)
9574 tree arg00 = TREE_OPERAND (arg0, 0);
9575 tree arg01 = TREE_OPERAND (arg0, 1);
9576 if (TREE_CODE (arg00) == LSHIFT_EXPR
9577 && integer_onep (TREE_OPERAND (arg00, 0)))
9578 return
9579 fold_build2 (code, type,
9580 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9581 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9582 arg01, TREE_OPERAND (arg00, 1)),
9583 fold_convert (TREE_TYPE (arg0),
9584 integer_one_node)),
9585 arg1);
9586 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9587 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9588 return
9589 fold_build2 (code, type,
9590 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9591 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9592 arg00, TREE_OPERAND (arg01, 1)),
9593 fold_convert (TREE_TYPE (arg0),
9594 integer_one_node)),
9595 arg1);
9598 /* If this is an NE or EQ comparison of zero against the result of a
9599 signed MOD operation whose second operand is a power of 2, make
9600 the MOD operation unsigned since it is simpler and equivalent. */
9601 if ((code == NE_EXPR || code == EQ_EXPR)
9602 && integer_zerop (arg1)
9603 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9604 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9605 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9606 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9607 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9608 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9610 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9611 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9612 fold_convert (newtype,
9613 TREE_OPERAND (arg0, 0)),
9614 fold_convert (newtype,
9615 TREE_OPERAND (arg0, 1)));
9617 return fold_build2 (code, type, newmod,
9618 fold_convert (newtype, arg1));
9621 /* If this is an NE comparison of zero with an AND of one, remove the
9622 comparison since the AND will give the correct value. */
9623 if (code == NE_EXPR && integer_zerop (arg1)
9624 && TREE_CODE (arg0) == BIT_AND_EXPR
9625 && integer_onep (TREE_OPERAND (arg0, 1)))
9626 return fold_convert (type, arg0);
9628 /* If we have (A & C) == C where C is a power of 2, convert this into
9629 (A & C) != 0. Similarly for NE_EXPR. */
9630 if ((code == EQ_EXPR || code == NE_EXPR)
9631 && TREE_CODE (arg0) == BIT_AND_EXPR
9632 && integer_pow2p (TREE_OPERAND (arg0, 1))
9633 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9634 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9635 arg0, fold_convert (TREE_TYPE (arg0),
9636 integer_zero_node));
9638 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9639 bit, then fold the expression into A < 0 or A >= 0. */
9640 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9641 if (tem)
9642 return tem;
9644 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9645 Similarly for NE_EXPR. */
9646 if ((code == EQ_EXPR || code == NE_EXPR)
9647 && TREE_CODE (arg0) == BIT_AND_EXPR
9648 && TREE_CODE (arg1) == INTEGER_CST
9649 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9651 tree notc = fold_build1 (BIT_NOT_EXPR,
9652 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9653 TREE_OPERAND (arg0, 1));
9654 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9655 arg1, notc);
9656 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9657 if (integer_nonzerop (dandnotc))
9658 return omit_one_operand (type, rslt, arg0);
9661 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9662 Similarly for NE_EXPR. */
9663 if ((code == EQ_EXPR || code == NE_EXPR)
9664 && TREE_CODE (arg0) == BIT_IOR_EXPR
9665 && TREE_CODE (arg1) == INTEGER_CST
9666 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9668 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9669 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9670 TREE_OPERAND (arg0, 1), notd);
9671 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9672 if (integer_nonzerop (candnotd))
9673 return omit_one_operand (type, rslt, arg0);
9676 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9677 and similarly for >= into !=. */
9678 if ((code == LT_EXPR || code == GE_EXPR)
9679 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9680 && TREE_CODE (arg1) == LSHIFT_EXPR
9681 && integer_onep (TREE_OPERAND (arg1, 0)))
9682 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9683 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9684 TREE_OPERAND (arg1, 1)),
9685 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9687 else if ((code == LT_EXPR || code == GE_EXPR)
9688 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9689 && (TREE_CODE (arg1) == NOP_EXPR
9690 || TREE_CODE (arg1) == CONVERT_EXPR)
9691 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9692 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9693 return
9694 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9695 fold_convert (TREE_TYPE (arg0),
9696 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9697 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9698 1))),
9699 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9701 /* Simplify comparison of something with itself. (For IEEE
9702 floating-point, we can only do some of these simplifications.) */
9703 if (operand_equal_p (arg0, arg1, 0))
9705 switch (code)
9707 case EQ_EXPR:
9708 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9709 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9710 return constant_boolean_node (1, type);
9711 break;
9713 case GE_EXPR:
9714 case LE_EXPR:
9715 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9716 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9717 return constant_boolean_node (1, type);
9718 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9720 case NE_EXPR:
9721 /* For NE, we can only do this simplification if integer
9722 or we don't honor IEEE floating point NaNs. */
9723 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9724 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9725 break;
9726 /* ... fall through ... */
9727 case GT_EXPR:
9728 case LT_EXPR:
9729 return constant_boolean_node (0, type);
9730 default:
9731 gcc_unreachable ();
9735 /* If we are comparing an expression that just has comparisons
9736 of two integer values, arithmetic expressions of those comparisons,
9737 and constants, we can simplify it. There are only three cases
9738 to check: the two values can either be equal, the first can be
9739 greater, or the second can be greater. Fold the expression for
9740 those three values. Since each value must be 0 or 1, we have
9741 eight possibilities, each of which corresponds to the constant 0
9742 or 1 or one of the six possible comparisons.
9744 This handles common cases like (a > b) == 0 but also handles
9745 expressions like ((x > y) - (y > x)) > 0, which supposedly
9746 occur in macroized code. */
9748 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9750 tree cval1 = 0, cval2 = 0;
9751 int save_p = 0;
9753 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9754 /* Don't handle degenerate cases here; they should already
9755 have been handled anyway. */
9756 && cval1 != 0 && cval2 != 0
9757 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9758 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9759 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9760 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9761 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9762 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9763 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9765 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9766 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9768 /* We can't just pass T to eval_subst in case cval1 or cval2
9769 was the same as ARG1. */
9771 tree high_result
9772 = fold_build2 (code, type,
9773 eval_subst (arg0, cval1, maxval,
9774 cval2, minval),
9775 arg1);
9776 tree equal_result
9777 = fold_build2 (code, type,
9778 eval_subst (arg0, cval1, maxval,
9779 cval2, maxval),
9780 arg1);
9781 tree low_result
9782 = fold_build2 (code, type,
9783 eval_subst (arg0, cval1, minval,
9784 cval2, maxval),
9785 arg1);
9787 /* All three of these results should be 0 or 1. Confirm they
9788 are. Then use those values to select the proper code
9789 to use. */
9791 if ((integer_zerop (high_result)
9792 || integer_onep (high_result))
9793 && (integer_zerop (equal_result)
9794 || integer_onep (equal_result))
9795 && (integer_zerop (low_result)
9796 || integer_onep (low_result)))
9798 /* Make a 3-bit mask with the high-order bit being the
9799 value for `>', the next for '=', and the low for '<'. */
9800 switch ((integer_onep (high_result) * 4)
9801 + (integer_onep (equal_result) * 2)
9802 + integer_onep (low_result))
9804 case 0:
9805 /* Always false. */
9806 return omit_one_operand (type, integer_zero_node, arg0);
9807 case 1:
9808 code = LT_EXPR;
9809 break;
9810 case 2:
9811 code = EQ_EXPR;
9812 break;
9813 case 3:
9814 code = LE_EXPR;
9815 break;
9816 case 4:
9817 code = GT_EXPR;
9818 break;
9819 case 5:
9820 code = NE_EXPR;
9821 break;
9822 case 6:
9823 code = GE_EXPR;
9824 break;
9825 case 7:
9826 /* Always true. */
9827 return omit_one_operand (type, integer_one_node, arg0);
9830 if (save_p)
9831 return save_expr (build2 (code, type, cval1, cval2));
9832 else
9833 return fold_build2 (code, type, cval1, cval2);
9838 /* If this is a comparison of a field, we may be able to simplify it. */
9839 if (((TREE_CODE (arg0) == COMPONENT_REF
9840 && lang_hooks.can_use_bit_fields_p ())
9841 || TREE_CODE (arg0) == BIT_FIELD_REF)
9842 && (code == EQ_EXPR || code == NE_EXPR)
9843 /* Handle the constant case even without -O
9844 to make sure the warnings are given. */
9845 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9847 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9848 if (t1)
9849 return t1;
9852 /* Fold a comparison of the address of COMPONENT_REFs with the same
9853 type and component to a comparison of the address of the base
9854 object. In short, &x->a OP &y->a to x OP y and
9855 &x->a OP &y.a to x OP &y */
9856 if (TREE_CODE (arg0) == ADDR_EXPR
9857 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9858 && TREE_CODE (arg1) == ADDR_EXPR
9859 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9861 tree cref0 = TREE_OPERAND (arg0, 0);
9862 tree cref1 = TREE_OPERAND (arg1, 0);
9863 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9865 tree op0 = TREE_OPERAND (cref0, 0);
9866 tree op1 = TREE_OPERAND (cref1, 0);
9867 return fold_build2 (code, type,
9868 build_fold_addr_expr (op0),
9869 build_fold_addr_expr (op1));
9873 /* If this is a comparison of complex values and either or both sides
9874 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
9875 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
9876 This may prevent needless evaluations. */
9877 if ((code == EQ_EXPR || code == NE_EXPR)
9878 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
9879 && (TREE_CODE (arg0) == COMPLEX_EXPR
9880 || TREE_CODE (arg1) == COMPLEX_EXPR
9881 || TREE_CODE (arg0) == COMPLEX_CST
9882 || TREE_CODE (arg1) == COMPLEX_CST))
9884 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
9885 tree real0, imag0, real1, imag1;
9887 arg0 = save_expr (arg0);
9888 arg1 = save_expr (arg1);
9889 real0 = fold_build1 (REALPART_EXPR, subtype, arg0);
9890 imag0 = fold_build1 (IMAGPART_EXPR, subtype, arg0);
9891 real1 = fold_build1 (REALPART_EXPR, subtype, arg1);
9892 imag1 = fold_build1 (IMAGPART_EXPR, subtype, arg1);
9894 return fold_build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
9895 : TRUTH_ORIF_EXPR),
9896 type,
9897 fold_build2 (code, type, real0, real1),
9898 fold_build2 (code, type, imag0, imag1));
9901 /* Optimize comparisons of strlen vs zero to a compare of the
9902 first character of the string vs zero. To wit,
9903 strlen(ptr) == 0 => *ptr == 0
9904 strlen(ptr) != 0 => *ptr != 0
9905 Other cases should reduce to one of these two (or a constant)
9906 due to the return value of strlen being unsigned. */
9907 if ((code == EQ_EXPR || code == NE_EXPR)
9908 && integer_zerop (arg1)
9909 && TREE_CODE (arg0) == CALL_EXPR)
9911 tree fndecl = get_callee_fndecl (arg0);
9912 tree arglist;
9914 if (fndecl
9915 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9916 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9917 && (arglist = TREE_OPERAND (arg0, 1))
9918 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9919 && ! TREE_CHAIN (arglist))
9921 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9922 return fold_build2 (code, type, iref,
9923 build_int_cst (TREE_TYPE (iref), 0));
9927 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9928 into a single range test. */
9929 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9930 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9931 && TREE_CODE (arg1) == INTEGER_CST
9932 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9933 && !integer_zerop (TREE_OPERAND (arg0, 1))
9934 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9935 && !TREE_OVERFLOW (arg1))
9937 t1 = fold_div_compare (code, type, arg0, arg1);
9938 if (t1 != NULL_TREE)
9939 return t1;
9942 if ((code == EQ_EXPR || code == NE_EXPR)
9943 && !TREE_SIDE_EFFECTS (arg0)
9944 && integer_zerop (arg1)
9945 && tree_expr_nonzero_p (arg0))
9946 return constant_boolean_node (code==NE_EXPR, type);
9948 t1 = fold_relational_const (code, type, arg0, arg1);
9949 return t1 == NULL_TREE ? NULL_TREE : t1;
9951 case UNORDERED_EXPR:
9952 case ORDERED_EXPR:
9953 case UNLT_EXPR:
9954 case UNLE_EXPR:
9955 case UNGT_EXPR:
9956 case UNGE_EXPR:
9957 case UNEQ_EXPR:
9958 case LTGT_EXPR:
9959 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9961 t1 = fold_relational_const (code, type, arg0, arg1);
9962 if (t1 != NULL_TREE)
9963 return t1;
9966 /* If the first operand is NaN, the result is constant. */
9967 if (TREE_CODE (arg0) == REAL_CST
9968 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9969 && (code != LTGT_EXPR || ! flag_trapping_math))
9971 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9972 ? integer_zero_node
9973 : integer_one_node;
9974 return omit_one_operand (type, t1, arg1);
9977 /* If the second operand is NaN, the result is constant. */
9978 if (TREE_CODE (arg1) == REAL_CST
9979 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9980 && (code != LTGT_EXPR || ! flag_trapping_math))
9982 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9983 ? integer_zero_node
9984 : integer_one_node;
9985 return omit_one_operand (type, t1, arg0);
9988 /* Simplify unordered comparison of something with itself. */
9989 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9990 && operand_equal_p (arg0, arg1, 0))
9991 return constant_boolean_node (1, type);
9993 if (code == LTGT_EXPR
9994 && !flag_trapping_math
9995 && operand_equal_p (arg0, arg1, 0))
9996 return constant_boolean_node (0, type);
9998 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
10000 tree targ0 = strip_float_extensions (arg0);
10001 tree targ1 = strip_float_extensions (arg1);
10002 tree newtype = TREE_TYPE (targ0);
10004 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
10005 newtype = TREE_TYPE (targ1);
10007 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
10008 return fold_build2 (code, type, fold_convert (newtype, targ0),
10009 fold_convert (newtype, targ1));
10012 return NULL_TREE;
10014 case COMPOUND_EXPR:
10015 /* When pedantic, a compound expression can be neither an lvalue
10016 nor an integer constant expression. */
10017 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
10018 return NULL_TREE;
10019 /* Don't let (0, 0) be null pointer constant. */
10020 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
10021 : fold_convert (type, arg1);
10022 return pedantic_non_lvalue (tem);
10024 case COMPLEX_EXPR:
10025 if (wins)
10026 return build_complex (type, arg0, arg1);
10027 return NULL_TREE;
10029 case ASSERT_EXPR:
10030 /* An ASSERT_EXPR should never be passed to fold_binary. */
10031 gcc_unreachable ();
10033 default:
10034 return NULL_TREE;
10035 } /* switch (code) */
10038 /* Callback for walk_tree, looking for LABEL_EXPR.
10039 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
10040 Do not check the sub-tree of GOTO_EXPR. */
10042 static tree
10043 contains_label_1 (tree *tp,
10044 int *walk_subtrees,
10045 void *data ATTRIBUTE_UNUSED)
10047 switch (TREE_CODE (*tp))
10049 case LABEL_EXPR:
10050 return *tp;
10051 case GOTO_EXPR:
10052 *walk_subtrees = 0;
10053 /* no break */
10054 default:
10055 return NULL_TREE;
10059 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
10060 accessible from outside the sub-tree. Returns NULL_TREE if no
10061 addressable label is found. */
10063 static bool
10064 contains_label_p (tree st)
10066 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
10069 /* Fold a ternary expression of code CODE and type TYPE with operands
10070 OP0, OP1, and OP2. Return the folded expression if folding is
10071 successful. Otherwise, return NULL_TREE. */
10073 tree
10074 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10076 tree tem;
10077 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
10078 enum tree_code_class kind = TREE_CODE_CLASS (code);
10080 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10081 && TREE_CODE_LENGTH (code) == 3);
10083 /* Strip any conversions that don't change the mode. This is safe
10084 for every expression, except for a comparison expression because
10085 its signedness is derived from its operands. So, in the latter
10086 case, only strip conversions that don't change the signedness.
10088 Note that this is done as an internal manipulation within the
10089 constant folder, in order to find the simplest representation of
10090 the arguments so that their form can be studied. In any cases,
10091 the appropriate type conversions should be put back in the tree
10092 that will get out of the constant folder. */
10093 if (op0)
10095 arg0 = op0;
10096 STRIP_NOPS (arg0);
10099 if (op1)
10101 arg1 = op1;
10102 STRIP_NOPS (arg1);
10105 switch (code)
10107 case COMPONENT_REF:
10108 if (TREE_CODE (arg0) == CONSTRUCTOR
10109 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
10111 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
10112 if (m)
10113 return TREE_VALUE (m);
10115 return NULL_TREE;
10117 case COND_EXPR:
10118 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
10119 so all simple results must be passed through pedantic_non_lvalue. */
10120 if (TREE_CODE (arg0) == INTEGER_CST)
10122 tree unused_op = integer_zerop (arg0) ? op1 : op2;
10123 tem = integer_zerop (arg0) ? op2 : op1;
10124 /* Only optimize constant conditions when the selected branch
10125 has the same type as the COND_EXPR. This avoids optimizing
10126 away "c ? x : throw", where the throw has a void type.
10127 Avoid throwing away that operand which contains label. */
10128 if ((!TREE_SIDE_EFFECTS (unused_op)
10129 || !contains_label_p (unused_op))
10130 && (! VOID_TYPE_P (TREE_TYPE (tem))
10131 || VOID_TYPE_P (type)))
10132 return pedantic_non_lvalue (tem);
10133 return NULL_TREE;
10135 if (operand_equal_p (arg1, op2, 0))
10136 return pedantic_omit_one_operand (type, arg1, arg0);
10138 /* If we have A op B ? A : C, we may be able to convert this to a
10139 simpler expression, depending on the operation and the values
10140 of B and C. Signed zeros prevent all of these transformations,
10141 for reasons given above each one.
10143 Also try swapping the arguments and inverting the conditional. */
10144 if (COMPARISON_CLASS_P (arg0)
10145 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10146 arg1, TREE_OPERAND (arg0, 1))
10147 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
10149 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
10150 if (tem)
10151 return tem;
10154 if (COMPARISON_CLASS_P (arg0)
10155 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10156 op2,
10157 TREE_OPERAND (arg0, 1))
10158 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10160 tem = invert_truthvalue (arg0);
10161 if (COMPARISON_CLASS_P (tem))
10163 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10164 if (tem)
10165 return tem;
10169 /* If the second operand is simpler than the third, swap them
10170 since that produces better jump optimization results. */
10171 if (tree_swap_operands_p (op1, op2, false))
10173 /* See if this can be inverted. If it can't, possibly because
10174 it was a floating-point inequality comparison, don't do
10175 anything. */
10176 tem = invert_truthvalue (arg0);
10178 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10179 return fold_build3 (code, type, tem, op2, op1);
10182 /* Convert A ? 1 : 0 to simply A. */
10183 if (integer_onep (op1)
10184 && integer_zerop (op2)
10185 /* If we try to convert OP0 to our type, the
10186 call to fold will try to move the conversion inside
10187 a COND, which will recurse. In that case, the COND_EXPR
10188 is probably the best choice, so leave it alone. */
10189 && type == TREE_TYPE (arg0))
10190 return pedantic_non_lvalue (arg0);
10192 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10193 over COND_EXPR in cases such as floating point comparisons. */
10194 if (integer_zerop (op1)
10195 && integer_onep (op2)
10196 && truth_value_p (TREE_CODE (arg0)))
10197 return pedantic_non_lvalue (fold_convert (type,
10198 invert_truthvalue (arg0)));
10200 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10201 if (TREE_CODE (arg0) == LT_EXPR
10202 && integer_zerop (TREE_OPERAND (arg0, 1))
10203 && integer_zerop (op2)
10204 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10205 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10206 TREE_TYPE (tem), tem, arg1));
10208 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10209 already handled above. */
10210 if (TREE_CODE (arg0) == BIT_AND_EXPR
10211 && integer_onep (TREE_OPERAND (arg0, 1))
10212 && integer_zerop (op2)
10213 && integer_pow2p (arg1))
10215 tree tem = TREE_OPERAND (arg0, 0);
10216 STRIP_NOPS (tem);
10217 if (TREE_CODE (tem) == RSHIFT_EXPR
10218 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10219 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10220 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10221 return fold_build2 (BIT_AND_EXPR, type,
10222 TREE_OPERAND (tem, 0), arg1);
10225 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10226 is probably obsolete because the first operand should be a
10227 truth value (that's why we have the two cases above), but let's
10228 leave it in until we can confirm this for all front-ends. */
10229 if (integer_zerop (op2)
10230 && TREE_CODE (arg0) == NE_EXPR
10231 && integer_zerop (TREE_OPERAND (arg0, 1))
10232 && integer_pow2p (arg1)
10233 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10234 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10235 arg1, OEP_ONLY_CONST))
10236 return pedantic_non_lvalue (fold_convert (type,
10237 TREE_OPERAND (arg0, 0)));
10239 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10240 if (integer_zerop (op2)
10241 && truth_value_p (TREE_CODE (arg0))
10242 && truth_value_p (TREE_CODE (arg1)))
10243 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10245 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10246 if (integer_onep (op2)
10247 && truth_value_p (TREE_CODE (arg0))
10248 && truth_value_p (TREE_CODE (arg1)))
10250 /* Only perform transformation if ARG0 is easily inverted. */
10251 tem = invert_truthvalue (arg0);
10252 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10253 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10256 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10257 if (integer_zerop (arg1)
10258 && truth_value_p (TREE_CODE (arg0))
10259 && truth_value_p (TREE_CODE (op2)))
10261 /* Only perform transformation if ARG0 is easily inverted. */
10262 tem = invert_truthvalue (arg0);
10263 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10264 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10267 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10268 if (integer_onep (arg1)
10269 && truth_value_p (TREE_CODE (arg0))
10270 && truth_value_p (TREE_CODE (op2)))
10271 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
10273 return NULL_TREE;
10275 case CALL_EXPR:
10276 /* Check for a built-in function. */
10277 if (TREE_CODE (op0) == ADDR_EXPR
10278 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10279 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10281 tree fndecl = TREE_OPERAND (op0, 0);
10282 tree arglist = op1;
10283 tree tmp = fold_builtin (fndecl, arglist, false);
10284 if (tmp)
10285 return tmp;
10287 return NULL_TREE;
10289 case BIT_FIELD_REF:
10290 if (TREE_CODE (arg0) == VECTOR_CST
10291 && type == TREE_TYPE (TREE_TYPE (arg0))
10292 && host_integerp (arg1, 1)
10293 && host_integerp (op2, 1))
10295 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10296 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10298 if (width != 0
10299 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10300 && (idx % width) == 0
10301 && (idx = idx / width)
10302 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10304 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10305 while (idx-- > 0)
10306 elements = TREE_CHAIN (elements);
10307 return TREE_VALUE (elements);
10310 return NULL_TREE;
10312 default:
10313 return NULL_TREE;
10314 } /* switch (code) */
10317 /* Perform constant folding and related simplification of EXPR.
10318 The related simplifications include x*1 => x, x*0 => 0, etc.,
10319 and application of the associative law.
10320 NOP_EXPR conversions may be removed freely (as long as we
10321 are careful not to change the type of the overall expression).
10322 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10323 but we can constant-fold them if they have constant operands. */
10325 #ifdef ENABLE_FOLD_CHECKING
10326 # define fold(x) fold_1 (x)
10327 static tree fold_1 (tree);
10328 static
10329 #endif
10330 tree
10331 fold (tree expr)
10333 const tree t = expr;
10334 enum tree_code code = TREE_CODE (t);
10335 enum tree_code_class kind = TREE_CODE_CLASS (code);
10336 tree tem;
10338 /* Return right away if a constant. */
10339 if (kind == tcc_constant)
10340 return t;
10342 if (IS_EXPR_CODE_CLASS (kind))
10344 tree type = TREE_TYPE (t);
10345 tree op0, op1, op2;
10347 switch (TREE_CODE_LENGTH (code))
10349 case 1:
10350 op0 = TREE_OPERAND (t, 0);
10351 tem = fold_unary (code, type, op0);
10352 return tem ? tem : expr;
10353 case 2:
10354 op0 = TREE_OPERAND (t, 0);
10355 op1 = TREE_OPERAND (t, 1);
10356 tem = fold_binary (code, type, op0, op1);
10357 return tem ? tem : expr;
10358 case 3:
10359 op0 = TREE_OPERAND (t, 0);
10360 op1 = TREE_OPERAND (t, 1);
10361 op2 = TREE_OPERAND (t, 2);
10362 tem = fold_ternary (code, type, op0, op1, op2);
10363 return tem ? tem : expr;
10364 default:
10365 break;
10369 switch (code)
10371 case CONST_DECL:
10372 return fold (DECL_INITIAL (t));
10374 default:
10375 return t;
10376 } /* switch (code) */
10379 #ifdef ENABLE_FOLD_CHECKING
10380 #undef fold
10382 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10383 static void fold_check_failed (tree, tree);
10384 void print_fold_checksum (tree);
10386 /* When --enable-checking=fold, compute a digest of expr before
10387 and after actual fold call to see if fold did not accidentally
10388 change original expr. */
10390 tree
10391 fold (tree expr)
10393 tree ret;
10394 struct md5_ctx ctx;
10395 unsigned char checksum_before[16], checksum_after[16];
10396 htab_t ht;
10398 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10399 md5_init_ctx (&ctx);
10400 fold_checksum_tree (expr, &ctx, ht);
10401 md5_finish_ctx (&ctx, checksum_before);
10402 htab_empty (ht);
10404 ret = fold_1 (expr);
10406 md5_init_ctx (&ctx);
10407 fold_checksum_tree (expr, &ctx, ht);
10408 md5_finish_ctx (&ctx, checksum_after);
10409 htab_delete (ht);
10411 if (memcmp (checksum_before, checksum_after, 16))
10412 fold_check_failed (expr, ret);
10414 return ret;
10417 void
10418 print_fold_checksum (tree expr)
10420 struct md5_ctx ctx;
10421 unsigned char checksum[16], cnt;
10422 htab_t ht;
10424 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10425 md5_init_ctx (&ctx);
10426 fold_checksum_tree (expr, &ctx, ht);
10427 md5_finish_ctx (&ctx, checksum);
10428 htab_delete (ht);
10429 for (cnt = 0; cnt < 16; ++cnt)
10430 fprintf (stderr, "%02x", checksum[cnt]);
10431 putc ('\n', stderr);
10434 static void
10435 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10437 internal_error ("fold check: original tree changed by fold");
10440 static void
10441 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10443 void **slot;
10444 enum tree_code code;
10445 char buf[sizeof (struct tree_decl)];
10446 int i, len;
10448 recursive_label:
10450 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10451 <= sizeof (struct tree_decl))
10452 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
10453 if (expr == NULL)
10454 return;
10455 slot = htab_find_slot (ht, expr, INSERT);
10456 if (*slot != NULL)
10457 return;
10458 *slot = expr;
10459 code = TREE_CODE (expr);
10460 if (TREE_CODE_CLASS (code) == tcc_declaration
10461 && DECL_ASSEMBLER_NAME_SET_P (expr))
10463 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10464 memcpy (buf, expr, tree_size (expr));
10465 expr = (tree) buf;
10466 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10468 else if (TREE_CODE_CLASS (code) == tcc_type
10469 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10470 || TYPE_CACHED_VALUES_P (expr)
10471 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10473 /* Allow these fields to be modified. */
10474 memcpy (buf, expr, tree_size (expr));
10475 expr = (tree) buf;
10476 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10477 TYPE_POINTER_TO (expr) = NULL;
10478 TYPE_REFERENCE_TO (expr) = NULL;
10479 if (TYPE_CACHED_VALUES_P (expr))
10481 TYPE_CACHED_VALUES_P (expr) = 0;
10482 TYPE_CACHED_VALUES (expr) = NULL;
10485 md5_process_bytes (expr, tree_size (expr), ctx);
10486 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10487 if (TREE_CODE_CLASS (code) != tcc_type
10488 && TREE_CODE_CLASS (code) != tcc_declaration
10489 && code != TREE_LIST)
10490 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10491 switch (TREE_CODE_CLASS (code))
10493 case tcc_constant:
10494 switch (code)
10496 case STRING_CST:
10497 md5_process_bytes (TREE_STRING_POINTER (expr),
10498 TREE_STRING_LENGTH (expr), ctx);
10499 break;
10500 case COMPLEX_CST:
10501 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10502 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10503 break;
10504 case VECTOR_CST:
10505 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10506 break;
10507 default:
10508 break;
10510 break;
10511 case tcc_exceptional:
10512 switch (code)
10514 case TREE_LIST:
10515 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10516 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10517 expr = TREE_CHAIN (expr);
10518 goto recursive_label;
10519 break;
10520 case TREE_VEC:
10521 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10522 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10523 break;
10524 default:
10525 break;
10527 break;
10528 case tcc_expression:
10529 case tcc_reference:
10530 case tcc_comparison:
10531 case tcc_unary:
10532 case tcc_binary:
10533 case tcc_statement:
10534 len = TREE_CODE_LENGTH (code);
10535 for (i = 0; i < len; ++i)
10536 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10537 break;
10538 case tcc_declaration:
10539 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10540 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10541 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10542 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10543 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
10544 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10545 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10546 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10547 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10548 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10549 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10550 break;
10551 case tcc_type:
10552 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10553 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10554 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10555 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10556 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10557 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10558 if (INTEGRAL_TYPE_P (expr)
10559 || SCALAR_FLOAT_TYPE_P (expr))
10561 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10562 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10564 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10565 if (TREE_CODE (expr) == RECORD_TYPE
10566 || TREE_CODE (expr) == UNION_TYPE
10567 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10568 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10569 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10570 break;
10571 default:
10572 break;
10576 #endif
10578 /* Fold a unary tree expression with code CODE of type TYPE with an
10579 operand OP0. Return a folded expression if successful. Otherwise,
10580 return a tree expression with code CODE of type TYPE with an
10581 operand OP0. */
10583 tree
10584 fold_build1 (enum tree_code code, tree type, tree op0)
10586 tree tem = fold_unary (code, type, op0);
10587 if (tem)
10588 return tem;
10590 return build1 (code, type, op0);
10593 /* Fold a binary tree expression with code CODE of type TYPE with
10594 operands OP0 and OP1. Return a folded expression if successful.
10595 Otherwise, return a tree expression with code CODE of type TYPE
10596 with operands OP0 and OP1. */
10598 tree
10599 fold_build2 (enum tree_code code, tree type, tree op0, tree op1)
10601 tree tem = fold_binary (code, type, op0, op1);
10602 if (tem)
10603 return tem;
10605 return build2 (code, type, op0, op1);
10608 /* Fold a ternary tree expression with code CODE of type TYPE with
10609 operands OP0, OP1, and OP2. Return a folded expression if
10610 successful. Otherwise, return a tree expression with code CODE of
10611 type TYPE with operands OP0, OP1, and OP2. */
10613 tree
10614 fold_build3 (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10616 tree tem = fold_ternary (code, type, op0, op1, op2);
10617 if (tem)
10618 return tem;
10620 return build3 (code, type, op0, op1, op2);
10623 /* Perform constant folding and related simplification of initializer
10624 expression EXPR. This behaves identically to "fold" but ignores
10625 potential run-time traps and exceptions that fold must preserve. */
10627 tree
10628 fold_initializer (tree expr)
10630 int saved_signaling_nans = flag_signaling_nans;
10631 int saved_trapping_math = flag_trapping_math;
10632 int saved_rounding_math = flag_rounding_math;
10633 int saved_trapv = flag_trapv;
10634 tree result;
10636 flag_signaling_nans = 0;
10637 flag_trapping_math = 0;
10638 flag_rounding_math = 0;
10639 flag_trapv = 0;
10641 result = fold (expr);
10643 flag_signaling_nans = saved_signaling_nans;
10644 flag_trapping_math = saved_trapping_math;
10645 flag_rounding_math = saved_rounding_math;
10646 flag_trapv = saved_trapv;
10648 return result;
10651 /* Determine if first argument is a multiple of second argument. Return 0 if
10652 it is not, or we cannot easily determined it to be.
10654 An example of the sort of thing we care about (at this point; this routine
10655 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10656 fold cases do now) is discovering that
10658 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10660 is a multiple of
10662 SAVE_EXPR (J * 8)
10664 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10666 This code also handles discovering that
10668 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10670 is a multiple of 8 so we don't have to worry about dealing with a
10671 possible remainder.
10673 Note that we *look* inside a SAVE_EXPR only to determine how it was
10674 calculated; it is not safe for fold to do much of anything else with the
10675 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10676 at run time. For example, the latter example above *cannot* be implemented
10677 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10678 evaluation time of the original SAVE_EXPR is not necessarily the same at
10679 the time the new expression is evaluated. The only optimization of this
10680 sort that would be valid is changing
10682 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10684 divided by 8 to
10686 SAVE_EXPR (I) * SAVE_EXPR (J)
10688 (where the same SAVE_EXPR (J) is used in the original and the
10689 transformed version). */
10691 static int
10692 multiple_of_p (tree type, tree top, tree bottom)
10694 if (operand_equal_p (top, bottom, 0))
10695 return 1;
10697 if (TREE_CODE (type) != INTEGER_TYPE)
10698 return 0;
10700 switch (TREE_CODE (top))
10702 case BIT_AND_EXPR:
10703 /* Bitwise and provides a power of two multiple. If the mask is
10704 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10705 if (!integer_pow2p (bottom))
10706 return 0;
10707 /* FALLTHRU */
10709 case MULT_EXPR:
10710 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10711 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10713 case PLUS_EXPR:
10714 case MINUS_EXPR:
10715 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10716 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10718 case LSHIFT_EXPR:
10719 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10721 tree op1, t1;
10723 op1 = TREE_OPERAND (top, 1);
10724 /* const_binop may not detect overflow correctly,
10725 so check for it explicitly here. */
10726 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10727 > TREE_INT_CST_LOW (op1)
10728 && TREE_INT_CST_HIGH (op1) == 0
10729 && 0 != (t1 = fold_convert (type,
10730 const_binop (LSHIFT_EXPR,
10731 size_one_node,
10732 op1, 0)))
10733 && ! TREE_OVERFLOW (t1))
10734 return multiple_of_p (type, t1, bottom);
10736 return 0;
10738 case NOP_EXPR:
10739 /* Can't handle conversions from non-integral or wider integral type. */
10740 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10741 || (TYPE_PRECISION (type)
10742 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10743 return 0;
10745 /* .. fall through ... */
10747 case SAVE_EXPR:
10748 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10750 case INTEGER_CST:
10751 if (TREE_CODE (bottom) != INTEGER_CST
10752 || (TYPE_UNSIGNED (type)
10753 && (tree_int_cst_sgn (top) < 0
10754 || tree_int_cst_sgn (bottom) < 0)))
10755 return 0;
10756 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10757 top, bottom, 0));
10759 default:
10760 return 0;
10764 /* Return true if `t' is known to be non-negative. */
10767 tree_expr_nonnegative_p (tree t)
10769 switch (TREE_CODE (t))
10771 case ABS_EXPR:
10772 return 1;
10774 case INTEGER_CST:
10775 return tree_int_cst_sgn (t) >= 0;
10777 case REAL_CST:
10778 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10780 case PLUS_EXPR:
10781 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10782 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10783 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10785 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10786 both unsigned and at least 2 bits shorter than the result. */
10787 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10788 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10789 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10791 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10792 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10793 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10794 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10796 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10797 TYPE_PRECISION (inner2)) + 1;
10798 return prec < TYPE_PRECISION (TREE_TYPE (t));
10801 break;
10803 case MULT_EXPR:
10804 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10806 /* x * x for floating point x is always non-negative. */
10807 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10808 return 1;
10809 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10810 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10813 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10814 both unsigned and their total bits is shorter than the result. */
10815 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10816 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10817 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10819 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10820 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10821 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10822 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10823 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10824 < TYPE_PRECISION (TREE_TYPE (t));
10826 return 0;
10828 case TRUNC_DIV_EXPR:
10829 case CEIL_DIV_EXPR:
10830 case FLOOR_DIV_EXPR:
10831 case ROUND_DIV_EXPR:
10832 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10833 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10835 case TRUNC_MOD_EXPR:
10836 case CEIL_MOD_EXPR:
10837 case FLOOR_MOD_EXPR:
10838 case ROUND_MOD_EXPR:
10839 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10841 case RDIV_EXPR:
10842 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10843 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10845 case BIT_AND_EXPR:
10846 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10847 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10848 case BIT_IOR_EXPR:
10849 case BIT_XOR_EXPR:
10850 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10851 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10853 case NOP_EXPR:
10855 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10856 tree outer_type = TREE_TYPE (t);
10858 if (TREE_CODE (outer_type) == REAL_TYPE)
10860 if (TREE_CODE (inner_type) == REAL_TYPE)
10861 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10862 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10864 if (TYPE_UNSIGNED (inner_type))
10865 return 1;
10866 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10869 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10871 if (TREE_CODE (inner_type) == REAL_TYPE)
10872 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
10873 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10874 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10875 && TYPE_UNSIGNED (inner_type);
10878 break;
10880 case COND_EXPR:
10881 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10882 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10883 case COMPOUND_EXPR:
10884 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10885 case MIN_EXPR:
10886 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10887 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10888 case MAX_EXPR:
10889 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10890 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10891 case MODIFY_EXPR:
10892 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10893 case BIND_EXPR:
10894 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10895 case SAVE_EXPR:
10896 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10897 case NON_LVALUE_EXPR:
10898 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10899 case FLOAT_EXPR:
10900 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10902 case TARGET_EXPR:
10904 tree temp = TARGET_EXPR_SLOT (t);
10905 t = TARGET_EXPR_INITIAL (t);
10907 /* If the initializer is non-void, then it's a normal expression
10908 that will be assigned to the slot. */
10909 if (!VOID_TYPE_P (t))
10910 return tree_expr_nonnegative_p (t);
10912 /* Otherwise, the initializer sets the slot in some way. One common
10913 way is an assignment statement at the end of the initializer. */
10914 while (1)
10916 if (TREE_CODE (t) == BIND_EXPR)
10917 t = expr_last (BIND_EXPR_BODY (t));
10918 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10919 || TREE_CODE (t) == TRY_CATCH_EXPR)
10920 t = expr_last (TREE_OPERAND (t, 0));
10921 else if (TREE_CODE (t) == STATEMENT_LIST)
10922 t = expr_last (t);
10923 else
10924 break;
10926 if (TREE_CODE (t) == MODIFY_EXPR
10927 && TREE_OPERAND (t, 0) == temp)
10928 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10930 return 0;
10933 case CALL_EXPR:
10935 tree fndecl = get_callee_fndecl (t);
10936 tree arglist = TREE_OPERAND (t, 1);
10937 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10938 switch (DECL_FUNCTION_CODE (fndecl))
10940 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10941 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10942 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10943 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10945 CASE_BUILTIN_F (BUILT_IN_ACOS)
10946 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10947 CASE_BUILTIN_F (BUILT_IN_CABS)
10948 CASE_BUILTIN_F (BUILT_IN_COSH)
10949 CASE_BUILTIN_F (BUILT_IN_ERFC)
10950 CASE_BUILTIN_F (BUILT_IN_EXP)
10951 CASE_BUILTIN_F (BUILT_IN_EXP10)
10952 CASE_BUILTIN_F (BUILT_IN_EXP2)
10953 CASE_BUILTIN_F (BUILT_IN_FABS)
10954 CASE_BUILTIN_F (BUILT_IN_FDIM)
10955 CASE_BUILTIN_F (BUILT_IN_FREXP)
10956 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10957 CASE_BUILTIN_F (BUILT_IN_POW10)
10958 CASE_BUILTIN_I (BUILT_IN_FFS)
10959 CASE_BUILTIN_I (BUILT_IN_PARITY)
10960 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10961 /* Always true. */
10962 return 1;
10964 CASE_BUILTIN_F (BUILT_IN_SQRT)
10965 /* sqrt(-0.0) is -0.0. */
10966 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10967 return 1;
10968 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10970 CASE_BUILTIN_F (BUILT_IN_ASINH)
10971 CASE_BUILTIN_F (BUILT_IN_ATAN)
10972 CASE_BUILTIN_F (BUILT_IN_ATANH)
10973 CASE_BUILTIN_F (BUILT_IN_CBRT)
10974 CASE_BUILTIN_F (BUILT_IN_CEIL)
10975 CASE_BUILTIN_F (BUILT_IN_ERF)
10976 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10977 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10978 CASE_BUILTIN_F (BUILT_IN_FMOD)
10979 CASE_BUILTIN_F (BUILT_IN_LCEIL)
10980 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10981 CASE_BUILTIN_F (BUILT_IN_LFLOOR)
10982 CASE_BUILTIN_F (BUILT_IN_LLCEIL)
10983 CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
10984 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10985 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10986 CASE_BUILTIN_F (BUILT_IN_LRINT)
10987 CASE_BUILTIN_F (BUILT_IN_LROUND)
10988 CASE_BUILTIN_F (BUILT_IN_MODF)
10989 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
10990 CASE_BUILTIN_F (BUILT_IN_POW)
10991 CASE_BUILTIN_F (BUILT_IN_RINT)
10992 CASE_BUILTIN_F (BUILT_IN_ROUND)
10993 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
10994 CASE_BUILTIN_F (BUILT_IN_SINH)
10995 CASE_BUILTIN_F (BUILT_IN_TANH)
10996 CASE_BUILTIN_F (BUILT_IN_TRUNC)
10997 /* True if the 1st argument is nonnegative. */
10998 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11000 CASE_BUILTIN_F (BUILT_IN_FMAX)
11001 /* True if the 1st OR 2nd arguments are nonnegative. */
11002 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11003 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11005 CASE_BUILTIN_F (BUILT_IN_FMIN)
11006 /* True if the 1st AND 2nd arguments are nonnegative. */
11007 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11008 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11010 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
11011 /* True if the 2nd argument is nonnegative. */
11012 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11014 default:
11015 break;
11016 #undef CASE_BUILTIN_F
11017 #undef CASE_BUILTIN_I
11021 /* ... fall through ... */
11023 default:
11024 if (truth_value_p (TREE_CODE (t)))
11025 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11026 return 1;
11029 /* We don't know sign of `t', so be conservative and return false. */
11030 return 0;
11033 /* Return true when T is an address and is known to be nonzero.
11034 For floating point we further ensure that T is not denormal.
11035 Similar logic is present in nonzero_address in rtlanal.h. */
11037 static bool
11038 tree_expr_nonzero_p (tree t)
11040 tree type = TREE_TYPE (t);
11042 /* Doing something useful for floating point would need more work. */
11043 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11044 return false;
11046 switch (TREE_CODE (t))
11048 case ABS_EXPR:
11049 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11050 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11052 case INTEGER_CST:
11053 /* We used to test for !integer_zerop here. This does not work correctly
11054 if TREE_CONSTANT_OVERFLOW (t). */
11055 return (TREE_INT_CST_LOW (t) != 0
11056 || TREE_INT_CST_HIGH (t) != 0);
11058 case PLUS_EXPR:
11059 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11061 /* With the presence of negative values it is hard
11062 to say something. */
11063 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11064 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11065 return false;
11066 /* One of operands must be positive and the other non-negative. */
11067 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11068 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11070 break;
11072 case MULT_EXPR:
11073 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11075 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11076 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11078 break;
11080 case NOP_EXPR:
11082 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11083 tree outer_type = TREE_TYPE (t);
11085 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11086 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11088 break;
11090 case ADDR_EXPR:
11092 tree base = get_base_address (TREE_OPERAND (t, 0));
11094 if (!base)
11095 return false;
11097 /* Weak declarations may link to NULL. */
11098 if (DECL_P (base))
11099 return !DECL_WEAK (base);
11101 /* Constants are never weak. */
11102 if (CONSTANT_CLASS_P (base))
11103 return true;
11105 return false;
11108 case COND_EXPR:
11109 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11110 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11112 case MIN_EXPR:
11113 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11114 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11116 case MAX_EXPR:
11117 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11119 /* When both operands are nonzero, then MAX must be too. */
11120 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11121 return true;
11123 /* MAX where operand 0 is positive is positive. */
11124 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11126 /* MAX where operand 1 is positive is positive. */
11127 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11128 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11129 return true;
11130 break;
11132 case COMPOUND_EXPR:
11133 case MODIFY_EXPR:
11134 case BIND_EXPR:
11135 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11137 case SAVE_EXPR:
11138 case NON_LVALUE_EXPR:
11139 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11141 case BIT_IOR_EXPR:
11142 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11143 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11145 default:
11146 break;
11148 return false;
11151 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11152 attempt to fold the expression to a constant without modifying TYPE,
11153 OP0 or OP1.
11155 If the expression could be simplified to a constant, then return
11156 the constant. If the expression would not be simplified to a
11157 constant, then return NULL_TREE. */
11159 tree
11160 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11162 tree tem = fold_binary (code, type, op0, op1);
11163 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11166 /* Given the components of a unary expression CODE, TYPE and OP0,
11167 attempt to fold the expression to a constant without modifying
11168 TYPE or OP0.
11170 If the expression could be simplified to a constant, then return
11171 the constant. If the expression would not be simplified to a
11172 constant, then return NULL_TREE. */
11174 tree
11175 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11177 tree tem = fold_unary (code, type, op0);
11178 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11181 /* If EXP represents referencing an element in a constant string
11182 (either via pointer arithmetic or array indexing), return the
11183 tree representing the value accessed, otherwise return NULL. */
11185 tree
11186 fold_read_from_constant_string (tree exp)
11188 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11190 tree exp1 = TREE_OPERAND (exp, 0);
11191 tree index;
11192 tree string;
11194 if (TREE_CODE (exp) == INDIRECT_REF)
11195 string = string_constant (exp1, &index);
11196 else
11198 tree low_bound = array_ref_low_bound (exp);
11199 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11201 /* Optimize the special-case of a zero lower bound.
11203 We convert the low_bound to sizetype to avoid some problems
11204 with constant folding. (E.g. suppose the lower bound is 1,
11205 and its mode is QI. Without the conversion,l (ARRAY
11206 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11207 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
11208 if (! integer_zerop (low_bound))
11209 index = size_diffop (index, fold_convert (sizetype, low_bound));
11211 string = exp1;
11214 if (string
11215 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11216 && TREE_CODE (string) == STRING_CST
11217 && TREE_CODE (index) == INTEGER_CST
11218 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11219 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11220 == MODE_INT)
11221 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11222 return fold_convert (TREE_TYPE (exp),
11223 build_int_cst (NULL_TREE,
11224 (TREE_STRING_POINTER (string)
11225 [TREE_INT_CST_LOW (index)])));
11227 return NULL;
11230 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11231 an integer constant or real constant.
11233 TYPE is the type of the result. */
11235 static tree
11236 fold_negate_const (tree arg0, tree type)
11238 tree t = NULL_TREE;
11240 switch (TREE_CODE (arg0))
11242 case INTEGER_CST:
11244 unsigned HOST_WIDE_INT low;
11245 HOST_WIDE_INT high;
11246 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11247 TREE_INT_CST_HIGH (arg0),
11248 &low, &high);
11249 t = build_int_cst_wide (type, low, high);
11250 t = force_fit_type (t, 1,
11251 (overflow | TREE_OVERFLOW (arg0))
11252 && !TYPE_UNSIGNED (type),
11253 TREE_CONSTANT_OVERFLOW (arg0));
11254 break;
11257 case REAL_CST:
11258 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11259 break;
11261 default:
11262 gcc_unreachable ();
11265 return t;
11268 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11269 an integer constant or real constant.
11271 TYPE is the type of the result. */
11273 tree
11274 fold_abs_const (tree arg0, tree type)
11276 tree t = NULL_TREE;
11278 switch (TREE_CODE (arg0))
11280 case INTEGER_CST:
11281 /* If the value is unsigned, then the absolute value is
11282 the same as the ordinary value. */
11283 if (TYPE_UNSIGNED (type))
11284 t = arg0;
11285 /* Similarly, if the value is non-negative. */
11286 else if (INT_CST_LT (integer_minus_one_node, arg0))
11287 t = arg0;
11288 /* If the value is negative, then the absolute value is
11289 its negation. */
11290 else
11292 unsigned HOST_WIDE_INT low;
11293 HOST_WIDE_INT high;
11294 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11295 TREE_INT_CST_HIGH (arg0),
11296 &low, &high);
11297 t = build_int_cst_wide (type, low, high);
11298 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11299 TREE_CONSTANT_OVERFLOW (arg0));
11301 break;
11303 case REAL_CST:
11304 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11305 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11306 else
11307 t = arg0;
11308 break;
11310 default:
11311 gcc_unreachable ();
11314 return t;
11317 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11318 constant. TYPE is the type of the result. */
11320 static tree
11321 fold_not_const (tree arg0, tree type)
11323 tree t = NULL_TREE;
11325 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11327 t = build_int_cst_wide (type,
11328 ~ TREE_INT_CST_LOW (arg0),
11329 ~ TREE_INT_CST_HIGH (arg0));
11330 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11331 TREE_CONSTANT_OVERFLOW (arg0));
11333 return t;
11336 /* Given CODE, a relational operator, the target type, TYPE and two
11337 constant operands OP0 and OP1, return the result of the
11338 relational operation. If the result is not a compile time
11339 constant, then return NULL_TREE. */
11341 static tree
11342 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11344 int result, invert;
11346 /* From here on, the only cases we handle are when the result is
11347 known to be a constant. */
11349 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11351 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11352 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11354 /* Handle the cases where either operand is a NaN. */
11355 if (real_isnan (c0) || real_isnan (c1))
11357 switch (code)
11359 case EQ_EXPR:
11360 case ORDERED_EXPR:
11361 result = 0;
11362 break;
11364 case NE_EXPR:
11365 case UNORDERED_EXPR:
11366 case UNLT_EXPR:
11367 case UNLE_EXPR:
11368 case UNGT_EXPR:
11369 case UNGE_EXPR:
11370 case UNEQ_EXPR:
11371 result = 1;
11372 break;
11374 case LT_EXPR:
11375 case LE_EXPR:
11376 case GT_EXPR:
11377 case GE_EXPR:
11378 case LTGT_EXPR:
11379 if (flag_trapping_math)
11380 return NULL_TREE;
11381 result = 0;
11382 break;
11384 default:
11385 gcc_unreachable ();
11388 return constant_boolean_node (result, type);
11391 return constant_boolean_node (real_compare (code, c0, c1), type);
11394 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11396 To compute GT, swap the arguments and do LT.
11397 To compute GE, do LT and invert the result.
11398 To compute LE, swap the arguments, do LT and invert the result.
11399 To compute NE, do EQ and invert the result.
11401 Therefore, the code below must handle only EQ and LT. */
11403 if (code == LE_EXPR || code == GT_EXPR)
11405 tree tem = op0;
11406 op0 = op1;
11407 op1 = tem;
11408 code = swap_tree_comparison (code);
11411 /* Note that it is safe to invert for real values here because we
11412 have already handled the one case that it matters. */
11414 invert = 0;
11415 if (code == NE_EXPR || code == GE_EXPR)
11417 invert = 1;
11418 code = invert_tree_comparison (code, false);
11421 /* Compute a result for LT or EQ if args permit;
11422 Otherwise return T. */
11423 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11425 if (code == EQ_EXPR)
11426 result = tree_int_cst_equal (op0, op1);
11427 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11428 result = INT_CST_LT_UNSIGNED (op0, op1);
11429 else
11430 result = INT_CST_LT (op0, op1);
11432 else
11433 return NULL_TREE;
11435 if (invert)
11436 result ^= 1;
11437 return constant_boolean_node (result, type);
11440 /* Build an expression for the a clean point containing EXPR with type TYPE.
11441 Don't build a cleanup point expression for EXPR which don't have side
11442 effects. */
11444 tree
11445 fold_build_cleanup_point_expr (tree type, tree expr)
11447 /* If the expression does not have side effects then we don't have to wrap
11448 it with a cleanup point expression. */
11449 if (!TREE_SIDE_EFFECTS (expr))
11450 return expr;
11452 /* If the expression is a return, check to see if the expression inside the
11453 return has no side effects or the right hand side of the modify expression
11454 inside the return. If either don't have side effects set we don't need to
11455 wrap the expression in a cleanup point expression. Note we don't check the
11456 left hand side of the modify because it should always be a return decl. */
11457 if (TREE_CODE (expr) == RETURN_EXPR)
11459 tree op = TREE_OPERAND (expr, 0);
11460 if (!op || !TREE_SIDE_EFFECTS (op))
11461 return expr;
11462 op = TREE_OPERAND (op, 1);
11463 if (!TREE_SIDE_EFFECTS (op))
11464 return expr;
11467 return build1 (CLEANUP_POINT_EXPR, type, expr);
11470 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11471 avoid confusing the gimplify process. */
11473 tree
11474 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11476 /* The size of the object is not relevant when talking about its address. */
11477 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11478 t = TREE_OPERAND (t, 0);
11480 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11481 if (TREE_CODE (t) == INDIRECT_REF
11482 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11484 t = TREE_OPERAND (t, 0);
11485 if (TREE_TYPE (t) != ptrtype)
11486 t = build1 (NOP_EXPR, ptrtype, t);
11488 else
11490 tree base = t;
11492 while (handled_component_p (base))
11493 base = TREE_OPERAND (base, 0);
11494 if (DECL_P (base))
11495 TREE_ADDRESSABLE (base) = 1;
11497 t = build1 (ADDR_EXPR, ptrtype, t);
11500 return t;
11503 tree
11504 build_fold_addr_expr (tree t)
11506 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11509 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11510 of an indirection through OP0, or NULL_TREE if no simplification is
11511 possible. */
11513 tree
11514 fold_indirect_ref_1 (tree type, tree op0)
11516 tree sub = op0;
11517 tree subtype;
11519 STRIP_NOPS (sub);
11520 subtype = TREE_TYPE (sub);
11521 if (!POINTER_TYPE_P (subtype))
11522 return NULL_TREE;
11524 if (TREE_CODE (sub) == ADDR_EXPR)
11526 tree op = TREE_OPERAND (sub, 0);
11527 tree optype = TREE_TYPE (op);
11528 /* *&p => p */
11529 if (type == optype)
11530 return op;
11531 /* *(foo *)&fooarray => fooarray[0] */
11532 else if (TREE_CODE (optype) == ARRAY_TYPE
11533 && type == TREE_TYPE (optype))
11535 tree type_domain = TYPE_DOMAIN (optype);
11536 tree min_val = size_zero_node;
11537 if (type_domain && TYPE_MIN_VALUE (type_domain))
11538 min_val = TYPE_MIN_VALUE (type_domain);
11539 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11543 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11544 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11545 && type == TREE_TYPE (TREE_TYPE (subtype)))
11547 tree type_domain;
11548 tree min_val = size_zero_node;
11549 sub = build_fold_indirect_ref (sub);
11550 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11551 if (type_domain && TYPE_MIN_VALUE (type_domain))
11552 min_val = TYPE_MIN_VALUE (type_domain);
11553 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11556 return NULL_TREE;
11559 /* Builds an expression for an indirection through T, simplifying some
11560 cases. */
11562 tree
11563 build_fold_indirect_ref (tree t)
11565 tree type = TREE_TYPE (TREE_TYPE (t));
11566 tree sub = fold_indirect_ref_1 (type, t);
11568 if (sub)
11569 return sub;
11570 else
11571 return build1 (INDIRECT_REF, type, t);
11574 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11576 tree
11577 fold_indirect_ref (tree t)
11579 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11581 if (sub)
11582 return sub;
11583 else
11584 return t;
11587 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11588 whose result is ignored. The type of the returned tree need not be
11589 the same as the original expression. */
11591 tree
11592 fold_ignored_result (tree t)
11594 if (!TREE_SIDE_EFFECTS (t))
11595 return integer_zero_node;
11597 for (;;)
11598 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11600 case tcc_unary:
11601 t = TREE_OPERAND (t, 0);
11602 break;
11604 case tcc_binary:
11605 case tcc_comparison:
11606 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11607 t = TREE_OPERAND (t, 0);
11608 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11609 t = TREE_OPERAND (t, 1);
11610 else
11611 return t;
11612 break;
11614 case tcc_expression:
11615 switch (TREE_CODE (t))
11617 case COMPOUND_EXPR:
11618 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11619 return t;
11620 t = TREE_OPERAND (t, 0);
11621 break;
11623 case COND_EXPR:
11624 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11625 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11626 return t;
11627 t = TREE_OPERAND (t, 0);
11628 break;
11630 default:
11631 return t;
11633 break;
11635 default:
11636 return t;
11640 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11641 This can only be applied to objects of a sizetype. */
11643 tree
11644 round_up (tree value, int divisor)
11646 tree div = NULL_TREE;
11648 gcc_assert (divisor > 0);
11649 if (divisor == 1)
11650 return value;
11652 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11653 have to do anything. Only do this when we are not given a const,
11654 because in that case, this check is more expensive than just
11655 doing it. */
11656 if (TREE_CODE (value) != INTEGER_CST)
11658 div = build_int_cst (TREE_TYPE (value), divisor);
11660 if (multiple_of_p (TREE_TYPE (value), value, div))
11661 return value;
11664 /* If divisor is a power of two, simplify this to bit manipulation. */
11665 if (divisor == (divisor & -divisor))
11667 tree t;
11669 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11670 value = size_binop (PLUS_EXPR, value, t);
11671 t = build_int_cst (TREE_TYPE (value), -divisor);
11672 value = size_binop (BIT_AND_EXPR, value, t);
11674 else
11676 if (!div)
11677 div = build_int_cst (TREE_TYPE (value), divisor);
11678 value = size_binop (CEIL_DIV_EXPR, value, div);
11679 value = size_binop (MULT_EXPR, value, div);
11682 return value;
11685 /* Likewise, but round down. */
11687 tree
11688 round_down (tree value, int divisor)
11690 tree div = NULL_TREE;
11692 gcc_assert (divisor > 0);
11693 if (divisor == 1)
11694 return value;
11696 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11697 have to do anything. Only do this when we are not given a const,
11698 because in that case, this check is more expensive than just
11699 doing it. */
11700 if (TREE_CODE (value) != INTEGER_CST)
11702 div = build_int_cst (TREE_TYPE (value), divisor);
11704 if (multiple_of_p (TREE_TYPE (value), value, div))
11705 return value;
11708 /* If divisor is a power of two, simplify this to bit manipulation. */
11709 if (divisor == (divisor & -divisor))
11711 tree t;
11713 t = build_int_cst (TREE_TYPE (value), -divisor);
11714 value = size_binop (BIT_AND_EXPR, value, t);
11716 else
11718 if (!div)
11719 div = build_int_cst (TREE_TYPE (value), divisor);
11720 value = size_binop (FLOOR_DIV_EXPR, value, div);
11721 value = size_binop (MULT_EXPR, value, div);
11724 return value;
11727 /* Returns the pointer to the base of the object addressed by EXP and
11728 extracts the information about the offset of the access, storing it
11729 to PBITPOS and POFFSET. */
11731 static tree
11732 split_address_to_core_and_offset (tree exp,
11733 HOST_WIDE_INT *pbitpos, tree *poffset)
11735 tree core;
11736 enum machine_mode mode;
11737 int unsignedp, volatilep;
11738 HOST_WIDE_INT bitsize;
11740 if (TREE_CODE (exp) == ADDR_EXPR)
11742 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11743 poffset, &mode, &unsignedp, &volatilep,
11744 false);
11746 if (TREE_CODE (core) == INDIRECT_REF)
11747 core = TREE_OPERAND (core, 0);
11749 else
11751 core = exp;
11752 *pbitpos = 0;
11753 *poffset = NULL_TREE;
11756 return core;
11759 /* Returns true if addresses of E1 and E2 differ by a constant, false
11760 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11762 bool
11763 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11765 tree core1, core2;
11766 HOST_WIDE_INT bitpos1, bitpos2;
11767 tree toffset1, toffset2, tdiff, type;
11769 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11770 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11772 if (bitpos1 % BITS_PER_UNIT != 0
11773 || bitpos2 % BITS_PER_UNIT != 0
11774 || !operand_equal_p (core1, core2, 0))
11775 return false;
11777 if (toffset1 && toffset2)
11779 type = TREE_TYPE (toffset1);
11780 if (type != TREE_TYPE (toffset2))
11781 toffset2 = fold_convert (type, toffset2);
11783 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11784 if (!host_integerp (tdiff, 0))
11785 return false;
11787 *diff = tree_low_cst (tdiff, 0);
11789 else if (toffset1 || toffset2)
11791 /* If only one of the offsets is non-constant, the difference cannot
11792 be a constant. */
11793 return false;
11795 else
11796 *diff = 0;
11798 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11799 return true;
11802 /* Simplify the floating point expression EXP when the sign of the
11803 result is not significant. Return NULL_TREE if no simplification
11804 is possible. */
11806 tree
11807 fold_strip_sign_ops (tree exp)
11809 tree arg0, arg1;
11811 switch (TREE_CODE (exp))
11813 case ABS_EXPR:
11814 case NEGATE_EXPR:
11815 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11816 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11818 case MULT_EXPR:
11819 case RDIV_EXPR:
11820 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11821 return NULL_TREE;
11822 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11823 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11824 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11825 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11826 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11827 arg1 ? arg1 : TREE_OPERAND (exp, 1));
11828 break;
11830 default:
11831 break;
11833 return NULL_TREE;