[official-gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision etc. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
46 #include "config.h"
47 #include "system.h"
48 #include "coretypes.h"
49 #include "tm.h"
50 #include "flags.h"
51 #include "tree.h"
52 #include "real.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "toplev.h"
57 #include "intl.h"
58 #include "ggc.h"
59 #include "hashtab.h"
60 #include "langhooks.h"
61 #include "md5.h"
63 /* Non-zero if we are folding constants inside an initializer; zero
64 otherwise. */
65 int folding_initializer = 0;
67 /* The following constants represent a bit based encoding of GCC's
68 comparison operators. This encoding simplifies transformations
69 on relational comparison operators, such as AND and OR. */
70 enum comparison_code {
71 COMPCODE_FALSE = 0,
72 COMPCODE_LT = 1,
73 COMPCODE_EQ = 2,
74 COMPCODE_LE = 3,
75 COMPCODE_GT = 4,
76 COMPCODE_LTGT = 5,
77 COMPCODE_GE = 6,
78 COMPCODE_ORD = 7,
79 COMPCODE_UNORD = 8,
80 COMPCODE_UNLT = 9,
81 COMPCODE_UNEQ = 10,
82 COMPCODE_UNLE = 11,
83 COMPCODE_UNGT = 12,
84 COMPCODE_NE = 13,
85 COMPCODE_UNGE = 14,
86 COMPCODE_TRUE = 15
87 };
89 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
90 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
91 static bool negate_mathfn_p (enum built_in_function);
92 static bool negate_expr_p (tree);
93 static tree negate_expr (tree);
94 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
95 static tree associate_trees (tree, tree, enum tree_code, tree);
96 static tree const_binop (enum tree_code, tree, tree, int);
97 static enum comparison_code comparison_to_compcode (enum tree_code);
98 static enum tree_code compcode_to_comparison (enum comparison_code);
99 static tree combine_comparisons (enum tree_code, enum tree_code,
100 enum tree_code, tree, tree, tree);
101 static int truth_value_p (enum tree_code);
102 static int operand_equal_for_comparison_p (tree, tree, tree);
103 static int twoval_comparison_p (tree, tree *, tree *, int *);
104 static tree eval_subst (tree, tree, tree, tree, tree);
105 static tree pedantic_omit_one_operand (tree, tree, tree);
106 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
107 static tree make_bit_field_ref (tree, tree, int, int, int);
108 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
109 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
110 enum machine_mode *, int *, int *,
111 tree *, tree *);
112 static int all_ones_mask_p (tree, int);
113 static tree sign_bit_p (tree, tree);
114 static int simple_operand_p (tree);
115 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
116 static tree range_predecessor (tree);
117 static tree range_successor (tree);
118 static tree make_range (tree, int *, tree *, tree *, bool *);
119 static tree build_range_check (tree, tree, int, tree, tree);
120 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
121 tree);
122 static tree fold_range_test (enum tree_code, tree, tree, tree);
123 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
124 static tree unextend (tree, int, int, tree);
125 static tree fold_truthop (enum tree_code, tree, tree, tree);
126 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
127 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
128 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
129 static int multiple_of_p (tree, tree, tree);
130 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
131 tree, tree,
132 tree, tree, int);
133 static bool fold_real_zero_addition_p (tree, tree, int);
134 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
135 tree, tree, tree);
136 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
137 static tree fold_div_compare (enum tree_code, tree, tree, tree);
138 static bool reorder_operands_p (tree, tree);
139 static tree fold_negate_const (tree, tree);
140 static tree fold_not_const (tree, tree);
141 static tree fold_relational_const (enum tree_code, tree, tree, tree);
142 static int native_encode_expr (tree, unsigned char *, int);
143 static tree native_interpret_expr (tree, unsigned char *, int);
146 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
147 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
148 and SUM1. Then this yields nonzero if overflow occurred during the
149 addition.
151 Overflow occurs if A and B have the same sign, but A and SUM differ in
152 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
153 sign. */
154 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
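/* Worked example (8-bit values shown for brevity): a = 0x60, b = 0x60,
   sum = 0xC0.  The sign bits of a and b agree, but sum's sign bit
   differs from theirs, so ~(a ^ b) & (a ^ sum) has the sign bit set and
   the macro yields nonzero: the signed addition overflowed. */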
156 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
157 We do that by representing the two-word integer in 4 words, with only
158 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
159 number. The value of the word is LOWPART + HIGHPART * BASE. */
161 #define LOWPART(x) \
162 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
163 #define HIGHPART(x) \
164 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
165 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
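/* Illustration, assuming HOST_BITS_PER_WIDE_INT == 64: BASE is 2^32,
   and for x = 0x123456789ABCDEF0 we get LOWPART (x) == 0x9ABCDEF0 and
   HIGHPART (x) == 0x12345678, so x == LOWPART (x) + HIGHPART (x) * BASE. */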
167 /* Unpack a two-word integer into 4 words.
168 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
169 WORDS points to the array of HOST_WIDE_INTs. */
171 static void
172 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
174 words[0] = LOWPART (low);
175 words[1] = HIGHPART (low);
176 words[2] = LOWPART (hi);
177 words[3] = HIGHPART (hi);
180 /* Pack an array of 4 words into a two-word integer.
181 WORDS points to the array of words.
182 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
184 static void
185 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
186 HOST_WIDE_INT *hi)
188 *low = words[0] + words[1] * BASE;
189 *hi = words[2] + words[3] * BASE;
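/* A minimal round-trip sketch of the encode/decode pair; compiled out,
   and the operand values below are arbitrary examples. */
#if 0
static void
encode_decode_roundtrip_sketch (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;

  encode (words, 0xDEADBEEF, 0x1234);	/* split into four half-words */
  decode (words, &low, &hi);		/* reassemble the two words */
  gcc_assert (low == 0xDEADBEEF && hi == 0x1234);
}
#endif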
192 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
193 in overflow of the value: when >0 we are only interested in signed
194 overflow; for <0 we are interested in any overflow. OVERFLOWED
195 indicates whether overflow has already occurred. CONST_OVERFLOWED
196 indicates whether constant overflow has already occurred. We force
197 T's value to be within range of T's type (by setting to 0 or 1 all
198 the bits outside the type's range). We set TREE_OVERFLOW if
199 OVERFLOWED is nonzero,
200 or OVERFLOWABLE is >0 and signed overflow occurs,
201 or OVERFLOWABLE is <0 and any overflow occurs.
202 We set TREE_CONSTANT_OVERFLOW if
203 CONST_OVERFLOWED is nonzero
204 or we set TREE_OVERFLOW.
205 We return either the original T, or a copy. */
207 tree
208 force_fit_type (tree t, int overflowable,
209 bool overflowed, bool overflowed_const)
211 unsigned HOST_WIDE_INT low;
212 HOST_WIDE_INT high;
213 unsigned int prec;
214 int sign_extended_type;
216 gcc_assert (TREE_CODE (t) == INTEGER_CST);
218 low = TREE_INT_CST_LOW (t);
219 high = TREE_INT_CST_HIGH (t);
221 if (POINTER_TYPE_P (TREE_TYPE (t))
222 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
223 prec = POINTER_SIZE;
224 else
225 prec = TYPE_PRECISION (TREE_TYPE (t));
226 /* Size types *are* sign extended. */
227 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
228 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
229 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
231 /* First clear all bits that are beyond the type's precision. */
233 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
235 else if (prec > HOST_BITS_PER_WIDE_INT)
236 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
237 else
239 high = 0;
240 if (prec < HOST_BITS_PER_WIDE_INT)
241 low &= ~((HOST_WIDE_INT) (-1) << prec);
244 if (!sign_extended_type)
245 /* No sign extension */;
246 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
247 /* Correct width already. */;
248 else if (prec > HOST_BITS_PER_WIDE_INT)
250 /* Sign extend top half? */
251 if (high & ((unsigned HOST_WIDE_INT)1
252 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
253 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
255 else if (prec == HOST_BITS_PER_WIDE_INT)
257 if ((HOST_WIDE_INT)low < 0)
258 high = -1;
260 else
262 /* Sign extend bottom half? */
263 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
265 high = -1;
266 low |= (HOST_WIDE_INT)(-1) << prec;
270 /* If the value changed, return a new node. */
271 if (overflowed || overflowed_const
272 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
274 t = build_int_cst_wide (TREE_TYPE (t), low, high);
276 if (overflowed
277 || overflowable < 0
278 || (overflowable > 0 && sign_extended_type))
280 t = copy_node (t);
281 TREE_OVERFLOW (t) = 1;
282 TREE_CONSTANT_OVERFLOW (t) = 1;
284 else if (overflowed_const)
286 t = copy_node (t);
287 TREE_CONSTANT_OVERFLOW (t) = 1;
291 return t;
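/* Hedged usage sketch; UINT8_TYPE stands for some hypothetical 8-bit
   unsigned type node.  With OVERFLOWABLE < 0, the value change caused
   by masking is recorded as an overflow. */
#if 0
static void
force_fit_type_sketch (tree uint8_type)
{
  tree cst = build_int_cst_wide (uint8_type, 0x1ff, 0);
  cst = force_fit_type (cst, -1, false, false);
  /* Now TREE_INT_CST_LOW (cst) == 0xff and TREE_OVERFLOW (cst) is set. */
}
#endif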
294 /* Add two doubleword integers with doubleword result.
295 Return nonzero if the operation overflows according to UNSIGNED_P.
296 Each argument is given as two `HOST_WIDE_INT' pieces.
297 One argument is L1 and H1; the other, L2 and H2.
298 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
300 int
301 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
302 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
303 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
304 bool unsigned_p)
306 unsigned HOST_WIDE_INT l;
307 HOST_WIDE_INT h;
309 l = l1 + l2;
310 h = h1 + h2 + (l < l1);
312 *lv = l;
313 *hv = h;
315 if (unsigned_p)
316 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
317 else
318 return OVERFLOW_SUM_SIGN (h1, h2, h);
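/* Sketch: adding 1 to the all-ones doubleword wraps to zero.  With
   UNSIGNED_P set this counts as overflow; the signed reading
   (-1 + 1 == 0) does not.  Compiled out. */
#if 0
static void
add_double_overflow_sketch (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = add_double_with_sign (~(unsigned HOST_WIDE_INT) 0, -1,
                                  1, 0, &lv, &hv, true);
  gcc_assert (lv == 0 && hv == 0 && ovf != 0);
}
#endif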
321 /* Negate a doubleword integer with doubleword result.
322 Return nonzero if the operation overflows, assuming it's signed.
323 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
324 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
326 int
327 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
328 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
330 if (l1 == 0)
332 *lv = 0;
333 *hv = - h1;
334 return (*hv & h1) < 0;
336 else
338 *lv = -l1;
339 *hv = ~h1;
340 return 0;
344 /* Multiply two doubleword integers with doubleword result.
345 Return nonzero if the operation overflows according to UNSIGNED_P.
346 Each argument is given as two `HOST_WIDE_INT' pieces.
347 One argument is L1 and H1; the other, L2 and H2.
348 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
350 int
351 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
352 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
353 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
354 bool unsigned_p)
356 HOST_WIDE_INT arg1[4];
357 HOST_WIDE_INT arg2[4];
358 HOST_WIDE_INT prod[4 * 2];
359 unsigned HOST_WIDE_INT carry;
360 int i, j, k;
361 unsigned HOST_WIDE_INT toplow, neglow;
362 HOST_WIDE_INT tophigh, neghigh;
364 encode (arg1, l1, h1);
365 encode (arg2, l2, h2);
367 memset (prod, 0, sizeof prod);
369 for (i = 0; i < 4; i++)
371 carry = 0;
372 for (j = 0; j < 4; j++)
374 k = i + j;
375 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
376 carry += arg1[i] * arg2[j];
377 /* Since prod[k] < 0xFFFF, this sum <= 0xFFFFFFFF. */
378 carry += prod[k];
379 prod[k] = LOWPART (carry);
380 carry = HIGHPART (carry);
382 prod[i + 4] = carry;
385 decode (prod, lv, hv);
386 decode (prod + 4, &toplow, &tophigh);
388 /* Unsigned overflow is immediate. */
389 if (unsigned_p)
390 return (toplow | tophigh) != 0;
392 /* Check for signed overflow by calculating the signed representation of the
393 top half of the result; it should agree with the low half's sign bit. */
394 if (h1 < 0)
396 neg_double (l2, h2, &neglow, &neghigh);
397 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
399 if (h2 < 0)
401 neg_double (l1, h1, &neglow, &neghigh);
402 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
404 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
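/* The loop above is schoolbook long multiplication in base
   2^(HOST_BITS_PER_WIDE_INT / 2): PROD accumulates eight half-word
   digits, and the top four (TOPLOW, TOPHIGH) are nonzero exactly when
   the unsigned product no longer fits in a doubleword. */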
407 /* Shift the doubleword integer in L1, H1 left by COUNT places
408 keeping only PREC bits of result.
409 Shift right if COUNT is negative.
410 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
411 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
413 void
414 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
415 HOST_WIDE_INT count, unsigned int prec,
416 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
418 unsigned HOST_WIDE_INT signmask;
420 if (count < 0)
422 rshift_double (l1, h1, -count, prec, lv, hv, arith);
423 return;
426 if (SHIFT_COUNT_TRUNCATED)
427 count %= prec;
429 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
431 /* Shifting by the host word size is undefined according to the
432 ANSI standard, so we must handle this as a special case. */
433 *hv = 0;
434 *lv = 0;
436 else if (count >= HOST_BITS_PER_WIDE_INT)
438 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
439 *lv = 0;
441 else
443 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
444 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
445 *lv = l1 << count;
448 /* Sign extend all bits that are beyond the precision. */
450 signmask = -((prec > HOST_BITS_PER_WIDE_INT
451 ? ((unsigned HOST_WIDE_INT) *hv
452 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
453 : (*lv >> (prec - 1))) & 1);
455 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
457 else if (prec >= HOST_BITS_PER_WIDE_INT)
459 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
460 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
462 else
464 *hv = signmask;
465 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
466 *lv |= signmask << prec;
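/* Note the split shift "l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1"
   above: when COUNT is 0, a single shift by HOST_BITS_PER_WIDE_INT would
   be undefined behavior in C, so it is done in two defined steps. */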
470 /* Shift the doubleword integer in L1, H1 right by COUNT places
471 keeping only PREC bits of result. COUNT must be positive.
472 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
473 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
475 void
476 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
477 HOST_WIDE_INT count, unsigned int prec,
478 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
479 int arith)
481 unsigned HOST_WIDE_INT signmask;
483 signmask = (arith
484 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
485 : 0);
487 if (SHIFT_COUNT_TRUNCATED)
488 count %= prec;
490 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
492 /* Shifting by the host word size is undefined according to the
493 ANSI standard, so we must handle this as a special case. */
494 *hv = 0;
495 *lv = 0;
497 else if (count >= HOST_BITS_PER_WIDE_INT)
499 *hv = 0;
500 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
502 else
504 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
505 *lv = ((l1 >> count)
506 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
509 /* Zero / sign extend all bits that are beyond the precision. */
511 if (count >= (HOST_WIDE_INT)prec)
513 *hv = signmask;
514 *lv = signmask;
516 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
518 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
520 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
521 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
523 else
525 *hv = signmask;
526 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
527 *lv |= signmask << (prec - count);
531 /* Rotate the doubleword integer in L1, H1 left by COUNT places
532 keeping only PREC bits of result.
533 Rotate right if COUNT is negative.
534 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
536 void
537 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
538 HOST_WIDE_INT count, unsigned int prec,
539 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
541 unsigned HOST_WIDE_INT s1l, s2l;
542 HOST_WIDE_INT s1h, s2h;
544 count %= prec;
545 if (count < 0)
546 count += prec;
548 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
549 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
550 *lv = s1l | s2l;
551 *hv = s1h | s2h;
554 /* Rotate the doubleword integer in L1, H1 right by COUNT places
555 keeping only PREC bits of result. COUNT must be positive.
556 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
558 void
559 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
560 HOST_WIDE_INT count, unsigned int prec,
561 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
563 unsigned HOST_WIDE_INT s1l, s2l;
564 HOST_WIDE_INT s1h, s2h;
566 count %= prec;
567 if (count < 0)
568 count += prec;
570 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
571 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
572 *lv = s1l | s2l;
573 *hv = s1h | s2h;
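/* Both rotates are composed from a pair of logical shifts.  E.g. with
   PREC == 8, rotating 0xB1 left by 4 ORs the shift result 0x10 (the low
   8 bits of 0xB1 << 4) with the wrapped-around bits 0x0B (0xB1 >> 4),
   giving 0x1B. */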
576 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
577 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
578 CODE is a tree code for a kind of division, one of
579 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
580 or EXACT_DIV_EXPR.
581 It controls how the quotient is rounded to an integer.
582 Return nonzero if the operation overflows.
583 UNS nonzero says do unsigned division. */
585 int
586 div_and_round_double (enum tree_code code, int uns,
587 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
588 HOST_WIDE_INT hnum_orig,
589 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
590 HOST_WIDE_INT hden_orig,
591 unsigned HOST_WIDE_INT *lquo,
592 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
593 HOST_WIDE_INT *hrem)
595 int quo_neg = 0;
596 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
597 HOST_WIDE_INT den[4], quo[4];
598 int i, j;
599 unsigned HOST_WIDE_INT work;
600 unsigned HOST_WIDE_INT carry = 0;
601 unsigned HOST_WIDE_INT lnum = lnum_orig;
602 HOST_WIDE_INT hnum = hnum_orig;
603 unsigned HOST_WIDE_INT lden = lden_orig;
604 HOST_WIDE_INT hden = hden_orig;
605 int overflow = 0;
607 if (hden == 0 && lden == 0)
608 overflow = 1, lden = 1;
610 /* Calculate quotient sign and convert operands to unsigned. */
611 if (!uns)
613 if (hnum < 0)
615 quo_neg = ~ quo_neg;
616 /* (minimum integer) / (-1) is the only overflow case. */
617 if (neg_double (lnum, hnum, &lnum, &hnum)
618 && ((HOST_WIDE_INT) lden & hden) == -1)
619 overflow = 1;
621 if (hden < 0)
623 quo_neg = ~ quo_neg;
624 neg_double (lden, hden, &lden, &hden);
628 if (hnum == 0 && hden == 0)
629 { /* single precision */
630 *hquo = *hrem = 0;
631 /* This unsigned division rounds toward zero. */
632 *lquo = lnum / lden;
633 goto finish_up;
636 if (hnum == 0)
637 { /* trivial case: dividend < divisor */
638 /* hden != 0 already checked. */
639 *hquo = *lquo = 0;
640 *hrem = hnum;
641 *lrem = lnum;
642 goto finish_up;
645 memset (quo, 0, sizeof quo);
647 memset (num, 0, sizeof num); /* to zero the extra scaling element */
648 memset (den, 0, sizeof den);
650 encode (num, lnum, hnum);
651 encode (den, lden, hden);
653 /* Special code for when the divisor < BASE. */
654 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
656 /* hnum != 0 already checked. */
657 for (i = 4 - 1; i >= 0; i--)
659 work = num[i] + carry * BASE;
660 quo[i] = work / lden;
661 carry = work % lden;
664 else
666 /* Full double precision division,
667 with thanks to Don Knuth's "Seminumerical Algorithms". */
668 int num_hi_sig, den_hi_sig;
669 unsigned HOST_WIDE_INT quo_est, scale;
671 /* Find the highest nonzero divisor digit. */
672 for (i = 4 - 1;; i--)
673 if (den[i] != 0)
675 den_hi_sig = i;
676 break;
679 /* Ensure that the first digit of the divisor is at least BASE/2.
680 This is required by the quotient digit estimation algorithm. */
682 scale = BASE / (den[den_hi_sig] + 1);
683 if (scale > 1)
684 { /* scale divisor and dividend */
685 carry = 0;
686 for (i = 0; i <= 4 - 1; i++)
688 work = (num[i] * scale) + carry;
689 num[i] = LOWPART (work);
690 carry = HIGHPART (work);
693 num[4] = carry;
694 carry = 0;
695 for (i = 0; i <= 4 - 1; i++)
697 work = (den[i] * scale) + carry;
698 den[i] = LOWPART (work);
699 carry = HIGHPART (work);
700 if (den[i] != 0) den_hi_sig = i;
704 num_hi_sig = 4;
706 /* Main loop */
707 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
709 /* Guess the next quotient digit, quo_est, by dividing the first
710 two remaining dividend digits by the high order divisor digit.
711 quo_est is never low and is at most 2 high. */
712 unsigned HOST_WIDE_INT tmp;
714 num_hi_sig = i + den_hi_sig + 1;
715 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
716 if (num[num_hi_sig] != den[den_hi_sig])
717 quo_est = work / den[den_hi_sig];
718 else
719 quo_est = BASE - 1;
721 /* Refine quo_est so it's usually correct, and at most one high. */
722 tmp = work - quo_est * den[den_hi_sig];
723 if (tmp < BASE
724 && (den[den_hi_sig - 1] * quo_est
725 > (tmp * BASE + num[num_hi_sig - 2])))
726 quo_est--;
728 /* Try QUO_EST as the quotient digit, by multiplying the
729 divisor by QUO_EST and subtracting from the remaining dividend.
730 Keep in mind that QUO_EST becomes quotient digit I.
732 carry = 0;
733 for (j = 0; j <= den_hi_sig; j++)
735 work = quo_est * den[j] + carry;
736 carry = HIGHPART (work);
737 work = num[i + j] - LOWPART (work);
738 num[i + j] = LOWPART (work);
739 carry += HIGHPART (work) != 0;
742 /* If quo_est was high by one, then num[i] went negative and
743 we need to correct things. */
744 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
746 quo_est--;
747 carry = 0; /* add divisor back in */
748 for (j = 0; j <= den_hi_sig; j++)
750 work = num[i + j] + den[j] + carry;
751 carry = HIGHPART (work);
752 num[i + j] = LOWPART (work);
755 num[num_hi_sig] += carry;
758 /* Store the quotient digit. */
759 quo[i] = quo_est;
763 decode (quo, lquo, hquo);
765 finish_up:
766 /* If result is negative, make it so. */
767 if (quo_neg)
768 neg_double (*lquo, *hquo, lquo, hquo);
770 /* Compute trial remainder: rem = num - (quo * den) */
771 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
772 neg_double (*lrem, *hrem, lrem, hrem);
773 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
775 switch (code)
777 case TRUNC_DIV_EXPR:
778 case TRUNC_MOD_EXPR: /* round toward zero */
779 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
780 return overflow;
782 case FLOOR_DIV_EXPR:
783 case FLOOR_MOD_EXPR: /* round toward negative infinity */
784 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
786 /* quo = quo - 1; */
787 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
788 lquo, hquo);
790 else
791 return overflow;
792 break;
794 case CEIL_DIV_EXPR:
795 case CEIL_MOD_EXPR: /* round toward positive infinity */
796 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
798 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
799 lquo, hquo);
801 else
802 return overflow;
803 break;
805 case ROUND_DIV_EXPR:
806 case ROUND_MOD_EXPR: /* round to closest integer */
808 unsigned HOST_WIDE_INT labs_rem = *lrem;
809 HOST_WIDE_INT habs_rem = *hrem;
810 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
811 HOST_WIDE_INT habs_den = hden, htwice;
813 /* Get absolute values. */
814 if (*hrem < 0)
815 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
816 if (hden < 0)
817 neg_double (lden, hden, &labs_den, &habs_den);
819 /* If (2 * abs (lrem) >= abs (lden)) */
820 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
821 labs_rem, habs_rem, &ltwice, &htwice);
823 if (((unsigned HOST_WIDE_INT) habs_den
824 < (unsigned HOST_WIDE_INT) htwice)
825 || (((unsigned HOST_WIDE_INT) habs_den
826 == (unsigned HOST_WIDE_INT) htwice)
827 && (labs_den < ltwice)))
829 if (*hquo < 0)
830 /* quo = quo - 1; */
831 add_double (*lquo, *hquo,
832 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
833 else
834 /* quo = quo + 1; */
835 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
836 lquo, hquo);
838 else
839 return overflow;
841 break;
843 default:
844 gcc_unreachable ();
847 /* Compute true remainder: rem = num - (quo * den) */
848 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
849 neg_double (*lrem, *hrem, lrem, hrem);
850 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
851 return overflow;
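/* Worked examples of the rounding fix-ups above: -7 / 3 first truncates
   to -2 with remainder -1; FLOOR_DIV_EXPR then decrements the negative
   quotient to -3.  Under ROUND_DIV_EXPR, 7 / 3 stays 2 because
   2 * |rem| == 2 < |den| == 3, while 8 / 3 becomes 3 because
   2 * |rem| == 4 exceeds |den|; exact ties such as 7 / 2 are left
   truncated by the strict comparison. */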
854 /* If ARG2 divides ARG1 with zero remainder, carries out the division
855 of type CODE and returns the quotient.
856 Otherwise returns NULL_TREE. */
858 static tree
859 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
861 unsigned HOST_WIDE_INT int1l, int2l;
862 HOST_WIDE_INT int1h, int2h;
863 unsigned HOST_WIDE_INT quol, reml;
864 HOST_WIDE_INT quoh, remh;
865 tree type = TREE_TYPE (arg1);
866 int uns = TYPE_UNSIGNED (type);
868 int1l = TREE_INT_CST_LOW (arg1);
869 int1h = TREE_INT_CST_HIGH (arg1);
870 int2l = TREE_INT_CST_LOW (arg2);
871 int2h = TREE_INT_CST_HIGH (arg2);
873 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
874 &quol, &quoh, &reml, &remh);
875 if (remh != 0 || reml != 0)
876 return NULL_TREE;
878 return build_int_cst_wide (type, quol, quoh);
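/* E.g. dividing the constants 12 and 4 with TRUNC_DIV_EXPR yields 3,
   while 13 and 4 yield NULL_TREE because the remainder is nonzero. */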
881 /* This is non-zero if we should defer warnings about undefined
882 overflow. This facility exists because these warnings are a
883 special case. The code to estimate loop iterations does not want
884 to issue any warnings, since it works with expressions which do not
885 occur in user code. Various bits of cleanup code call fold(), but
886 only use the result if it has certain characteristics (e.g., is a
887 constant); that code only wants to issue a warning if the result is
888 used. */
890 static int fold_deferring_overflow_warnings;
892 /* If a warning about undefined overflow is deferred, this is the
893 warning. Note that this may cause us to turn two warnings into
894 one, but that is fine since it is sufficient to only give one
895 warning per expression. */
897 static const char* fold_deferred_overflow_warning;
899 /* If a warning about undefined overflow is deferred, this is the
900 level at which the warning should be emitted. */
902 static enum warn_strict_overflow_code fold_deferred_overflow_code;
904 /* Start deferring overflow warnings. We could use a stack here to
905 permit nested calls, but at present it is not necessary. */
907 void
908 fold_defer_overflow_warnings (void)
910 ++fold_deferring_overflow_warnings;
913 /* Stop deferring overflow warnings. If there is a pending warning,
914 and ISSUE is true, then issue the warning if appropriate. STMT is
915 the statement with which the warning should be associated (used for
916 location information); STMT may be NULL. CODE is the level of the
917 warning--a warn_strict_overflow_code value. This function will use
918 the smaller of CODE and the deferred code when deciding whether to
919 issue the warning. CODE may be zero to mean to always use the
920 deferred code. */
922 void
923 fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
925 const char *warnmsg;
926 location_t locus;
928 gcc_assert (fold_deferring_overflow_warnings > 0);
929 --fold_deferring_overflow_warnings;
930 if (fold_deferring_overflow_warnings > 0)
932 if (fold_deferred_overflow_warning != NULL
933 && code != 0
934 && code < (int) fold_deferred_overflow_code)
935 fold_deferred_overflow_code = code;
936 return;
939 warnmsg = fold_deferred_overflow_warning;
940 fold_deferred_overflow_warning = NULL;
942 if (!issue || warnmsg == NULL)
943 return;
945 /* Use the smallest code level when deciding to issue the
946 warning. */
947 if (code == 0 || code > (int) fold_deferred_overflow_code)
948 code = fold_deferred_overflow_code;
950 if (!issue_strict_overflow_warning (code))
951 return;
953 if (stmt == NULL_TREE || !EXPR_HAS_LOCATION (stmt))
954 locus = input_location;
955 else
956 locus = EXPR_LOCATION (stmt);
957 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
960 /* Stop deferring overflow warnings, ignoring any deferred
961 warnings. */
963 void
964 fold_undefer_and_ignore_overflow_warnings (void)
966 fold_undefer_overflow_warnings (false, NULL_TREE, 0);
969 /* Whether we are deferring overflow warnings. */
971 bool
972 fold_deferring_overflow_warnings_p (void)
974 return fold_deferring_overflow_warnings > 0;
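/* Hedged sketch of the deferral protocol: bracket a speculative fold and
   issue any deferred warning only if the folded result is actually used.
   EXPR and RESULT_IS_USED are hypothetical placeholders.  Compiled out. */
#if 0
static tree
speculative_fold_sketch (tree expr, bool result_is_used)
{
  tree folded;
  fold_defer_overflow_warnings ();
  folded = fold (expr);
  fold_undefer_overflow_warnings (result_is_used, NULL_TREE, 0);
  return folded;
}
#endif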
977 /* This is called when we fold something based on the fact that signed
978 overflow is undefined. */
980 static void
981 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
983 gcc_assert (!flag_wrapv && !flag_trapv);
984 if (fold_deferring_overflow_warnings > 0)
986 if (fold_deferred_overflow_warning == NULL
987 || wc < fold_deferred_overflow_code)
989 fold_deferred_overflow_warning = gmsgid;
990 fold_deferred_overflow_code = wc;
993 else if (issue_strict_overflow_warning (wc))
994 warning (OPT_Wstrict_overflow, gmsgid);
997 /* Return true if the built-in mathematical function specified by CODE
998 is odd, i.e. -f(x) == f(-x). */
1000 static bool
1001 negate_mathfn_p (enum built_in_function code)
1003 switch (code)
1005 CASE_FLT_FN (BUILT_IN_ASIN):
1006 CASE_FLT_FN (BUILT_IN_ASINH):
1007 CASE_FLT_FN (BUILT_IN_ATAN):
1008 CASE_FLT_FN (BUILT_IN_ATANH):
1009 CASE_FLT_FN (BUILT_IN_CBRT):
1010 CASE_FLT_FN (BUILT_IN_SIN):
1011 CASE_FLT_FN (BUILT_IN_SINH):
1012 CASE_FLT_FN (BUILT_IN_TAN):
1013 CASE_FLT_FN (BUILT_IN_TANH):
1014 return true;
1016 default:
1017 break;
1019 return false;
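/* For instance, sin is odd, so -sin (x) may be folded to sin (-x);
   cos is even and is deliberately absent from the list above. */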
1022 /* Check whether we may negate an integer constant T without causing
1023 overflow. */
1025 bool
1026 may_negate_without_overflow_p (tree t)
1028 unsigned HOST_WIDE_INT val;
1029 unsigned int prec;
1030 tree type;
1032 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1034 type = TREE_TYPE (t);
1035 if (TYPE_UNSIGNED (type))
1036 return false;
1038 prec = TYPE_PRECISION (type);
1039 if (prec > HOST_BITS_PER_WIDE_INT)
1041 if (TREE_INT_CST_LOW (t) != 0)
1042 return true;
1043 prec -= HOST_BITS_PER_WIDE_INT;
1044 val = TREE_INT_CST_HIGH (t);
1046 else
1047 val = TREE_INT_CST_LOW (t);
1048 if (prec < HOST_BITS_PER_WIDE_INT)
1049 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1050 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
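/* E.g. for a signed 8-bit type this rejects exactly -128 (bit pattern
   0x80), the one value whose negation, +128, does not fit the type. */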
1053 /* Determine whether an expression T can be cheaply negated using
1054 the function negate_expr without introducing undefined overflow. */
1056 static bool
1057 negate_expr_p (tree t)
1059 tree type;
1061 if (t == 0)
1062 return false;
1064 type = TREE_TYPE (t);
1066 STRIP_SIGN_NOPS (t);
1067 switch (TREE_CODE (t))
1069 case INTEGER_CST:
1070 if (TYPE_OVERFLOW_WRAPS (type))
1071 return true;
1073 /* Check that -CST will not overflow type. */
1074 return may_negate_without_overflow_p (t);
1075 case BIT_NOT_EXPR:
1076 return (INTEGRAL_TYPE_P (type)
1077 && TYPE_OVERFLOW_WRAPS (type));
1079 case REAL_CST:
1080 case NEGATE_EXPR:
1081 return true;
1083 case COMPLEX_CST:
1084 return negate_expr_p (TREE_REALPART (t))
1085 && negate_expr_p (TREE_IMAGPART (t));
1087 case PLUS_EXPR:
1088 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
1089 return false;
1090 /* -(A + B) -> (-B) - A. */
1091 if (negate_expr_p (TREE_OPERAND (t, 1))
1092 && reorder_operands_p (TREE_OPERAND (t, 0),
1093 TREE_OPERAND (t, 1)))
1094 return true;
1095 /* -(A + B) -> (-A) - B. */
1096 return negate_expr_p (TREE_OPERAND (t, 0));
1098 case MINUS_EXPR:
1099 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1100 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1101 && reorder_operands_p (TREE_OPERAND (t, 0),
1102 TREE_OPERAND (t, 1));
1104 case MULT_EXPR:
1105 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1106 break;
1108 /* Fall through. */
1110 case RDIV_EXPR:
1111 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1112 return negate_expr_p (TREE_OPERAND (t, 1))
1113 || negate_expr_p (TREE_OPERAND (t, 0));
1114 break;
1116 case TRUNC_DIV_EXPR:
1117 case ROUND_DIV_EXPR:
1118 case FLOOR_DIV_EXPR:
1119 case CEIL_DIV_EXPR:
1120 case EXACT_DIV_EXPR:
1121 /* In general we can't negate A / B, because if A is INT_MIN and
1122 B is 1, we may turn this into INT_MIN / -1 which is undefined
1123 and actually traps on some architectures. But if overflow is
1124 undefined, we can negate, because - (INT_MIN / 1) is an
1125 overflow. */
1126 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1127 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1128 break;
1129 return negate_expr_p (TREE_OPERAND (t, 1))
1130 || negate_expr_p (TREE_OPERAND (t, 0));
1132 case NOP_EXPR:
1133 /* Convert -((double)float) into (double)(-float). */
1134 if (TREE_CODE (type) == REAL_TYPE)
1136 tree tem = strip_float_extensions (t);
1137 if (tem != t)
1138 return negate_expr_p (tem);
1140 break;
1142 case CALL_EXPR:
1143 /* Convert -f(x) into f(-x). */
1144 if (negate_mathfn_p (builtin_mathfn_code (t)))
1145 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1146 break;
1148 case RSHIFT_EXPR:
1149 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1150 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1152 tree op1 = TREE_OPERAND (t, 1);
1153 if (TREE_INT_CST_HIGH (op1) == 0
1154 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1155 == TREE_INT_CST_LOW (op1))
1156 return true;
1158 break;
1160 default:
1161 break;
1163 return false;
1166 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1167 simplification is possible.
1168 If negate_expr_p would return true for T, NULL_TREE will never be
1169 returned. */
1171 static tree
1172 fold_negate_expr (tree t)
1174 tree type = TREE_TYPE (t);
1175 tree tem;
1177 switch (TREE_CODE (t))
1179 /* Convert - (~A) to A + 1. */
1180 case BIT_NOT_EXPR:
1181 if (INTEGRAL_TYPE_P (type))
1182 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1183 build_int_cst (type, 1));
1184 break;
1186 case INTEGER_CST:
1187 tem = fold_negate_const (t, type);
1188 if (!TREE_OVERFLOW (tem)
1189 || !TYPE_OVERFLOW_TRAPS (type))
1190 return tem;
1191 break;
1193 case REAL_CST:
1194 tem = fold_negate_const (t, type);
1195 /* Two's complement FP formats, such as c4x, may overflow. */
1196 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1197 return tem;
1198 break;
1200 case COMPLEX_CST:
1202 tree rpart = negate_expr (TREE_REALPART (t));
1203 tree ipart = negate_expr (TREE_IMAGPART (t));
1205 if ((TREE_CODE (rpart) == REAL_CST
1206 && TREE_CODE (ipart) == REAL_CST)
1207 || (TREE_CODE (rpart) == INTEGER_CST
1208 && TREE_CODE (ipart) == INTEGER_CST))
1209 return build_complex (type, rpart, ipart);
1211 break;
1213 case NEGATE_EXPR:
1214 return TREE_OPERAND (t, 0);
1216 case PLUS_EXPR:
1217 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1219 /* -(A + B) -> (-B) - A. */
1220 if (negate_expr_p (TREE_OPERAND (t, 1))
1221 && reorder_operands_p (TREE_OPERAND (t, 0),
1222 TREE_OPERAND (t, 1)))
1224 tem = negate_expr (TREE_OPERAND (t, 1));
1225 return fold_build2 (MINUS_EXPR, type,
1226 tem, TREE_OPERAND (t, 0));
1229 /* -(A + B) -> (-A) - B. */
1230 if (negate_expr_p (TREE_OPERAND (t, 0)))
1232 tem = negate_expr (TREE_OPERAND (t, 0));
1233 return fold_build2 (MINUS_EXPR, type,
1234 tem, TREE_OPERAND (t, 1));
1237 break;
1239 case MINUS_EXPR:
1240 /* - (A - B) -> B - A */
1241 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1242 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1243 return fold_build2 (MINUS_EXPR, type,
1244 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1245 break;
1247 case MULT_EXPR:
1248 if (TYPE_UNSIGNED (type))
1249 break;
1251 /* Fall through. */
1253 case RDIV_EXPR:
1254 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1256 tem = TREE_OPERAND (t, 1);
1257 if (negate_expr_p (tem))
1258 return fold_build2 (TREE_CODE (t), type,
1259 TREE_OPERAND (t, 0), negate_expr (tem));
1260 tem = TREE_OPERAND (t, 0);
1261 if (negate_expr_p (tem))
1262 return fold_build2 (TREE_CODE (t), type,
1263 negate_expr (tem), TREE_OPERAND (t, 1));
1265 break;
1267 case TRUNC_DIV_EXPR:
1268 case ROUND_DIV_EXPR:
1269 case FLOOR_DIV_EXPR:
1270 case CEIL_DIV_EXPR:
1271 case EXACT_DIV_EXPR:
1272 /* In general we can't negate A / B, because if A is INT_MIN and
1273 B is 1, we may turn this into INT_MIN / -1 which is undefined
1274 and actually traps on some architectures. But if overflow is
1275 undefined, we can negate, because - (INT_MIN / 1) is an
1276 overflow. */
1277 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1279 const char * const warnmsg = G_("assuming signed overflow does not "
1280 "occur when negating a division");
1281 tem = TREE_OPERAND (t, 1);
1282 if (negate_expr_p (tem))
1284 if (INTEGRAL_TYPE_P (type)
1285 && (TREE_CODE (tem) != INTEGER_CST
1286 || integer_onep (tem)))
1287 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1288 return fold_build2 (TREE_CODE (t), type,
1289 TREE_OPERAND (t, 0), negate_expr (tem));
1291 tem = TREE_OPERAND (t, 0);
1292 if (negate_expr_p (tem))
1294 if (INTEGRAL_TYPE_P (type)
1295 && (TREE_CODE (tem) != INTEGER_CST
1296 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1297 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1298 return fold_build2 (TREE_CODE (t), type,
1299 negate_expr (tem), TREE_OPERAND (t, 1));
1302 break;
1304 case NOP_EXPR:
1305 /* Convert -((double)float) into (double)(-float). */
1306 if (TREE_CODE (type) == REAL_TYPE)
1308 tem = strip_float_extensions (t);
1309 if (tem != t && negate_expr_p (tem))
1310 return negate_expr (tem);
1312 break;
1314 case CALL_EXPR:
1315 /* Convert -f(x) into f(-x). */
1316 if (negate_mathfn_p (builtin_mathfn_code (t))
1317 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1319 tree fndecl, arg, arglist;
1321 fndecl = get_callee_fndecl (t);
1322 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1323 arglist = build_tree_list (NULL_TREE, arg);
1324 return build_function_call_expr (fndecl, arglist);
1326 break;
1328 case RSHIFT_EXPR:
1329 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1330 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1332 tree op1 = TREE_OPERAND (t, 1);
1333 if (TREE_INT_CST_HIGH (op1) == 0
1334 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1335 == TREE_INT_CST_LOW (op1))
1337 tree ntype = TYPE_UNSIGNED (type)
1338 ? lang_hooks.types.signed_type (type)
1339 : lang_hooks.types.unsigned_type (type);
1340 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1341 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1342 return fold_convert (type, temp);
1345 break;
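/* E.g. for 32-bit int, (int) x >> 31 is 0 or -1, and its negation,
   0 or 1, is exactly (unsigned) x >> 31; flipping the signedness of
   the shift therefore performs the negation. */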
1347 default:
1348 break;
1351 return NULL_TREE;
1354 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
1355 negated in a simpler way. Also allow T to be NULL_TREE, in which case
1356 NULL_TREE is returned. */
1358 static tree
1359 negate_expr (tree t)
1361 tree type, tem;
1363 if (t == NULL_TREE)
1364 return NULL_TREE;
1366 type = TREE_TYPE (t);
1367 STRIP_SIGN_NOPS (t);
1369 tem = fold_negate_expr (t);
1370 if (!tem)
1371 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1372 return fold_convert (type, tem);
1375 /* Split a tree IN into constant, literal and variable parts that could be
1376 combined with CODE to make IN. "constant" means an expression with
1377 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1378 commutative arithmetic operation. Store the constant part into *CONP,
1379 the literal in *LITP and return the variable part. If a part isn't
1380 present, set it to null. If the tree does not decompose in this way,
1381 return the entire tree as the variable part and the other parts as null.
1383 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1384 case, we negate an operand that was subtracted, except if it is a
1385 literal, for which we use *MINUS_LITP instead.
1387 If NEGATE_P is true, we are negating all of IN, again except a literal
1388 for which we use *MINUS_LITP instead.
1390 If IN is itself a literal or constant, return it as appropriate.
1392 Note that we do not guarantee that any of the three values will be the
1393 same type as IN, but they will have the same signedness and mode. */
1395 static tree
1396 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1397 tree *minus_litp, int negate_p)
1399 tree var = 0;
1401 *conp = 0;
1402 *litp = 0;
1403 *minus_litp = 0;
1405 /* Strip any conversions that don't change the machine mode or signedness. */
1406 STRIP_SIGN_NOPS (in);
1408 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1409 *litp = in;
1410 else if (TREE_CODE (in) == code
1411 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1412 /* We can associate addition and subtraction together (even
1413 though the C standard doesn't say so) for integers because
1414 the value is not affected. For reals, the value might be
1415 affected, so we can't. */
1416 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1417 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1419 tree op0 = TREE_OPERAND (in, 0);
1420 tree op1 = TREE_OPERAND (in, 1);
1421 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1422 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1424 /* First see if either of the operands is a literal, then a constant. */
1425 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1426 *litp = op0, op0 = 0;
1427 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1428 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1430 if (op0 != 0 && TREE_CONSTANT (op0))
1431 *conp = op0, op0 = 0;
1432 else if (op1 != 0 && TREE_CONSTANT (op1))
1433 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1435 /* If we haven't dealt with either operand, this is not a case we can
1436 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1437 if (op0 != 0 && op1 != 0)
1438 var = in;
1439 else if (op0 != 0)
1440 var = op0;
1441 else
1442 var = op1, neg_var_p = neg1_p;
1444 /* Now do any needed negations. */
1445 if (neg_litp_p)
1446 *minus_litp = *litp, *litp = 0;
1447 if (neg_conp_p)
1448 *conp = negate_expr (*conp);
1449 if (neg_var_p)
1450 var = negate_expr (var);
1452 else if (TREE_CONSTANT (in))
1453 *conp = in;
1454 else
1455 var = in;
1457 if (negate_p)
1459 if (*litp)
1460 *minus_litp = *litp, *litp = 0;
1461 else if (*minus_litp)
1462 *litp = *minus_litp, *minus_litp = 0;
1463 *conp = negate_expr (*conp);
1464 var = negate_expr (var);
1467 return var;
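/* Example: for IN == VAR + 4 with CODE == PLUS_EXPR, the literal 4 goes
   to *LITP and VAR is returned; for IN == VAR - 4 the literal instead
   goes to *MINUS_LITP, recording that it was subtracted. */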
1470 /* Re-associate trees split by the above function. T1 and T2 are either
1471 expressions to associate or null. Return the new expression, if any. If
1472 we build an operation, do it in TYPE and with CODE. */
1474 static tree
1475 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1477 if (t1 == 0)
1478 return t2;
1479 else if (t2 == 0)
1480 return t1;
1482 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1483 try to fold this since we will have infinite recursion. But do
1484 deal with any NEGATE_EXPRs. */
1485 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1486 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1488 if (code == PLUS_EXPR)
1490 if (TREE_CODE (t1) == NEGATE_EXPR)
1491 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1492 fold_convert (type, TREE_OPERAND (t1, 0)));
1493 else if (TREE_CODE (t2) == NEGATE_EXPR)
1494 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1495 fold_convert (type, TREE_OPERAND (t2, 0)));
1496 else if (integer_zerop (t2))
1497 return fold_convert (type, t1);
1499 else if (code == MINUS_EXPR)
1501 if (integer_zerop (t2))
1502 return fold_convert (type, t1);
1505 return build2 (code, type, fold_convert (type, t1),
1506 fold_convert (type, t2));
1509 return fold_build2 (code, type, fold_convert (type, t1),
1510 fold_convert (type, t2));
1513 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1514 to produce a new constant. Return NULL_TREE if we don't know how
1515 to evaluate CODE at compile-time.
1517 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1519 tree
1520 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1522 unsigned HOST_WIDE_INT int1l, int2l;
1523 HOST_WIDE_INT int1h, int2h;
1524 unsigned HOST_WIDE_INT low;
1525 HOST_WIDE_INT hi;
1526 unsigned HOST_WIDE_INT garbagel;
1527 HOST_WIDE_INT garbageh;
1528 tree t;
1529 tree type = TREE_TYPE (arg1);
1530 int uns = TYPE_UNSIGNED (type);
1531 int is_sizetype
1532 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1533 int overflow = 0;
1535 int1l = TREE_INT_CST_LOW (arg1);
1536 int1h = TREE_INT_CST_HIGH (arg1);
1537 int2l = TREE_INT_CST_LOW (arg2);
1538 int2h = TREE_INT_CST_HIGH (arg2);
1540 switch (code)
1542 case BIT_IOR_EXPR:
1543 low = int1l | int2l, hi = int1h | int2h;
1544 break;
1546 case BIT_XOR_EXPR:
1547 low = int1l ^ int2l, hi = int1h ^ int2h;
1548 break;
1550 case BIT_AND_EXPR:
1551 low = int1l & int2l, hi = int1h & int2h;
1552 break;
1554 case RSHIFT_EXPR:
1555 int2l = -int2l;
1556 case LSHIFT_EXPR:
1557 /* It's unclear from the C standard whether shifts can overflow.
1558 The following code ignores overflow; perhaps a C standard
1559 interpretation ruling is needed. */
1560 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1561 &low, &hi, !uns);
1562 break;
1564 case RROTATE_EXPR:
1565 int2l = -int2l;
1566 case LROTATE_EXPR:
1567 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1568 &low, &hi);
1569 break;
1571 case PLUS_EXPR:
1572 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1573 break;
1575 case MINUS_EXPR:
1576 neg_double (int2l, int2h, &low, &hi);
1577 add_double (int1l, int1h, low, hi, &low, &hi);
1578 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1579 break;
1581 case MULT_EXPR:
1582 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1583 break;
1585 case TRUNC_DIV_EXPR:
1586 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1587 case EXACT_DIV_EXPR:
1588 /* This is a shortcut for a common special case. */
1589 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1590 && ! TREE_CONSTANT_OVERFLOW (arg1)
1591 && ! TREE_CONSTANT_OVERFLOW (arg2)
1592 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1594 if (code == CEIL_DIV_EXPR)
1595 int1l += int2l - 1;
1597 low = int1l / int2l, hi = 0;
1598 break;
1601 /* ... fall through ... */
1603 case ROUND_DIV_EXPR:
1604 if (int2h == 0 && int2l == 0)
1605 return NULL_TREE;
1606 if (int2h == 0 && int2l == 1)
1608 low = int1l, hi = int1h;
1609 break;
1611 if (int1l == int2l && int1h == int2h
1612 && ! (int1l == 0 && int1h == 0))
1614 low = 1, hi = 0;
1615 break;
1617 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1618 &low, &hi, &garbagel, &garbageh);
1619 break;
1621 case TRUNC_MOD_EXPR:
1622 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1623 /* This is a shortcut for a common special case. */
1624 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1625 && ! TREE_CONSTANT_OVERFLOW (arg1)
1626 && ! TREE_CONSTANT_OVERFLOW (arg2)
1627 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1629 if (code == CEIL_MOD_EXPR)
1630 int1l += int2l - 1;
1631 low = int1l % int2l, hi = 0;
1632 break;
1635 /* ... fall through ... */
1637 case ROUND_MOD_EXPR:
1638 if (int2h == 0 && int2l == 0)
1639 return NULL_TREE;
1640 overflow = div_and_round_double (code, uns,
1641 int1l, int1h, int2l, int2h,
1642 &garbagel, &garbageh, &low, &hi);
1643 break;
1645 case MIN_EXPR:
1646 case MAX_EXPR:
1647 if (uns)
1648 low = (((unsigned HOST_WIDE_INT) int1h
1649 < (unsigned HOST_WIDE_INT) int2h)
1650 || (((unsigned HOST_WIDE_INT) int1h
1651 == (unsigned HOST_WIDE_INT) int2h)
1652 && int1l < int2l));
1653 else
1654 low = (int1h < int2h
1655 || (int1h == int2h && int1l < int2l));
1657 if (low == (code == MIN_EXPR))
1658 low = int1l, hi = int1h;
1659 else
1660 low = int2l, hi = int2h;
1661 break;
1663 default:
1664 return NULL_TREE;
1667 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1669 if (notrunc)
1671 /* Propagate overflow flags ourselves. */
1672 if (((!uns || is_sizetype) && overflow)
1673 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1675 t = copy_node (t);
1676 TREE_OVERFLOW (t) = 1;
1677 TREE_CONSTANT_OVERFLOW (t) = 1;
1679 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1681 t = copy_node (t);
1682 TREE_CONSTANT_OVERFLOW (t) = 1;
1685 else
1686 t = force_fit_type (t, 1,
1687 ((!uns || is_sizetype) && overflow)
1688 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1689 TREE_CONSTANT_OVERFLOW (arg1)
1690 | TREE_CONSTANT_OVERFLOW (arg2));
1692 return t;
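/* Hedged sketch: folding the constant expression 2 + 3 at compile time.
   NOTRUNC == 0 lets force_fit_type fit the result to the type.
   Compiled out. */
#if 0
static void
int_const_binop_sketch (void)
{
  tree five = int_const_binop (PLUS_EXPR,
                               build_int_cst (integer_type_node, 2),
                               build_int_cst (integer_type_node, 3), 0);
  gcc_assert (TREE_INT_CST_LOW (five) == 5);
}
#endif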
1695 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1696 constant. We assume ARG1 and ARG2 have the same data type, or at least
1697 are the same kind of constant and the same machine mode. Return zero if
1698 combining the constants is not allowed in the current operating mode.
1700 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1702 static tree
1703 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1705 /* Sanity check for the recursive cases. */
1706 if (!arg1 || !arg2)
1707 return NULL_TREE;
1709 STRIP_NOPS (arg1);
1710 STRIP_NOPS (arg2);
1712 if (TREE_CODE (arg1) == INTEGER_CST)
1713 return int_const_binop (code, arg1, arg2, notrunc);
1715 if (TREE_CODE (arg1) == REAL_CST)
1717 enum machine_mode mode;
1718 REAL_VALUE_TYPE d1;
1719 REAL_VALUE_TYPE d2;
1720 REAL_VALUE_TYPE value;
1721 REAL_VALUE_TYPE result;
1722 bool inexact;
1723 tree t, type;
1725 /* The following codes are handled by real_arithmetic. */
1726 switch (code)
1728 case PLUS_EXPR:
1729 case MINUS_EXPR:
1730 case MULT_EXPR:
1731 case RDIV_EXPR:
1732 case MIN_EXPR:
1733 case MAX_EXPR:
1734 break;
1736 default:
1737 return NULL_TREE;
1740 d1 = TREE_REAL_CST (arg1);
1741 d2 = TREE_REAL_CST (arg2);
1743 type = TREE_TYPE (arg1);
1744 mode = TYPE_MODE (type);
1746 /* Don't perform operation if we honor signaling NaNs and
1747 either operand is a NaN. */
1748 if (HONOR_SNANS (mode)
1749 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1750 return NULL_TREE;
1752 /* Don't perform operation if it would raise a division
1753 by zero exception. */
1754 if (code == RDIV_EXPR
1755 && REAL_VALUES_EQUAL (d2, dconst0)
1756 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1757 return NULL_TREE;
1759 /* If either operand is a NaN, just return it. Otherwise, set up
1760 for floating-point trap; we return an overflow. */
1761 if (REAL_VALUE_ISNAN (d1))
1762 return arg1;
1763 else if (REAL_VALUE_ISNAN (d2))
1764 return arg2;
1766 inexact = real_arithmetic (&value, code, &d1, &d2);
1767 real_convert (&result, mode, &value);
1769 /* Don't constant fold this floating point operation if
1770 the result has overflowed and flag_trapping_math. */
1771 if (flag_trapping_math
1772 && MODE_HAS_INFINITIES (mode)
1773 && REAL_VALUE_ISINF (result)
1774 && !REAL_VALUE_ISINF (d1)
1775 && !REAL_VALUE_ISINF (d2))
1776 return NULL_TREE;
1778 /* Don't constant fold this floating point operation if the
1779 result may depend upon the run-time rounding mode and
1780 flag_rounding_math is set, or if GCC's software emulation
1781 is unable to accurately represent the result. */
1782 if ((flag_rounding_math
1783 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1784 && !flag_unsafe_math_optimizations))
1785 && (inexact || !real_identical (&result, &value)))
1786 return NULL_TREE;
1788 t = build_real (type, result);
1790 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1791 TREE_CONSTANT_OVERFLOW (t)
1792 = TREE_OVERFLOW (t)
1793 | TREE_CONSTANT_OVERFLOW (arg1)
1794 | TREE_CONSTANT_OVERFLOW (arg2);
1795 return t;
1798 if (TREE_CODE (arg1) == COMPLEX_CST)
1800 tree type = TREE_TYPE (arg1);
1801 tree r1 = TREE_REALPART (arg1);
1802 tree i1 = TREE_IMAGPART (arg1);
1803 tree r2 = TREE_REALPART (arg2);
1804 tree i2 = TREE_IMAGPART (arg2);
1805 tree real, imag;
1807 switch (code)
1809 case PLUS_EXPR:
1810 case MINUS_EXPR:
1811 real = const_binop (code, r1, r2, notrunc);
1812 imag = const_binop (code, i1, i2, notrunc);
1813 break;
1815 case MULT_EXPR:
1816 real = const_binop (MINUS_EXPR,
1817 const_binop (MULT_EXPR, r1, r2, notrunc),
1818 const_binop (MULT_EXPR, i1, i2, notrunc),
1819 notrunc);
1820 imag = const_binop (PLUS_EXPR,
1821 const_binop (MULT_EXPR, r1, i2, notrunc),
1822 const_binop (MULT_EXPR, i1, r2, notrunc),
1823 notrunc);
1824 break;
1826 case RDIV_EXPR:
1828 tree magsquared
1829 = const_binop (PLUS_EXPR,
1830 const_binop (MULT_EXPR, r2, r2, notrunc),
1831 const_binop (MULT_EXPR, i2, i2, notrunc),
1832 notrunc);
1833 tree t1
1834 = const_binop (PLUS_EXPR,
1835 const_binop (MULT_EXPR, r1, r2, notrunc),
1836 const_binop (MULT_EXPR, i1, i2, notrunc),
1837 notrunc);
1838 tree t2
1839 = const_binop (MINUS_EXPR,
1840 const_binop (MULT_EXPR, i1, r2, notrunc),
1841 const_binop (MULT_EXPR, r1, i2, notrunc),
1842 notrunc);
1844 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1845 code = TRUNC_DIV_EXPR;
1847 real = const_binop (code, t1, magsquared, notrunc);
1848 imag = const_binop (code, t2, magsquared, notrunc);
1850 break;
1852 default:
1853 return NULL_TREE;
1856 if (real && imag)
1857 return build_complex (type, real, imag);
1860 return NULL_TREE;
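/* The RDIV_EXPR case above implements the textbook identity
   (r1 + i*i1) / (r2 + i*i2)
   == ((r1*r2 + i1*i2) + i*(i1*r2 - r1*i2)) / (r2*r2 + i2*i2),
   with T1, T2 and MAGSQUARED holding the three sub-expressions. */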
1863 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1864 indicates which particular sizetype to create. */
1866 tree
1867 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1869 return build_int_cst (sizetype_tab[(int) kind], number);
1872 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1873 is a tree code. The type of the result is taken from the operands.
1874 Both must be the same integer type, and it must be a size type.
1875 If the operands are constant, so is the result. */
1877 tree
1878 size_binop (enum tree_code code, tree arg0, tree arg1)
1880 tree type = TREE_TYPE (arg0);
1882 if (arg0 == error_mark_node || arg1 == error_mark_node)
1883 return error_mark_node;
1885 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1886 && type == TREE_TYPE (arg1));
1888 /* Handle the special case of two integer constants faster. */
1889 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1891 /* And some specific cases even faster than that. */
1892 if (code == PLUS_EXPR && integer_zerop (arg0))
1893 return arg1;
1894 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1895 && integer_zerop (arg1))
1896 return arg0;
1897 else if (code == MULT_EXPR && integer_onep (arg0))
1898 return arg1;
1900 /* Handle general case of two integer constants. */
1901 return int_const_binop (code, arg0, arg1, 0);
1904 return fold_build2 (code, type, arg0, arg1);
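/* E.g. size_binop (PLUS_EXPR, size_int (4), size_int (8)) takes the
   INTEGER_CST fast path above and folds directly to size_int (12). */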
1907 /* Given two values, either both of sizetype or both of bitsizetype,
1908 compute the difference between the two values. Return the value
1909 in the signed type corresponding to the type of the operands.
1911 tree
1912 size_diffop (tree arg0, tree arg1)
1914 tree type = TREE_TYPE (arg0);
1915 tree ctype;
1917 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1918 && type == TREE_TYPE (arg1));
1920 /* If the type is already signed, just do the simple thing. */
1921 if (!TYPE_UNSIGNED (type))
1922 return size_binop (MINUS_EXPR, arg0, arg1);
1924 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1926 /* If either operand is not a constant, do the conversions to the signed
1927 type and subtract. The hardware will do the right thing with any
1928 overflow in the subtraction. */
1929 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1930 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1931 fold_convert (ctype, arg1));
1933 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1934 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1935 overflow) and negate (which can't either). Special-case a result
1936 of zero while we're here. */
1937 if (tree_int_cst_equal (arg0, arg1))
1938 return build_int_cst (ctype, 0);
1939 else if (tree_int_cst_lt (arg1, arg0))
1940 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1941 else
1942 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1943 fold_convert (ctype, size_binop (MINUS_EXPR,
1944 arg1, arg0)));
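/* A worked example with made-up constants:
   size_diffop (size_int (2), size_int (5)) finds the operands unequal
   with 5 the larger, computes 5 - 2 = 3 in the unsigned type, converts
   the 3 to ssizetype (which cannot overflow) and subtracts it from
   zero, yielding -3.  */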
1947 /* A subroutine of fold_convert_const handling conversions of an
1948 INTEGER_CST to another integer type. */
1950 static tree
1951 fold_convert_const_int_from_int (tree type, tree arg1)
1953 tree t;
1955 /* Given an integer constant, make new constant with new type,
1956 appropriately sign-extended or truncated. */
1957 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1958 TREE_INT_CST_HIGH (arg1));
1960 t = force_fit_type (t,
1961 /* Don't set the overflow when
1962 converting a pointer */
1963 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1964 (TREE_INT_CST_HIGH (arg1) < 0
1965 && (TYPE_UNSIGNED (type)
1966 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1967 | TREE_OVERFLOW (arg1),
1968 TREE_CONSTANT_OVERFLOW (arg1));
1970 return t;
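/* For example, converting the int constant 300 to an 8-bit unsigned
   type rebuilds it as an INTEGER_CST of that type and lets
   force_fit_type truncate it to 300 & 0xff == 44; whether an overflow
   flag is set on the result is force_fit_type's decision.  */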
1973 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1974 to an integer type. */
1976 static tree
1977 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1979 int overflow = 0;
1980 tree t;
1982 /* The following code implements the floating point to integer
1983 conversion rules required by the Java Language Specification,
1984 that IEEE NaNs are mapped to zero and values that overflow
1985 the target precision saturate, i.e. values greater than
1986 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1987 are mapped to INT_MIN. These semantics are allowed by the
1988 C and C++ standards that simply state that the behavior of
1989 FP-to-integer conversion is unspecified upon overflow. */
1991 HOST_WIDE_INT high, low;
1992 REAL_VALUE_TYPE r;
1993 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1995 switch (code)
1997 case FIX_TRUNC_EXPR:
1998 real_trunc (&r, VOIDmode, &x);
1999 break;
2001 case FIX_CEIL_EXPR:
2002 real_ceil (&r, VOIDmode, &x);
2003 break;
2005 case FIX_FLOOR_EXPR:
2006 real_floor (&r, VOIDmode, &x);
2007 break;
2009 case FIX_ROUND_EXPR:
2010 real_round (&r, VOIDmode, &x);
2011 break;
2013 default:
2014 gcc_unreachable ();
2017 /* If R is NaN, return zero and show we have an overflow. */
2018 if (REAL_VALUE_ISNAN (r))
2020 overflow = 1;
2021 high = 0;
2022 low = 0;
2025 /* See if R is less than the lower bound or greater than the
2026 upper bound. */
2028 if (! overflow)
2030 tree lt = TYPE_MIN_VALUE (type);
2031 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2032 if (REAL_VALUES_LESS (r, l))
2034 overflow = 1;
2035 high = TREE_INT_CST_HIGH (lt);
2036 low = TREE_INT_CST_LOW (lt);
2040 if (! overflow)
2042 tree ut = TYPE_MAX_VALUE (type);
2043 if (ut)
2045 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2046 if (REAL_VALUES_LESS (u, r))
2048 overflow = 1;
2049 high = TREE_INT_CST_HIGH (ut);
2050 low = TREE_INT_CST_LOW (ut);
2055 if (! overflow)
2056 REAL_VALUE_TO_INT (&low, &high, r);
2058 t = build_int_cst_wide (type, low, high);
2060 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
2061 TREE_CONSTANT_OVERFLOW (arg1));
2062 return t;
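/* Illustrating the saturating semantics above for a 32-bit int type:
   (int) 3.9 under FIX_TRUNC_EXPR folds to 3, (int) 1e30 folds to
   INT_MAX with the overflow flags set, and (int) NaN folds to 0,
   likewise marked as overflowed.  */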
2065 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2066 to another floating point type. */
2068 static tree
2069 fold_convert_const_real_from_real (tree type, tree arg1)
2071 REAL_VALUE_TYPE value;
2072 tree t;
2074 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2075 t = build_real (type, value);
2077 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2078 TREE_CONSTANT_OVERFLOW (t)
2079 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2080 return t;
2083 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2084 type TYPE. If no simplification can be done return NULL_TREE. */
2086 static tree
2087 fold_convert_const (enum tree_code code, tree type, tree arg1)
2089 if (TREE_TYPE (arg1) == type)
2090 return arg1;
2092 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2094 if (TREE_CODE (arg1) == INTEGER_CST)
2095 return fold_convert_const_int_from_int (type, arg1);
2096 else if (TREE_CODE (arg1) == REAL_CST)
2097 return fold_convert_const_int_from_real (code, type, arg1);
2099 else if (TREE_CODE (type) == REAL_TYPE)
2101 if (TREE_CODE (arg1) == INTEGER_CST)
2102 return build_real_from_int_cst (type, arg1);
2103 if (TREE_CODE (arg1) == REAL_CST)
2104 return fold_convert_const_real_from_real (type, arg1);
2106 return NULL_TREE;
2109 /* Construct a vector of zero elements of vector type TYPE. */
2111 static tree
2112 build_zero_vector (tree type)
2114 tree elem, list;
2115 int i, units;
2117 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2118 units = TYPE_VECTOR_SUBPARTS (type);
2120 list = NULL_TREE;
2121 for (i = 0; i < units; i++)
2122 list = tree_cons (NULL_TREE, elem, list);
2123 return build_vector (type, list);
2126 /* Convert expression ARG to type TYPE. Used by the middle-end for
2127 simple conversions in preference to calling the front-end's convert. */
2129 tree
2130 fold_convert (tree type, tree arg)
2132 tree orig = TREE_TYPE (arg);
2133 tree tem;
2135 if (type == orig)
2136 return arg;
2138 if (TREE_CODE (arg) == ERROR_MARK
2139 || TREE_CODE (type) == ERROR_MARK
2140 || TREE_CODE (orig) == ERROR_MARK)
2141 return error_mark_node;
2143 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2144 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2145 TYPE_MAIN_VARIANT (orig)))
2146 return fold_build1 (NOP_EXPR, type, arg);
2148 switch (TREE_CODE (type))
2150 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2151 case POINTER_TYPE: case REFERENCE_TYPE:
2152 case OFFSET_TYPE:
2153 if (TREE_CODE (arg) == INTEGER_CST)
2155 tem = fold_convert_const (NOP_EXPR, type, arg);
2156 if (tem != NULL_TREE)
2157 return tem;
2159 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2160 || TREE_CODE (orig) == OFFSET_TYPE)
2161 return fold_build1 (NOP_EXPR, type, arg);
2162 if (TREE_CODE (orig) == COMPLEX_TYPE)
2164 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2165 return fold_convert (type, tem);
2167 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2168 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2169 return fold_build1 (NOP_EXPR, type, arg);
2171 case REAL_TYPE:
2172 if (TREE_CODE (arg) == INTEGER_CST)
2174 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2175 if (tem != NULL_TREE)
2176 return tem;
2178 else if (TREE_CODE (arg) == REAL_CST)
2180 tem = fold_convert_const (NOP_EXPR, type, arg);
2181 if (tem != NULL_TREE)
2182 return tem;
2185 switch (TREE_CODE (orig))
2187 case INTEGER_TYPE:
2188 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2189 case POINTER_TYPE: case REFERENCE_TYPE:
2190 return fold_build1 (FLOAT_EXPR, type, arg);
2192 case REAL_TYPE:
2193 return fold_build1 (NOP_EXPR, type, arg);
2195 case COMPLEX_TYPE:
2196 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2197 return fold_convert (type, tem);
2199 default:
2200 gcc_unreachable ();
2203 case COMPLEX_TYPE:
2204 switch (TREE_CODE (orig))
2206 case INTEGER_TYPE:
2207 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2208 case POINTER_TYPE: case REFERENCE_TYPE:
2209 case REAL_TYPE:
2210 return build2 (COMPLEX_EXPR, type,
2211 fold_convert (TREE_TYPE (type), arg),
2212 fold_convert (TREE_TYPE (type), integer_zero_node));
2213 case COMPLEX_TYPE:
2215 tree rpart, ipart;
2217 if (TREE_CODE (arg) == COMPLEX_EXPR)
2219 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2220 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2221 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2224 arg = save_expr (arg);
2225 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2226 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2227 rpart = fold_convert (TREE_TYPE (type), rpart);
2228 ipart = fold_convert (TREE_TYPE (type), ipart);
2229 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2232 default:
2233 gcc_unreachable ();
2236 case VECTOR_TYPE:
2237 if (integer_zerop (arg))
2238 return build_zero_vector (type);
2239 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2240 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2241 || TREE_CODE (orig) == VECTOR_TYPE);
2242 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2244 case VOID_TYPE:
2245 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2247 default:
2248 gcc_unreachable ();
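/* Some concrete cases of the dispatch above: converting a scalar to a
   COMPLEX_TYPE builds a COMPLEX_EXPR whose imaginary part is zero;
   converting a COMPLEX_TYPE value to a scalar type keeps only its
   REALPART_EXPR; and converting integer zero to a VECTOR_TYPE yields
   a zero vector.  */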
2252 /* Return false if expr can be assumed not to be an lvalue, true
2253 otherwise. */
2255 static bool
2256 maybe_lvalue_p (tree x)
2258 /* We only need to wrap lvalue tree codes. */
2259 switch (TREE_CODE (x))
2261 case VAR_DECL:
2262 case PARM_DECL:
2263 case RESULT_DECL:
2264 case LABEL_DECL:
2265 case FUNCTION_DECL:
2266 case SSA_NAME:
2268 case COMPONENT_REF:
2269 case INDIRECT_REF:
2270 case ALIGN_INDIRECT_REF:
2271 case MISALIGNED_INDIRECT_REF:
2272 case ARRAY_REF:
2273 case ARRAY_RANGE_REF:
2274 case BIT_FIELD_REF:
2275 case OBJ_TYPE_REF:
2277 case REALPART_EXPR:
2278 case IMAGPART_EXPR:
2279 case PREINCREMENT_EXPR:
2280 case PREDECREMENT_EXPR:
2281 case SAVE_EXPR:
2282 case TRY_CATCH_EXPR:
2283 case WITH_CLEANUP_EXPR:
2284 case COMPOUND_EXPR:
2285 case MODIFY_EXPR:
2286 case TARGET_EXPR:
2287 case COND_EXPR:
2288 case BIND_EXPR:
2289 case MIN_EXPR:
2290 case MAX_EXPR:
2291 break;
2293 default:
2294 /* Assume the worst for front-end tree codes. */
2295 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2296 break;
2297 return false;
2300 return true;
2303 /* Return an expr equal to X but certainly not valid as an lvalue. */
2305 tree
2306 non_lvalue (tree x)
2308 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2309 us. */
2310 if (in_gimple_form)
2311 return x;
2313 if (! maybe_lvalue_p (x))
2314 return x;
2315 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2318 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2319 Zero means allow extended lvalues. */
2321 int pedantic_lvalues;
2323 /* When pedantic, return an expr equal to X but certainly not valid as a
2324 pedantic lvalue. Otherwise, return X. */
2326 static tree
2327 pedantic_non_lvalue (tree x)
2329 if (pedantic_lvalues)
2330 return non_lvalue (x);
2331 else
2332 return x;
2335 /* Given a tree comparison code, return the code that is the logical inverse
2336 of the given code. It is not safe to do this for floating-point
3337 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a flag saying
3338 whether NaNs are honored: if reversing the comparison is unsafe, return ERROR_MARK. */
2340 enum tree_code
2341 invert_tree_comparison (enum tree_code code, bool honor_nans)
2343 if (honor_nans && flag_trapping_math)
2344 return ERROR_MARK;
2346 switch (code)
2348 case EQ_EXPR:
2349 return NE_EXPR;
2350 case NE_EXPR:
2351 return EQ_EXPR;
2352 case GT_EXPR:
2353 return honor_nans ? UNLE_EXPR : LE_EXPR;
2354 case GE_EXPR:
2355 return honor_nans ? UNLT_EXPR : LT_EXPR;
2356 case LT_EXPR:
2357 return honor_nans ? UNGE_EXPR : GE_EXPR;
2358 case LE_EXPR:
2359 return honor_nans ? UNGT_EXPR : GT_EXPR;
2360 case LTGT_EXPR:
2361 return UNEQ_EXPR;
2362 case UNEQ_EXPR:
2363 return LTGT_EXPR;
2364 case UNGT_EXPR:
2365 return LE_EXPR;
2366 case UNGE_EXPR:
2367 return LT_EXPR;
2368 case UNLT_EXPR:
2369 return GE_EXPR;
2370 case UNLE_EXPR:
2371 return GT_EXPR;
2372 case ORDERED_EXPR:
2373 return UNORDERED_EXPR;
2374 case UNORDERED_EXPR:
2375 return ORDERED_EXPR;
2376 default:
2377 gcc_unreachable ();
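/* For example, when NaNs are honored the inverse of LT_EXPR is
   UNGE_EXPR, since !(x < y) must remain true when x and y are
   unordered.  Under -ftrapping-math we refuse instead (ERROR_MARK),
   as the unordered variant would not raise the same exceptions as
   the original comparison.  */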
2381 /* Similar, but return the comparison that results if the operands are
2382 swapped. This is safe for floating-point. */
2384 enum tree_code
2385 swap_tree_comparison (enum tree_code code)
2387 switch (code)
2389 case EQ_EXPR:
2390 case NE_EXPR:
2391 case ORDERED_EXPR:
2392 case UNORDERED_EXPR:
2393 case LTGT_EXPR:
2394 case UNEQ_EXPR:
2395 return code;
2396 case GT_EXPR:
2397 return LT_EXPR;
2398 case GE_EXPR:
2399 return LE_EXPR;
2400 case LT_EXPR:
2401 return GT_EXPR;
2402 case LE_EXPR:
2403 return GE_EXPR;
2404 case UNGT_EXPR:
2405 return UNLT_EXPR;
2406 case UNGE_EXPR:
2407 return UNLE_EXPR;
2408 case UNLT_EXPR:
2409 return UNGT_EXPR;
2410 case UNLE_EXPR:
2411 return UNGE_EXPR;
2412 default:
2413 gcc_unreachable ();
2418 /* Convert a comparison tree code from an enum tree_code representation
2419 into a compcode bit-based encoding. This function is the inverse of
2420 compcode_to_comparison. */
2422 static enum comparison_code
2423 comparison_to_compcode (enum tree_code code)
2425 switch (code)
2427 case LT_EXPR:
2428 return COMPCODE_LT;
2429 case EQ_EXPR:
2430 return COMPCODE_EQ;
2431 case LE_EXPR:
2432 return COMPCODE_LE;
2433 case GT_EXPR:
2434 return COMPCODE_GT;
2435 case NE_EXPR:
2436 return COMPCODE_NE;
2437 case GE_EXPR:
2438 return COMPCODE_GE;
2439 case ORDERED_EXPR:
2440 return COMPCODE_ORD;
2441 case UNORDERED_EXPR:
2442 return COMPCODE_UNORD;
2443 case UNLT_EXPR:
2444 return COMPCODE_UNLT;
2445 case UNEQ_EXPR:
2446 return COMPCODE_UNEQ;
2447 case UNLE_EXPR:
2448 return COMPCODE_UNLE;
2449 case UNGT_EXPR:
2450 return COMPCODE_UNGT;
2451 case LTGT_EXPR:
2452 return COMPCODE_LTGT;
2453 case UNGE_EXPR:
2454 return COMPCODE_UNGE;
2455 default:
2456 gcc_unreachable ();
2460 /* Convert a compcode bit-based encoding of a comparison operator back
2461 to GCC's enum tree_code representation. This function is the
2462 inverse of comparison_to_compcode. */
2464 static enum tree_code
2465 compcode_to_comparison (enum comparison_code code)
2467 switch (code)
2469 case COMPCODE_LT:
2470 return LT_EXPR;
2471 case COMPCODE_EQ:
2472 return EQ_EXPR;
2473 case COMPCODE_LE:
2474 return LE_EXPR;
2475 case COMPCODE_GT:
2476 return GT_EXPR;
2477 case COMPCODE_NE:
2478 return NE_EXPR;
2479 case COMPCODE_GE:
2480 return GE_EXPR;
2481 case COMPCODE_ORD:
2482 return ORDERED_EXPR;
2483 case COMPCODE_UNORD:
2484 return UNORDERED_EXPR;
2485 case COMPCODE_UNLT:
2486 return UNLT_EXPR;
2487 case COMPCODE_UNEQ:
2488 return UNEQ_EXPR;
2489 case COMPCODE_UNLE:
2490 return UNLE_EXPR;
2491 case COMPCODE_UNGT:
2492 return UNGT_EXPR;
2493 case COMPCODE_LTGT:
2494 return LTGT_EXPR;
2495 case COMPCODE_UNGE:
2496 return UNGE_EXPR;
2497 default:
2498 gcc_unreachable ();
2502 /* Return a tree for the comparison which is the combination of
2503 doing the AND or OR (depending on CODE) of the two operations LCODE
2504 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2505 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2506 if this makes the transformation invalid. */
2508 tree
2509 combine_comparisons (enum tree_code code, enum tree_code lcode,
2510 enum tree_code rcode, tree truth_type,
2511 tree ll_arg, tree lr_arg)
2513 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2514 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2515 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2516 enum comparison_code compcode;
2518 switch (code)
2520 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2521 compcode = lcompcode & rcompcode;
2522 break;
2524 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2525 compcode = lcompcode | rcompcode;
2526 break;
2528 default:
2529 return NULL_TREE;
2532 if (!honor_nans)
2534 /* Eliminate unordered comparisons, as well as LTGT and ORD
2535 which are not used unless the mode has NaNs. */
2536 compcode &= ~COMPCODE_UNORD;
2537 if (compcode == COMPCODE_LTGT)
2538 compcode = COMPCODE_NE;
2539 else if (compcode == COMPCODE_ORD)
2540 compcode = COMPCODE_TRUE;
2542 else if (flag_trapping_math)
2544 /* Check that the original operation and the optimized ones will trap
2545 under the same condition. */
2546 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2547 && (lcompcode != COMPCODE_EQ)
2548 && (lcompcode != COMPCODE_ORD);
2549 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2550 && (rcompcode != COMPCODE_EQ)
2551 && (rcompcode != COMPCODE_ORD);
2552 bool trap = (compcode & COMPCODE_UNORD) == 0
2553 && (compcode != COMPCODE_EQ)
2554 && (compcode != COMPCODE_ORD);
2556 /* In a short-circuited boolean expression the LHS might be
2557 such that the RHS, if evaluated, will never trap. For
2558 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2559 if neither x nor y is NaN. (This is a mixed blessing: for
2560 example, the expression above will never trap, hence
2561 optimizing it to x < y would be invalid). */
2562 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2563 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2564 rtrap = false;
2566 /* If the comparison was short-circuited, and only the RHS
2567 trapped, we may now generate a spurious trap. */
2568 if (rtrap && !ltrap
2569 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2570 return NULL_TREE;
2572 /* If we changed the conditions that cause a trap, we lose. */
2573 if ((ltrap || rtrap) != trap)
2574 return NULL_TREE;
2577 if (compcode == COMPCODE_TRUE)
2578 return constant_boolean_node (true, truth_type);
2579 else if (compcode == COMPCODE_FALSE)
2580 return constant_boolean_node (false, truth_type);
2581 else
2582 return fold_build2 (compcode_to_comparison (compcode),
2583 truth_type, ll_arg, lr_arg);
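/* A worked example of the bit encoding in action: for
   (x < y) || (x == y), LCOMPCODE is COMPCODE_LT (binary 001) and
   RCOMPCODE is COMPCODE_EQ (binary 010); their bitwise OR is 011,
   i.e. COMPCODE_LE, so the whole expression folds to x <= y whenever
   the NaN checks above allow it.  */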
2586 /* Return nonzero if CODE is a tree code that represents a truth value. */
2588 static int
2589 truth_value_p (enum tree_code code)
2591 return (TREE_CODE_CLASS (code) == tcc_comparison
2592 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2593 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2594 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2597 /* Return nonzero if two operands (typically of the same tree node)
2598 are necessarily equal. If either argument has side-effects this
2599 function returns zero. FLAGS modifies behavior as follows:
2601 If OEP_ONLY_CONST is set, only return nonzero for constants.
2602 This function tests whether the operands are indistinguishable;
2603 it does not test whether they are equal using C's == operation.
2604 The distinction is important for IEEE floating point, because
2605 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2606 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2608 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2609 even though it may hold multiple values during a function.
2610 This is because a GCC tree node guarantees that nothing else is
2611 executed between the evaluation of its "operands" (which may often
2612 be evaluated in arbitrary order). Hence if the operands themselves
2613 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2614 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2615 unset means assuming isochronic (or instantaneous) tree equivalence.
2616 Unless comparing arbitrary expression trees, such as from different
2617 statements, this flag can usually be left unset.
2619 If OEP_PURE_SAME is set, then pure functions with identical arguments
2620 are considered the same. It is used when the caller has other ways
2621 to ensure that global memory is unchanged in between. */
2623 int
2624 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2626 /* If either is ERROR_MARK, they aren't equal. */
2627 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2628 return 0;
2630 /* If the two types don't have the same signedness, we can't consider
2631 them equal. We must check this before the STRIP_NOPS calls
2632 because they may change the signedness of the arguments. */
2633 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2634 return 0;
2636 /* If the two types don't have the same precision, it is not safe
2637 to strip NOPs. */
2638 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2639 return 0;
2641 STRIP_NOPS (arg0);
2642 STRIP_NOPS (arg1);
2644 /* In case both args are comparisons but with different comparison
2645 code, try to swap the comparison operands of one arg to produce
2646 a match and compare that variant. */
2647 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2648 && COMPARISON_CLASS_P (arg0)
2649 && COMPARISON_CLASS_P (arg1))
2651 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2653 if (TREE_CODE (arg0) == swap_code)
2654 return operand_equal_p (TREE_OPERAND (arg0, 0),
2655 TREE_OPERAND (arg1, 1), flags)
2656 && operand_equal_p (TREE_OPERAND (arg0, 1),
2657 TREE_OPERAND (arg1, 0), flags);
2660 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2661 /* This is needed for conversions and for COMPONENT_REF.
2662 Might as well play it safe and always test this. */
2663 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2664 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2665 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2666 return 0;
2668 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2669 We don't care about side effects in that case because the SAVE_EXPR
2670 takes care of that for us. In all other cases, two expressions are
2671 equal if they have no side effects. If we have two identical
2672 expressions with side effects that should be treated the same due
2673 to the only side effects being identical SAVE_EXPR's, that will
2674 be detected in the recursive calls below. */
2675 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2676 && (TREE_CODE (arg0) == SAVE_EXPR
2677 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2678 return 1;
2680 /* Next handle constant cases, those for which we can return 1 even
2681 if ONLY_CONST is set. */
2682 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2683 switch (TREE_CODE (arg0))
2685 case INTEGER_CST:
2686 return (! TREE_CONSTANT_OVERFLOW (arg0)
2687 && ! TREE_CONSTANT_OVERFLOW (arg1)
2688 && tree_int_cst_equal (arg0, arg1));
2690 case REAL_CST:
2691 return (! TREE_CONSTANT_OVERFLOW (arg0)
2692 && ! TREE_CONSTANT_OVERFLOW (arg1)
2693 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2694 TREE_REAL_CST (arg1)));
2696 case VECTOR_CST:
2698 tree v1, v2;
2700 if (TREE_CONSTANT_OVERFLOW (arg0)
2701 || TREE_CONSTANT_OVERFLOW (arg1))
2702 return 0;
2704 v1 = TREE_VECTOR_CST_ELTS (arg0);
2705 v2 = TREE_VECTOR_CST_ELTS (arg1);
2706 while (v1 && v2)
2708 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2709 flags))
2710 return 0;
2711 v1 = TREE_CHAIN (v1);
2712 v2 = TREE_CHAIN (v2);
2715 return v1 == v2;
2718 case COMPLEX_CST:
2719 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2720 flags)
2721 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2722 flags));
2724 case STRING_CST:
2725 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2726 && ! memcmp (TREE_STRING_POINTER (arg0),
2727 TREE_STRING_POINTER (arg1),
2728 TREE_STRING_LENGTH (arg0)));
2730 case ADDR_EXPR:
2731 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2732 0);
2733 default:
2734 break;
2737 if (flags & OEP_ONLY_CONST)
2738 return 0;
2740 /* Define macros to test an operand from arg0 and arg1 for equality and a
2741 variant that allows null and views null as being different from any
2742 non-null value. In the latter case, if either is null, they both
2743 must be; otherwise, do the normal comparison. */
2744 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2745 TREE_OPERAND (arg1, N), flags)
2747 #define OP_SAME_WITH_NULL(N) \
2748 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2749 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2751 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2753 case tcc_unary:
2754 /* Two conversions are equal only if signedness and modes match. */
2755 switch (TREE_CODE (arg0))
2757 case NOP_EXPR:
2758 case CONVERT_EXPR:
2759 case FIX_CEIL_EXPR:
2760 case FIX_TRUNC_EXPR:
2761 case FIX_FLOOR_EXPR:
2762 case FIX_ROUND_EXPR:
2763 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2764 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2765 return 0;
2766 break;
2767 default:
2768 break;
2771 return OP_SAME (0);
2774 case tcc_comparison:
2775 case tcc_binary:
2776 if (OP_SAME (0) && OP_SAME (1))
2777 return 1;
2779 /* For commutative ops, allow the other order. */
2780 return (commutative_tree_code (TREE_CODE (arg0))
2781 && operand_equal_p (TREE_OPERAND (arg0, 0),
2782 TREE_OPERAND (arg1, 1), flags)
2783 && operand_equal_p (TREE_OPERAND (arg0, 1),
2784 TREE_OPERAND (arg1, 0), flags));
2786 case tcc_reference:
2787 /* If either of the pointer (or reference) expressions we are
2788 dereferencing contain a side effect, these cannot be equal. */
2789 if (TREE_SIDE_EFFECTS (arg0)
2790 || TREE_SIDE_EFFECTS (arg1))
2791 return 0;
2793 switch (TREE_CODE (arg0))
2795 case INDIRECT_REF:
2796 case ALIGN_INDIRECT_REF:
2797 case MISALIGNED_INDIRECT_REF:
2798 case REALPART_EXPR:
2799 case IMAGPART_EXPR:
2800 return OP_SAME (0);
2802 case ARRAY_REF:
2803 case ARRAY_RANGE_REF:
2804 /* Operands 2 and 3 may be null. */
2805 return (OP_SAME (0)
2806 && OP_SAME (1)
2807 && OP_SAME_WITH_NULL (2)
2808 && OP_SAME_WITH_NULL (3));
2810 case COMPONENT_REF:
2811 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2812 may be NULL when we're called to compare MEM_EXPRs. */
2813 return OP_SAME_WITH_NULL (0)
2814 && OP_SAME (1)
2815 && OP_SAME_WITH_NULL (2);
2817 case BIT_FIELD_REF:
2818 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2820 default:
2821 return 0;
2824 case tcc_expression:
2825 switch (TREE_CODE (arg0))
2827 case ADDR_EXPR:
2828 case TRUTH_NOT_EXPR:
2829 return OP_SAME (0);
2831 case TRUTH_ANDIF_EXPR:
2832 case TRUTH_ORIF_EXPR:
2833 return OP_SAME (0) && OP_SAME (1);
2835 case TRUTH_AND_EXPR:
2836 case TRUTH_OR_EXPR:
2837 case TRUTH_XOR_EXPR:
2838 if (OP_SAME (0) && OP_SAME (1))
2839 return 1;
2841 /* Otherwise take into account this is a commutative operation. */
2842 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2843 TREE_OPERAND (arg1, 1), flags)
2844 && operand_equal_p (TREE_OPERAND (arg0, 1),
2845 TREE_OPERAND (arg1, 0), flags));
2847 case CALL_EXPR:
2848 /* If the CALL_EXPRs call different functions, then they
2849 clearly cannot be equal. */
2850 if (!OP_SAME (0))
2851 return 0;
2854 unsigned int cef = call_expr_flags (arg0);
2855 if (flags & OEP_PURE_SAME)
2856 cef &= ECF_CONST | ECF_PURE;
2857 else
2858 cef &= ECF_CONST;
2859 if (!cef)
2860 return 0;
2863 /* Now see if all the arguments are the same. operand_equal_p
2864 does not handle TREE_LIST, so we walk the operands here
2865 feeding them to operand_equal_p. */
2866 arg0 = TREE_OPERAND (arg0, 1);
2867 arg1 = TREE_OPERAND (arg1, 1);
2868 while (arg0 && arg1)
2870 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2871 flags))
2872 return 0;
2874 arg0 = TREE_CHAIN (arg0);
2875 arg1 = TREE_CHAIN (arg1);
2878 /* If we get here and both argument lists are exhausted
2879 then the CALL_EXPRs are equal. */
2880 return ! (arg0 || arg1);
2882 default:
2883 return 0;
2886 case tcc_declaration:
2887 /* Consider __builtin_sqrt equal to sqrt. */
2888 return (TREE_CODE (arg0) == FUNCTION_DECL
2889 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2890 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2891 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2893 default:
2894 return 0;
2897 #undef OP_SAME
2898 #undef OP_SAME_WITH_NULL
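/* For instance, a + b and b + a compare equal through the commutative
   case above, while two textually identical calls f () and f () are
   only considered equal when f is const (or pure, given OEP_PURE_SAME)
   and their argument lists match.  */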
2901 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2902 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2904 When in doubt, return 0. */
2906 static int
2907 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2909 int unsignedp1, unsignedpo;
2910 tree primarg0, primarg1, primother;
2911 unsigned int correct_width;
2913 if (operand_equal_p (arg0, arg1, 0))
2914 return 1;
2916 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2917 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2918 return 0;
2920 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2921 and see if the inner values are the same. This removes any
2922 signedness comparison, which doesn't matter here. */
2923 primarg0 = arg0, primarg1 = arg1;
2924 STRIP_NOPS (primarg0);
2925 STRIP_NOPS (primarg1);
2926 if (operand_equal_p (primarg0, primarg1, 0))
2927 return 1;
2929 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2930 actual comparison operand, ARG0.
2932 First throw away any conversions to wider types
2933 already present in the operands. */
2935 primarg1 = get_narrower (arg1, &unsignedp1);
2936 primother = get_narrower (other, &unsignedpo);
2938 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2939 if (unsignedp1 == unsignedpo
2940 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2941 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2943 tree type = TREE_TYPE (arg0);
2945 /* Make sure shorter operand is extended the right way
2946 to match the longer operand. */
2947 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2948 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2950 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2951 return 1;
2954 return 0;
2957 /* See if ARG is an expression that is either a comparison or is performing
2958 arithmetic on comparisons. The comparisons must only be comparing
2959 two different values, which will be stored in *CVAL1 and *CVAL2; if
2960 they are nonzero it means that some operands have already been found.
2961 No variables may be used anywhere else in the expression except in the
2962 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2963 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2965 If this is true, return 1. Otherwise, return zero. */
2967 static int
2968 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2970 enum tree_code code = TREE_CODE (arg);
2971 enum tree_code_class class = TREE_CODE_CLASS (code);
2973 /* We can handle some of the tcc_expression cases here. */
2974 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2975 class = tcc_unary;
2976 else if (class == tcc_expression
2977 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2978 || code == COMPOUND_EXPR))
2979 class = tcc_binary;
2981 else if (class == tcc_expression && code == SAVE_EXPR
2982 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2984 /* If we've already found a CVAL1 or CVAL2, this expression is
2985 too complex to handle. */
2986 if (*cval1 || *cval2)
2987 return 0;
2989 class = tcc_unary;
2990 *save_p = 1;
2993 switch (class)
2995 case tcc_unary:
2996 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2998 case tcc_binary:
2999 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3000 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3001 cval1, cval2, save_p));
3003 case tcc_constant:
3004 return 1;
3006 case tcc_expression:
3007 if (code == COND_EXPR)
3008 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3009 cval1, cval2, save_p)
3010 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3011 cval1, cval2, save_p)
3012 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3013 cval1, cval2, save_p));
3014 return 0;
3016 case tcc_comparison:
3017 /* First see if we can handle the first operand, then the second. For
3018 the second operand, we know *CVAL1 can't be zero. It must be that
3019 one side of the comparison is each of the values; test for the
3020 case where this isn't true by failing if the two operands
3021 are the same. */
3023 if (operand_equal_p (TREE_OPERAND (arg, 0),
3024 TREE_OPERAND (arg, 1), 0))
3025 return 0;
3027 if (*cval1 == 0)
3028 *cval1 = TREE_OPERAND (arg, 0);
3029 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3030 ;
3031 else if (*cval2 == 0)
3032 *cval2 = TREE_OPERAND (arg, 0);
3033 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3034 ;
3035 else
3036 return 0;
3038 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3039 ;
3040 else if (*cval2 == 0)
3041 *cval2 = TREE_OPERAND (arg, 1);
3042 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3043 ;
3044 else
3045 return 0;
3047 return 1;
3049 default:
3050 return 0;
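/* E.g. for ARG of the form (a < b) | (a == b), the first comparison
   records *CVAL1 = a and *CVAL2 = b, the second one matches both of
   its operands against them, and the function returns 1.  An
   expression whose comparisons involve a third variable returns 0.  */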
3054 /* ARG is a tree that is known to contain just arithmetic operations and
3055 comparisons. Evaluate the operations in the tree substituting NEW0 for
3056 any occurrence of OLD0 as an operand of a comparison and likewise for
3057 NEW1 and OLD1. */
3059 static tree
3060 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3062 tree type = TREE_TYPE (arg);
3063 enum tree_code code = TREE_CODE (arg);
3064 enum tree_code_class class = TREE_CODE_CLASS (code);
3066 /* We can handle some of the tcc_expression cases here. */
3067 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3068 class = tcc_unary;
3069 else if (class == tcc_expression
3070 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3071 class = tcc_binary;
3073 switch (class)
3075 case tcc_unary:
3076 return fold_build1 (code, type,
3077 eval_subst (TREE_OPERAND (arg, 0),
3078 old0, new0, old1, new1));
3080 case tcc_binary:
3081 return fold_build2 (code, type,
3082 eval_subst (TREE_OPERAND (arg, 0),
3083 old0, new0, old1, new1),
3084 eval_subst (TREE_OPERAND (arg, 1),
3085 old0, new0, old1, new1));
3087 case tcc_expression:
3088 switch (code)
3090 case SAVE_EXPR:
3091 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3093 case COMPOUND_EXPR:
3094 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3096 case COND_EXPR:
3097 return fold_build3 (code, type,
3098 eval_subst (TREE_OPERAND (arg, 0),
3099 old0, new0, old1, new1),
3100 eval_subst (TREE_OPERAND (arg, 1),
3101 old0, new0, old1, new1),
3102 eval_subst (TREE_OPERAND (arg, 2),
3103 old0, new0, old1, new1));
3104 default:
3105 break;
3107 /* Fall through - ??? */
3109 case tcc_comparison:
3111 tree arg0 = TREE_OPERAND (arg, 0);
3112 tree arg1 = TREE_OPERAND (arg, 1);
3114 /* We need to check both for exact equality and tree equality. The
3115 former will be true if the operand has a side-effect. In that
3116 case, we know the operand occurred exactly once. */
3118 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3119 arg0 = new0;
3120 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3121 arg0 = new1;
3123 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3124 arg1 = new0;
3125 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3126 arg1 = new1;
3128 return fold_build2 (code, type, arg0, arg1);
3131 default:
3132 return arg;
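/* For example, eval_subst on (a < b) && (a != 0) with OLD0 = a,
   NEW0 = x, OLD1 = b, NEW1 = y rebuilds the tree as
   (x < y) && (x != 0): each comparison operand matching an OLD value
   is replaced by the corresponding NEW one.  */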
3136 /* Return a tree for the case when the result of an expression is RESULT
3137 converted to TYPE and OMITTED was previously an operand of the expression
3138 but is now not needed (e.g., we folded OMITTED * 0).
3140 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3141 the conversion of RESULT to TYPE. */
3143 tree
3144 omit_one_operand (tree type, tree result, tree omitted)
3146 tree t = fold_convert (type, result);
3148 if (TREE_SIDE_EFFECTS (omitted))
3149 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3151 return non_lvalue (t);
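/* E.g. when folding f () * 0, RESULT is zero and OMITTED is the call
   f (); since the call has side effects, we return the COMPOUND_EXPR
   (f (), 0) rather than a bare zero.  */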
3154 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3156 static tree
3157 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3159 tree t = fold_convert (type, result);
3161 if (TREE_SIDE_EFFECTS (omitted))
3162 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3164 return pedantic_non_lvalue (t);
3167 /* Return a tree for the case when the result of an expression is RESULT
3168 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3169 of the expression but are now not needed.
3171 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3172 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3173 evaluated before OMITTED2. Otherwise, if neither has side effects,
3174 just do the conversion of RESULT to TYPE. */
3176 tree
3177 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3179 tree t = fold_convert (type, result);
3181 if (TREE_SIDE_EFFECTS (omitted2))
3182 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3183 if (TREE_SIDE_EFFECTS (omitted1))
3184 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3186 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3190 /* Return a simplified tree node for the truth-negation of ARG. This
3191 never alters ARG itself. We assume that ARG is an operation that
3192 returns a truth value (0 or 1).
3194 FIXME: one would think we would fold the result, but it causes
3195 problems with the dominator optimizer. */
3197 tree
3198 fold_truth_not_expr (tree arg)
3200 tree type = TREE_TYPE (arg);
3201 enum tree_code code = TREE_CODE (arg);
3203 /* If this is a comparison, we can simply invert it, except for
3204 floating-point non-equality comparisons, in which case we just
3205 enclose a TRUTH_NOT_EXPR around what we have. */
3207 if (TREE_CODE_CLASS (code) == tcc_comparison)
3209 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3210 if (FLOAT_TYPE_P (op_type)
3211 && flag_trapping_math
3212 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3213 && code != NE_EXPR && code != EQ_EXPR)
3214 return NULL_TREE;
3215 else
3217 code = invert_tree_comparison (code,
3218 HONOR_NANS (TYPE_MODE (op_type)));
3219 if (code == ERROR_MARK)
3220 return NULL_TREE;
3221 else
3222 return build2 (code, type,
3223 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3227 switch (code)
3229 case INTEGER_CST:
3230 return constant_boolean_node (integer_zerop (arg), type);
3232 case TRUTH_AND_EXPR:
3233 return build2 (TRUTH_OR_EXPR, type,
3234 invert_truthvalue (TREE_OPERAND (arg, 0)),
3235 invert_truthvalue (TREE_OPERAND (arg, 1)));
3237 case TRUTH_OR_EXPR:
3238 return build2 (TRUTH_AND_EXPR, type,
3239 invert_truthvalue (TREE_OPERAND (arg, 0)),
3240 invert_truthvalue (TREE_OPERAND (arg, 1)));
3242 case TRUTH_XOR_EXPR:
3243 /* Here we can invert either operand. We invert the first operand
3244 unless the second operand is a TRUTH_NOT_EXPR in which case our
3245 result is the XOR of the first operand with the inside of the
3246 negation of the second operand. */
3248 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3249 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3250 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3251 else
3252 return build2 (TRUTH_XOR_EXPR, type,
3253 invert_truthvalue (TREE_OPERAND (arg, 0)),
3254 TREE_OPERAND (arg, 1));
3256 case TRUTH_ANDIF_EXPR:
3257 return build2 (TRUTH_ORIF_EXPR, type,
3258 invert_truthvalue (TREE_OPERAND (arg, 0)),
3259 invert_truthvalue (TREE_OPERAND (arg, 1)));
3261 case TRUTH_ORIF_EXPR:
3262 return build2 (TRUTH_ANDIF_EXPR, type,
3263 invert_truthvalue (TREE_OPERAND (arg, 0)),
3264 invert_truthvalue (TREE_OPERAND (arg, 1)));
3266 case TRUTH_NOT_EXPR:
3267 return TREE_OPERAND (arg, 0);
3269 case COND_EXPR:
3271 tree arg1 = TREE_OPERAND (arg, 1);
3272 tree arg2 = TREE_OPERAND (arg, 2);
3273 /* A COND_EXPR may have a throw as one operand, which
3274 then has void type. Just leave void operands
3275 as they are. */
3276 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3277 VOID_TYPE_P (TREE_TYPE (arg1))
3278 ? arg1 : invert_truthvalue (arg1),
3279 VOID_TYPE_P (TREE_TYPE (arg2))
3280 ? arg2 : invert_truthvalue (arg2));
3283 case COMPOUND_EXPR:
3284 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3285 invert_truthvalue (TREE_OPERAND (arg, 1)));
3287 case NON_LVALUE_EXPR:
3288 return invert_truthvalue (TREE_OPERAND (arg, 0));
3290 case NOP_EXPR:
3291 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3292 return build1 (TRUTH_NOT_EXPR, type, arg);
3294 case CONVERT_EXPR:
3295 case FLOAT_EXPR:
3296 return build1 (TREE_CODE (arg), type,
3297 invert_truthvalue (TREE_OPERAND (arg, 0)));
3299 case BIT_AND_EXPR:
3300 if (!integer_onep (TREE_OPERAND (arg, 1)))
3301 break;
3302 return build2 (EQ_EXPR, type, arg,
3303 build_int_cst (type, 0));
3305 case SAVE_EXPR:
3306 return build1 (TRUTH_NOT_EXPR, type, arg);
3308 case CLEANUP_POINT_EXPR:
3309 return build1 (CLEANUP_POINT_EXPR, type,
3310 invert_truthvalue (TREE_OPERAND (arg, 0)));
3312 default:
3313 break;
3316 return NULL_TREE;
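/* The TRUTH_AND/TRUTH_OR cases above are simply De Morgan's laws,
   e.g. !(a && b) becomes !a || !b, while the comparison case rewrites
   !(x < y) as x >= y whenever that inversion is safe.  */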
3319 /* Return a simplified tree node for the truth-negation of ARG. This
3320 never alters ARG itself. We assume that ARG is an operation that
3321 returns a truth value (0 or 1).
3323 FIXME: one would think we would fold the result, but it causes
3324 problems with the dominator optimizer. */
3326 tree
3327 invert_truthvalue (tree arg)
3329 tree tem;
3331 if (TREE_CODE (arg) == ERROR_MARK)
3332 return arg;
3334 tem = fold_truth_not_expr (arg);
3335 if (!tem)
3336 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3338 return tem;
3341 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3342 operands are another bit-wise operation with a common input. If so,
3343 distribute the bit operations to save an operation and possibly two if
3344 constants are involved. For example, convert
3345 (A | B) & (A | C) into A | (B & C)
3346 Further simplification will occur if B and C are constants.
3348 If this optimization cannot be done, 0 will be returned. */
3350 static tree
3351 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3353 tree common;
3354 tree left, right;
3356 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3357 || TREE_CODE (arg0) == code
3358 || (TREE_CODE (arg0) != BIT_AND_EXPR
3359 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3360 return 0;
3362 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3364 common = TREE_OPERAND (arg0, 0);
3365 left = TREE_OPERAND (arg0, 1);
3366 right = TREE_OPERAND (arg1, 1);
3368 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3370 common = TREE_OPERAND (arg0, 0);
3371 left = TREE_OPERAND (arg0, 1);
3372 right = TREE_OPERAND (arg1, 0);
3374 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3376 common = TREE_OPERAND (arg0, 1);
3377 left = TREE_OPERAND (arg0, 0);
3378 right = TREE_OPERAND (arg1, 1);
3380 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3382 common = TREE_OPERAND (arg0, 1);
3383 left = TREE_OPERAND (arg0, 0);
3384 right = TREE_OPERAND (arg1, 0);
3386 else
3387 return 0;
3389 return fold_build2 (TREE_CODE (arg0), type, common,
3390 fold_build2 (code, type, left, right));
3393 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3394 with code CODE. This optimization is unsafe. */
3395 static tree
3396 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3398 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3399 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3401 /* (A / C) +- (B / C) -> (A +- B) / C. */
3402 if (mul0 == mul1
3403 && operand_equal_p (TREE_OPERAND (arg0, 1),
3404 TREE_OPERAND (arg1, 1), 0))
3405 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3406 fold_build2 (code, type,
3407 TREE_OPERAND (arg0, 0),
3408 TREE_OPERAND (arg1, 0)),
3409 TREE_OPERAND (arg0, 1));
3411 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3412 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3413 TREE_OPERAND (arg1, 0), 0)
3414 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3415 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3417 REAL_VALUE_TYPE r0, r1;
3418 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3419 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3420 if (!mul0)
3421 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3422 if (!mul1)
3423 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3424 real_arithmetic (&r0, code, &r0, &r1);
3425 return fold_build2 (MULT_EXPR, type,
3426 TREE_OPERAND (arg0, 0),
3427 build_real (type, r0));
3430 return NULL_TREE;
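/* For instance, x / 4.0 + x / 8.0 matches the second pattern above:
   1/4.0 + 1/8.0 is evaluated at compile time and the sum becomes
   x * 0.375.  The rewritten form can round differently from the
   original, which is why the comment above calls this unsafe.  */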
3433 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3434 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3436 static tree
3437 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3438 int unsignedp)
3440 tree result;
3442 if (bitpos == 0)
3444 tree size = TYPE_SIZE (TREE_TYPE (inner));
3445 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3446 || POINTER_TYPE_P (TREE_TYPE (inner)))
3447 && host_integerp (size, 0)
3448 && tree_low_cst (size, 0) == bitsize)
3449 return fold_convert (type, inner);
3452 result = build3 (BIT_FIELD_REF, type, inner,
3453 size_int (bitsize), bitsize_int (bitpos));
3455 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3457 return result;
3460 /* Optimize a bit-field compare.
3462 There are two cases: First is a compare against a constant and the
3463 second is a comparison of two items where the fields are at the same
3464 bit position relative to the start of a chunk (byte, halfword, word)
3465 large enough to contain it. In these cases we can avoid the shift
3466 implicit in bitfield extractions.
3468 For constants, we emit a compare of the shifted constant with the
3469 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3470 compared. For two fields at the same position, we do the ANDs with the
3471 similar mask and compare the result of the ANDs.
3473 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3474 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3475 are the left and right operands of the comparison, respectively.
3477 If the optimization described above can be done, we return the resulting
3478 tree. Otherwise we return zero. */
3480 static tree
3481 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3482 tree lhs, tree rhs)
3484 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3485 tree type = TREE_TYPE (lhs);
3486 tree signed_type, unsigned_type;
3487 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3488 enum machine_mode lmode, rmode, nmode;
3489 int lunsignedp, runsignedp;
3490 int lvolatilep = 0, rvolatilep = 0;
3491 tree linner, rinner = NULL_TREE;
3492 tree mask;
3493 tree offset;
3495 /* Get all the information about the extractions being done. If the bit size
3496 is the same as the size of the underlying object, we aren't doing an
3497 extraction at all and so can do nothing. We also don't want to
3498 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3499 then will no longer be able to replace it. */
3500 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3501 &lunsignedp, &lvolatilep, false);
3502 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3503 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3504 return 0;
3506 if (!const_p)
3508 /* If this is not a constant, we can only do something if bit positions,
3509 sizes, and signedness are the same. */
3510 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3511 &runsignedp, &rvolatilep, false);
3513 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3514 || lunsignedp != runsignedp || offset != 0
3515 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3516 return 0;
3519 /* See if we can find a mode to refer to this field. We should be able to,
3520 but fail if we can't. */
3521 nmode = get_best_mode (lbitsize, lbitpos,
3522 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3523 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3524 TYPE_ALIGN (TREE_TYPE (rinner))),
3525 word_mode, lvolatilep || rvolatilep);
3526 if (nmode == VOIDmode)
3527 return 0;
3529 /* Set signed and unsigned types of the precision of this mode for the
3530 shifts below. */
3531 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3532 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3534 /* Compute the bit position and size for the new reference and our offset
3535 within it. If the new reference is the same size as the original, we
3536 won't optimize anything, so return zero. */
3537 nbitsize = GET_MODE_BITSIZE (nmode);
3538 nbitpos = lbitpos & ~ (nbitsize - 1);
3539 lbitpos -= nbitpos;
3540 if (nbitsize == lbitsize)
3541 return 0;
3543 if (BYTES_BIG_ENDIAN)
3544 lbitpos = nbitsize - lbitsize - lbitpos;
3546 /* Make the mask to be used against the extracted field. */
3547 mask = build_int_cst (unsigned_type, -1);
3548 mask = force_fit_type (mask, 0, false, false);
3549 mask = fold_convert (unsigned_type, mask);
3550 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3551 mask = const_binop (RSHIFT_EXPR, mask,
3552 size_int (nbitsize - lbitsize - lbitpos), 0);
3554 if (! const_p)
3555 /* If not comparing with constant, just rework the comparison
3556 and return. */
3557 return build2 (code, compare_type,
3558 build2 (BIT_AND_EXPR, unsigned_type,
3559 make_bit_field_ref (linner, unsigned_type,
3560 nbitsize, nbitpos, 1),
3561 mask),
3562 build2 (BIT_AND_EXPR, unsigned_type,
3563 make_bit_field_ref (rinner, unsigned_type,
3564 nbitsize, nbitpos, 1),
3565 mask));
3567 /* Otherwise, we are handling the constant case. See if the constant is too
3568 big for the field. Warn and return a tree for 0 (false) if so. We do
3569 this not only for its own sake, but to avoid having to test for this
3570 error case below. If we didn't, we might generate wrong code.
3572 For unsigned fields, the constant shifted right by the field length should
3573 be all zero. For signed fields, the high-order bits should agree with
3574 the sign bit. */
3576 if (lunsignedp)
3578 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3579 fold_convert (unsigned_type, rhs),
3580 size_int (lbitsize), 0)))
3582 warning (0, "comparison is always %d due to width of bit-field",
3583 code == NE_EXPR);
3584 return constant_boolean_node (code == NE_EXPR, compare_type);
3587 else
3589 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3590 size_int (lbitsize - 1), 0);
3591 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3593 warning (0, "comparison is always %d due to width of bit-field",
3594 code == NE_EXPR);
3595 return constant_boolean_node (code == NE_EXPR, compare_type);
3599 /* Single-bit compares should always be against zero. */
3600 if (lbitsize == 1 && ! integer_zerop (rhs))
3602 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3603 rhs = build_int_cst (type, 0);
3606 /* Make a new bitfield reference, shift the constant over the
3607 appropriate number of bits and mask it with the computed mask
3608 (in case this was a signed field). If we changed it, make a new one. */
3609 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3610 if (lvolatilep)
3612 TREE_SIDE_EFFECTS (lhs) = 1;
3613 TREE_THIS_VOLATILE (lhs) = 1;
3616 rhs = const_binop (BIT_AND_EXPR,
3617 const_binop (LSHIFT_EXPR,
3618 fold_convert (unsigned_type, rhs),
3619 size_int (lbitpos), 0),
3620 mask, 0);
3622 return build2 (code, compare_type,
3623 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3624 rhs);
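/* A worked example of the mask construction above, on a little-endian
   target: for a 4-bit field at bit 8 of a 32-bit word (nbitsize == 32,
   lbitsize == 4, lbitpos == 8), the all-ones mask is shifted left by
   28 to give 0xf0000000, then right by 20 to give 0x00000f00, i.e.
   exactly the field's bits within the extracted word.  */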
3627 /* Subroutine for fold_truthop: decode a field reference.
3629 If EXP is a comparison reference, we return the innermost reference.
3631 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3632 set to the starting bit number.
3634 If the innermost field can be completely contained in a mode-sized
3635 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3637 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3638 otherwise it is not changed.
3640 *PUNSIGNEDP is set to the signedness of the field.
3642 *PMASK is set to the mask used. This is either contained in a
3643 BIT_AND_EXPR or derived from the width of the field.
3645 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3647 Return 0 if this is not a component reference or is one that we can't
3648 do anything with. */
3650 static tree
3651 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3652 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3653 int *punsignedp, int *pvolatilep,
3654 tree *pmask, tree *pand_mask)
3656 tree outer_type = 0;
3657 tree and_mask = 0;
3658 tree mask, inner, offset;
3659 tree unsigned_type;
3660 unsigned int precision;
3662 /* All the optimizations using this function assume integer fields.
3663 There are problems with FP fields since the type_for_size call
3664 below can fail for, e.g., XFmode. */
3665 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3666 return 0;
3668 /* We are interested in the bare arrangement of bits, so strip everything
3669 that doesn't affect the machine mode. However, record the type of the
3670 outermost expression if it may matter below. */
3671 if (TREE_CODE (exp) == NOP_EXPR
3672 || TREE_CODE (exp) == CONVERT_EXPR
3673 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3674 outer_type = TREE_TYPE (exp);
3675 STRIP_NOPS (exp);
3677 if (TREE_CODE (exp) == BIT_AND_EXPR)
3679 and_mask = TREE_OPERAND (exp, 1);
3680 exp = TREE_OPERAND (exp, 0);
3681 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3682 if (TREE_CODE (and_mask) != INTEGER_CST)
3683 return 0;
3686 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3687 punsignedp, pvolatilep, false);
3688 if ((inner == exp && and_mask == 0)
3689 || *pbitsize < 0 || offset != 0
3690 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3691 return 0;
3693 /* If the number of bits in the reference is the same as the bitsize of
3694 the outer type, then the outer type gives the signedness. Otherwise
3695 (in case of a small bitfield) the signedness is unchanged. */
3696 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3697 *punsignedp = TYPE_UNSIGNED (outer_type);
3699 /* Compute the mask to access the bitfield. */
3700 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3701 precision = TYPE_PRECISION (unsigned_type);
3703 mask = build_int_cst (unsigned_type, -1);
3704 mask = force_fit_type (mask, 0, false, false);
3706 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3707 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3709 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3710 if (and_mask != 0)
3711 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3712 fold_convert (unsigned_type, and_mask), mask);
3714 *pmask = mask;
3715 *pand_mask = and_mask;
3716 return inner;
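/* The mask computed above has *PBITSIZE one bits in the low-order
   positions: e.g. for a 3-bit field and a 32-bit unsigned_type,
   ~0 << 29 >> 29 leaves 0x7, which is then ANDed with any mask taken
   from a surrounding BIT_AND_EXPR.  */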
3719 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3720 bit positions. */
3722 static int
3723 all_ones_mask_p (tree mask, int size)
3725 tree type = TREE_TYPE (mask);
3726 unsigned int precision = TYPE_PRECISION (type);
3727 tree tmask;
3729 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3730 tmask = force_fit_type (tmask, 0, false, false);
3732 return
3733 tree_int_cst_equal (mask,
3734 const_binop (RSHIFT_EXPR,
3735 const_binop (LSHIFT_EXPR, tmask,
3736 size_int (precision - size),
3737 0),
3738 size_int (precision - size), 0));
3741 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3742 represents the sign bit of EXP's type. If EXP represents a sign
3743 or zero extension, also test VAL against the unextended type.
3744 The return value is the (sub)expression whose sign bit is VAL,
3745 or NULL_TREE otherwise. */
3747 static tree
3748 sign_bit_p (tree exp, tree val)
3750 unsigned HOST_WIDE_INT mask_lo, lo;
3751 HOST_WIDE_INT mask_hi, hi;
3752 int width;
3753 tree t;
3755 /* Tree EXP must have an integral type. */
3756 t = TREE_TYPE (exp);
3757 if (! INTEGRAL_TYPE_P (t))
3758 return NULL_TREE;
3760 /* Tree VAL must be an integer constant. */
3761 if (TREE_CODE (val) != INTEGER_CST
3762 || TREE_CONSTANT_OVERFLOW (val))
3763 return NULL_TREE;
3765 width = TYPE_PRECISION (t);
3766 if (width > HOST_BITS_PER_WIDE_INT)
3768 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3769 lo = 0;
3771 mask_hi = ((unsigned HOST_WIDE_INT) -1
3772 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3773 mask_lo = -1;
3775 else
3777 hi = 0;
3778 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3780 mask_hi = 0;
3781 mask_lo = ((unsigned HOST_WIDE_INT) -1
3782 >> (HOST_BITS_PER_WIDE_INT - width));
3785 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3786 treat VAL as if it were unsigned. */
3787 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3788 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3789 return exp;
3791 /* Handle extension from a narrower type. */
3792 if (TREE_CODE (exp) == NOP_EXPR
3793 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3794 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3796 return NULL_TREE;
3799 /* Subroutine for fold_truthop: determine if an operand is simple enough
3800 to be evaluated unconditionally. */
3802 static int
3803 simple_operand_p (tree exp)
3805 /* Strip any conversions that don't change the machine mode. */
3806 STRIP_NOPS (exp);
3808 return (CONSTANT_CLASS_P (exp)
3809 || TREE_CODE (exp) == SSA_NAME
3810 || (DECL_P (exp)
3811 && ! TREE_ADDRESSABLE (exp)
3812 && ! TREE_THIS_VOLATILE (exp)
3813 && ! DECL_NONLOCAL (exp)
3814 /* Don't regard global variables as simple. They may be
3815 allocated in ways unknown to the compiler (shared memory,
3816 #pragma weak, etc.). */
3817 && ! TREE_PUBLIC (exp)
3818 && ! DECL_EXTERNAL (exp)
3819 /* Loading a static variable is unduly expensive, but global
3820 registers aren't expensive. */
3821 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
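/* E.g. (illustrative): a constant, an SSA_NAME, or a local non-volatile,
   non-addressable variable counts as simple; an extern or public global
   does not, for the reasons given in the comments above.  */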
3824 /* The following functions are subroutines to fold_range_test and allow it to
3825 try to change a logical combination of comparisons into a range test.
3827 For example, both
3828 X == 2 || X == 3 || X == 4 || X == 5
3829 and
3830 X >= 2 && X <= 5
3831 are converted to
3832 (unsigned) (X - 2) <= 3
3834 We describe each set of comparisons as being either inside or outside
3835 a range, using a variable named like IN_P, and then describe the
3836 range with a lower and upper bound. If one of the bounds is omitted,
3837 it represents either the highest or lowest value of the type.
3839 In the comments below, we represent a range by two numbers in brackets
3840 preceded by a "+" to designate being inside that range, or a "-" to
3841 designate being outside that range, so the condition can be inverted by
3842 flipping the prefix. An omitted bound is represented by a "-". For
3843 example, "- [-, 10]" means being outside the range starting at the lowest
3844 possible value and ending at 10, in other words, being greater than 10.
3845 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3846 always false.
3848 We set up things so that the missing bounds are handled in a consistent
3849 manner so neither a missing bound nor "true" and "false" need to be
3850 handled using a special case. */
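/* Illustrative sketch (not part of GCC; the helper name is invented):
   the rewrite described above at the C source level.  Subtracting the
   low bound and comparing unsigned folds the two-sided test into one
   comparison, because values below the low bound wrap around to very
   large unsigned values.  */
static int
range_test_demo (int x)
{
  /* Equivalent to "x >= 2 && x <= 5" and to the == chain above.  */
  return ((unsigned int) x - 2u) <= 3u;
}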
3852 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3853 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3854 and UPPER1_P are nonzero if the respective argument is an upper bound
3855 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3856 must be specified for a comparison. ARG1 will be converted to ARG0's
3857 type if both are specified. */
3859 static tree
3860 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3861 tree arg1, int upper1_p)
3863 tree tem;
3864 int result;
3865 int sgn0, sgn1;
3867 /* If neither arg represents infinity, do the normal operation.
3868 Else, if not a comparison, return infinity. Else handle the special
3869 comparison rules. Note that most of the cases below won't occur, but
3870 are handled for consistency. */
3872 if (arg0 != 0 && arg1 != 0)
3874 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3875 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3876 STRIP_NOPS (tem);
3877 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3880 if (TREE_CODE_CLASS (code) != tcc_comparison)
3881 return 0;
3883 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3884 for neither. In real mathematics we could not assume that two
3885 open-ended ranges are equal. But this is computer arithmetic, where
3886 numbers are finite: we can represent the missing bound of any
3887 unbounded range by a value Z greater than any representable number,
3888 which permits us to treat unbounded ranges as equal. */
3889 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3890 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3891 switch (code)
3893 case EQ_EXPR:
3894 result = sgn0 == sgn1;
3895 break;
3896 case NE_EXPR:
3897 result = sgn0 != sgn1;
3898 break;
3899 case LT_EXPR:
3900 result = sgn0 < sgn1;
3901 break;
3902 case LE_EXPR:
3903 result = sgn0 <= sgn1;
3904 break;
3905 case GT_EXPR:
3906 result = sgn0 > sgn1;
3907 break;
3908 case GE_EXPR:
3909 result = sgn0 >= sgn1;
3910 break;
3911 default:
3912 gcc_unreachable ();
3915 return constant_boolean_node (result, type);
3918 /* Given EXP, a logical expression, set the range it is testing into
3919 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3920 actually being tested. *PLOW and *PHIGH will be made of the same
3921 type as the returned expression. If EXP is not a comparison, we
3922 will most likely not be returning a useful value and range. Set
3923 *STRICT_OVERFLOW_P to true if the return value is only valid
3924 because signed overflow is undefined; otherwise, do not change
3925 *STRICT_OVERFLOW_P. */
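/* Worked example (illustrative): for EXP = "(unsigned) X - 2 <= 3",
   the loop below first records + [-, 3] from the LE_EXPR (and, the
   type being unsigned, a zero low bound), then the MINUS_EXPR case
   moves the constant across, giving X in + [2, 5].  */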
3927 static tree
3928 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3929 bool *strict_overflow_p)
3931 enum tree_code code;
3932 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3933 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3934 int in_p, n_in_p;
3935 tree low, high, n_low, n_high;
3937 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3938 and see if we can refine the range. Some of the cases below may not
3939 happen, but it doesn't seem worth worrying about this. We "continue"
3940 the outer loop when we've changed something; otherwise we "break"
3941 the switch, which will "break" the while. */
3943 in_p = 0;
3944 low = high = build_int_cst (TREE_TYPE (exp), 0);
3946 while (1)
3948 code = TREE_CODE (exp);
3949 exp_type = TREE_TYPE (exp);
3951 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3953 if (TREE_CODE_LENGTH (code) > 0)
3954 arg0 = TREE_OPERAND (exp, 0);
3955 if (TREE_CODE_CLASS (code) == tcc_comparison
3956 || TREE_CODE_CLASS (code) == tcc_unary
3957 || TREE_CODE_CLASS (code) == tcc_binary)
3958 arg0_type = TREE_TYPE (arg0);
3959 if (TREE_CODE_CLASS (code) == tcc_binary
3960 || TREE_CODE_CLASS (code) == tcc_comparison
3961 || (TREE_CODE_CLASS (code) == tcc_expression
3962 && TREE_CODE_LENGTH (code) > 1))
3963 arg1 = TREE_OPERAND (exp, 1);
3966 switch (code)
3968 case TRUTH_NOT_EXPR:
3969 in_p = ! in_p, exp = arg0;
3970 continue;
3972 case EQ_EXPR: case NE_EXPR:
3973 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3974 /* We can only do something if the range is testing for zero
3975 and if the second operand is an integer constant. Note that
3976 saying something is "in" the range we make is done by
3977 complementing IN_P, since IN_P is set for the initial case
3978 of being not equal to zero; "out" leaves it alone. */
3979 if (low == 0 || high == 0
3980 || ! integer_zerop (low) || ! integer_zerop (high)
3981 || TREE_CODE (arg1) != INTEGER_CST)
3982 break;
3984 switch (code)
3986 case NE_EXPR: /* - [c, c] */
3987 low = high = arg1;
3988 break;
3989 case EQ_EXPR: /* + [c, c] */
3990 in_p = ! in_p, low = high = arg1;
3991 break;
3992 case GT_EXPR: /* - [-, c] */
3993 low = 0, high = arg1;
3994 break;
3995 case GE_EXPR: /* + [c, -] */
3996 in_p = ! in_p, low = arg1, high = 0;
3997 break;
3998 case LT_EXPR: /* - [c, -] */
3999 low = arg1, high = 0;
4000 break;
4001 case LE_EXPR: /* + [-, c] */
4002 in_p = ! in_p, low = 0, high = arg1;
4003 break;
4004 default:
4005 gcc_unreachable ();
4008 /* If this is an unsigned comparison, we also know that EXP is
4009 greater than or equal to zero. We base the range tests we make
4010 on that fact, so we record it here so we can parse existing
4011 range tests. We test arg0_type since often the return type
4012 of, e.g. EQ_EXPR, is boolean. */
4013 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4015 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4016 in_p, low, high, 1,
4017 build_int_cst (arg0_type, 0),
4018 NULL_TREE))
4019 break;
4021 in_p = n_in_p, low = n_low, high = n_high;
4023 /* If the high bound is missing, but we have a nonzero low
4024 bound, reverse the range so it goes from zero to the low bound
4025 minus 1. */
4026 if (high == 0 && low && ! integer_zerop (low))
4028 in_p = ! in_p;
4029 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4030 integer_one_node, 0);
4031 low = build_int_cst (arg0_type, 0);
4035 exp = arg0;
4036 continue;
4038 case NEGATE_EXPR:
4039 /* (-x) IN [a,b] -> x in [-b, -a] */
4040 n_low = range_binop (MINUS_EXPR, exp_type,
4041 build_int_cst (exp_type, 0),
4042 0, high, 1);
4043 n_high = range_binop (MINUS_EXPR, exp_type,
4044 build_int_cst (exp_type, 0),
4045 0, low, 0);
4046 low = n_low, high = n_high;
4047 exp = arg0;
4048 continue;
4050 case BIT_NOT_EXPR:
4051 /* ~ X -> -X - 1 */
4052 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4053 build_int_cst (exp_type, 1));
4054 continue;
4056 case PLUS_EXPR: case MINUS_EXPR:
4057 if (TREE_CODE (arg1) != INTEGER_CST)
4058 break;
4060 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4061 move a constant to the other side. */
4062 if (!TYPE_UNSIGNED (arg0_type)
4063 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4064 break;
4066 /* If EXP is signed, any overflow in the computation is undefined,
4067 so we don't worry about it so long as our computations on
4068 the bounds don't overflow. For unsigned, overflow is defined
4069 and this is exactly the right thing. */
4070 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4071 arg0_type, low, 0, arg1, 0);
4072 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4073 arg0_type, high, 1, arg1, 0);
4074 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4075 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4076 break;
4078 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4079 *strict_overflow_p = true;
4081 /* Check for an unsigned range which has wrapped around the maximum
4082 value thus making n_high < n_low, and normalize it. */
4083 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4085 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4086 integer_one_node, 0);
4087 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4088 integer_one_node, 0);
4090 /* If the range is of the form +/- [ x+1, x ], we won't
4091 be able to normalize it. But then, it represents the
4092 whole range or the empty set, so make it
4093 +/- [ -, - ]. */
4094 if (tree_int_cst_equal (n_low, low)
4095 && tree_int_cst_equal (n_high, high))
4096 low = high = 0;
4097 else
4098 in_p = ! in_p;
4100 else
4101 low = n_low, high = n_high;
4103 exp = arg0;
4104 continue;
4106 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4107 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4108 break;
4110 if (! INTEGRAL_TYPE_P (arg0_type)
4111 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4112 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4113 break;
4115 n_low = low, n_high = high;
4117 if (n_low != 0)
4118 n_low = fold_convert (arg0_type, n_low);
4120 if (n_high != 0)
4121 n_high = fold_convert (arg0_type, n_high);
4124 /* If we're converting arg0, which has an unsigned type, to the
4125 signed type of exp, we will be doing the comparison as unsigned.
4126 The tests above have already verified that LOW and HIGH
4127 are both positive.
4129 So we have to ensure that we will handle large unsigned
4130 values the same way that the current signed bounds treat
4131 negative values. */
4133 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4135 tree high_positive;
4136 tree equiv_type = lang_hooks.types.type_for_mode
4137 (TYPE_MODE (arg0_type), 1);
4139 /* A range without an upper bound is, naturally, unbounded.
4140 Since convert would have cropped a very large value, use
4141 the max value for the destination type. */
4142 high_positive
4143 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4144 : TYPE_MAX_VALUE (arg0_type);
4146 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4147 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4148 fold_convert (arg0_type,
4149 high_positive),
4150 fold_convert (arg0_type,
4151 integer_one_node));
4153 /* If the low bound is specified, "and" the range with the
4154 range for which the original unsigned value will be
4155 positive. */
4156 if (low != 0)
4158 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4159 1, n_low, n_high, 1,
4160 fold_convert (arg0_type,
4161 integer_zero_node),
4162 high_positive))
4163 break;
4165 in_p = (n_in_p == in_p);
4167 else
4169 /* Otherwise, "or" the range with the range of the input
4170 that will be interpreted as negative. */
4171 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4172 0, n_low, n_high, 1,
4173 fold_convert (arg0_type,
4174 integer_zero_node),
4175 high_positive))
4176 break;
4178 in_p = (in_p != n_in_p);
4182 exp = arg0;
4183 low = n_low, high = n_high;
4184 continue;
4186 default:
4187 break;
4190 break;
4193 /* If EXP is a constant, we can evaluate whether this is true or false. */
4194 if (TREE_CODE (exp) == INTEGER_CST)
4196 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4197 exp, 0, low, 0))
4198 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4199 exp, 1, high, 1)));
4200 low = high = 0;
4201 exp = 0;
4204 *pin_p = in_p, *plow = low, *phigh = high;
4205 return exp;
4208 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4209 type, TYPE, return an expression to test if EXP is in (or out of, depending
4210 on IN_P) the range. Return 0 if the test couldn't be created. */
4212 static tree
4213 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4215 tree etype = TREE_TYPE (exp);
4216 tree value;
4218 #ifdef HAVE_canonicalize_funcptr_for_compare
4219 /* Disable this optimization for function pointer expressions
4220 on targets that require function pointer canonicalization. */
4221 if (HAVE_canonicalize_funcptr_for_compare
4222 && TREE_CODE (etype) == POINTER_TYPE
4223 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4224 return NULL_TREE;
4225 #endif
4227 if (! in_p)
4229 value = build_range_check (type, exp, 1, low, high);
4230 if (value != 0)
4231 return invert_truthvalue (value);
4233 return 0;
4236 if (low == 0 && high == 0)
4237 return build_int_cst (type, 1);
4239 if (low == 0)
4240 return fold_build2 (LE_EXPR, type, exp,
4241 fold_convert (etype, high));
4243 if (high == 0)
4244 return fold_build2 (GE_EXPR, type, exp,
4245 fold_convert (etype, low));
4247 if (operand_equal_p (low, high, 0))
4248 return fold_build2 (EQ_EXPR, type, exp,
4249 fold_convert (etype, low));
4251 if (integer_zerop (low))
4253 if (! TYPE_UNSIGNED (etype))
4255 etype = lang_hooks.types.unsigned_type (etype);
4256 high = fold_convert (etype, high);
4257 exp = fold_convert (etype, exp);
4259 return build_range_check (type, exp, 1, 0, high);
4262 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4263 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4265 unsigned HOST_WIDE_INT lo;
4266 HOST_WIDE_INT hi;
4267 int prec;
4269 prec = TYPE_PRECISION (etype);
4270 if (prec <= HOST_BITS_PER_WIDE_INT)
4272 hi = 0;
4273 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4275 else
4277 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4278 lo = (unsigned HOST_WIDE_INT) -1;
4281 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4283 if (TYPE_UNSIGNED (etype))
4285 etype = lang_hooks.types.signed_type (etype);
4286 exp = fold_convert (etype, exp);
4288 return fold_build2 (GT_EXPR, type, exp,
4289 build_int_cst (etype, 0));
4293 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4294 This requires wrap-around arithmetic for the type of the expression. */
4295 switch (TREE_CODE (etype))
4297 case INTEGER_TYPE:
4298 /* There is no requirement that LOW be within the range of ETYPE
4299 if the latter is a subtype. It must, however, be within the base
4300 type of ETYPE. So be sure we do the subtraction in that type. */
4301 if (TREE_TYPE (etype))
4302 etype = TREE_TYPE (etype);
4303 break;
4305 case ENUMERAL_TYPE:
4306 case BOOLEAN_TYPE:
4307 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4308 TYPE_UNSIGNED (etype));
4309 break;
4311 default:
4312 break;
4315 /* If we don't have wrap-around arithmetic up front, try to force it. */
4316 if (TREE_CODE (etype) == INTEGER_TYPE
4317 && !TYPE_OVERFLOW_WRAPS (etype))
4319 tree utype, minv, maxv;
4321 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4322 for the type in question, as we rely on this here. */
4323 utype = lang_hooks.types.unsigned_type (etype);
4324 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4325 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4326 integer_one_node, 1);
4327 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4329 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4330 minv, 1, maxv, 1)))
4331 etype = utype;
4332 else
4333 return 0;
4336 high = fold_convert (etype, high);
4337 low = fold_convert (etype, low);
4338 exp = fold_convert (etype, exp);
4340 value = const_binop (MINUS_EXPR, high, low, 0);
4342 if (value != 0 && !TREE_OVERFLOW (value))
4343 return build_range_check (type,
4344 fold_build2 (MINUS_EXPR, etype, exp, low),
4345 1, build_int_cst (etype, 0), value);
4347 return 0;
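/* Illustrative sketch (not part of GCC; assumes 8-bit chars): the fold
   noted above of (c >= 1 && c <= 127) into one signed comparison --
   values 128..255 become negative when viewed as signed char.  */
static int
nonzero_ascii_demo (unsigned char c)
{
  return (signed char) c > 0;	/* same truth value as c >= 1 && c <= 127 */
}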
4350 /* Return the predecessor of VAL in its type, handling the infinite case. */
4352 static tree
4353 range_predecessor (tree val)
4355 tree type = TREE_TYPE (val);
4357 if (INTEGRAL_TYPE_P (type)
4358 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4359 return 0;
4360 else
4361 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4364 /* Return the successor of VAL in its type, handling the infinite case. */
4366 static tree
4367 range_successor (tree val)
4369 tree type = TREE_TYPE (val);
4371 if (INTEGRAL_TYPE_P (type)
4372 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4373 return 0;
4374 else
4375 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4378 /* Given two ranges, see if we can merge them into one. Return 1 if we
4379 can, 0 if we can't. Set the output range into the specified parameters. */
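/* For example (illustrative): merging + [2, 5] with + [4, 10] yields
   the intersection + [4, 5]; merging - [2, 5] with - [6, 10] yields
   - [2, 10], the two excluded ranges being adjacent.  */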
4381 static int
4382 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4383 tree high0, int in1_p, tree low1, tree high1)
4385 int no_overlap;
4386 int subset;
4387 int temp;
4388 tree tem;
4389 int in_p;
4390 tree low, high;
4391 int lowequal = ((low0 == 0 && low1 == 0)
4392 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4393 low0, 0, low1, 0)));
4394 int highequal = ((high0 == 0 && high1 == 0)
4395 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4396 high0, 1, high1, 1)));
4398 /* Make range 0 be the range that starts first, or ends last if they
4399 start at the same value. Swap them if it isn't. */
4400 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4401 low0, 0, low1, 0))
4402 || (lowequal
4403 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4404 high1, 1, high0, 1))))
4406 temp = in0_p, in0_p = in1_p, in1_p = temp;
4407 tem = low0, low0 = low1, low1 = tem;
4408 tem = high0, high0 = high1, high1 = tem;
4411 /* Now flag two cases, whether the ranges are disjoint or whether the
4412 second range is totally subsumed in the first. Note that the tests
4413 below are simplified by the ones above. */
4414 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4415 high0, 1, low1, 0));
4416 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4417 high1, 1, high0, 1));
4419 /* We now have four cases, depending on whether we are including or
4420 excluding the two ranges. */
4421 if (in0_p && in1_p)
4423 /* If they don't overlap, the result is false. If the second range
4424 is a subset it is the result. Otherwise, the range is from the start
4425 of the second to the end of the first. */
4426 if (no_overlap)
4427 in_p = 0, low = high = 0;
4428 else if (subset)
4429 in_p = 1, low = low1, high = high1;
4430 else
4431 in_p = 1, low = low1, high = high0;
4434 else if (in0_p && ! in1_p)
4436 /* If they don't overlap, the result is the first range. If they are
4437 equal, the result is false. If the second range is a subset of the
4438 first, and the ranges begin at the same place, we go from just after
4439 the end of the second range to the end of the first. If the second
4440 range is not a subset of the first, or if it is a subset and both
4441 ranges end at the same place, the range starts at the start of the
4442 first range and ends just before the second range.
4443 Otherwise, we can't describe this as a single range. */
4444 if (no_overlap)
4445 in_p = 1, low = low0, high = high0;
4446 else if (lowequal && highequal)
4447 in_p = 0, low = high = 0;
4448 else if (subset && lowequal)
4450 low = range_successor (high1);
4451 high = high0;
4452 in_p = 1;
4453 if (low == 0)
4455 /* We are in the weird situation where high0 > high1 but
4456 high1 has no successor. Punt. */
4457 return 0;
4460 else if (! subset || highequal)
4462 low = low0;
4463 high = range_predecessor (low1);
4464 in_p = 1;
4465 if (high == 0)
4467 /* low0 < low1 but low1 has no predecessor. Punt. */
4468 return 0;
4471 else
4472 return 0;
4475 else if (! in0_p && in1_p)
4477 /* If they don't overlap, the result is the second range. If the second
4478 is a subset of the first, the result is false. Otherwise,
4479 the range starts just after the first range and ends at the
4480 end of the second. */
4481 if (no_overlap)
4482 in_p = 1, low = low1, high = high1;
4483 else if (subset || highequal)
4484 in_p = 0, low = high = 0;
4485 else
4487 low = range_successor (high0);
4488 high = high1;
4489 in_p = 1;
4490 if (low == 0)
4492 /* high1 > high0 but high0 has no successor. Punt. */
4493 return 0;
4498 else
4500 /* The case where we are excluding both ranges. Here the complex case
4501 is if they don't overlap. In that case, the only time we have a
4502 range is if they are adjacent. If the second is a subset of the
4503 first, the result is the first. Otherwise, the range to exclude
4504 starts at the beginning of the first range and ends at the end of the
4505 second. */
4506 if (no_overlap)
4508 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4509 range_successor (high0),
4510 1, low1, 0)))
4511 in_p = 0, low = low0, high = high1;
4512 else
4514 /* Canonicalize - [min, x] into - [-, x]. */
4515 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4516 switch (TREE_CODE (TREE_TYPE (low0)))
4518 case ENUMERAL_TYPE:
4519 if (TYPE_PRECISION (TREE_TYPE (low0))
4520 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4521 break;
4522 /* FALLTHROUGH */
4523 case INTEGER_TYPE:
4524 if (tree_int_cst_equal (low0,
4525 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4526 low0 = 0;
4527 break;
4528 case POINTER_TYPE:
4529 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4530 && integer_zerop (low0))
4531 low0 = 0;
4532 break;
4533 default:
4534 break;
4537 /* Canonicalize - [x, max] into - [x, -]. */
4538 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4539 switch (TREE_CODE (TREE_TYPE (high1)))
4541 case ENUMERAL_TYPE:
4542 if (TYPE_PRECISION (TREE_TYPE (high1))
4543 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4544 break;
4545 /* FALLTHROUGH */
4546 case INTEGER_TYPE:
4547 if (tree_int_cst_equal (high1,
4548 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4549 high1 = 0;
4550 break;
4551 case POINTER_TYPE:
4552 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4553 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4554 high1, 1,
4555 integer_one_node, 1)))
4556 high1 = 0;
4557 break;
4558 default:
4559 break;
4562 /* The ranges might be also adjacent between the maximum and
4563 minimum values of the given type. For
4564 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4565 return + [x + 1, y - 1]. */
4566 if (low0 == 0 && high1 == 0)
4568 low = range_successor (high0);
4569 high = range_predecessor (low1);
4570 if (low == 0 || high == 0)
4571 return 0;
4573 in_p = 1;
4575 else
4576 return 0;
4579 else if (subset)
4580 in_p = 0, low = low0, high = high0;
4581 else
4582 in_p = 0, low = low0, high = high1;
4585 *pin_p = in_p, *plow = low, *phigh = high;
4586 return 1;
4590 /* Subroutine of fold, looking inside expressions of the form
4591 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4592 of the COND_EXPR. This function is being used also to optimize
4593 A op B ? C : A, by reversing the comparison first.
4595 Return a folded expression whose code is not a COND_EXPR
4596 anymore, or NULL_TREE if no folding opportunity is found. */
4598 static tree
4599 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4601 enum tree_code comp_code = TREE_CODE (arg0);
4602 tree arg00 = TREE_OPERAND (arg0, 0);
4603 tree arg01 = TREE_OPERAND (arg0, 1);
4604 tree arg1_type = TREE_TYPE (arg1);
4605 tree tem;
4607 STRIP_NOPS (arg1);
4608 STRIP_NOPS (arg2);
4610 /* If we have A op 0 ? A : -A, consider applying the following
4611 transformations:
4613 A == 0? A : -A same as -A
4614 A != 0? A : -A same as A
4615 A >= 0? A : -A same as abs (A)
4616 A > 0? A : -A same as abs (A)
4617 A <= 0? A : -A same as -abs (A)
4618 A < 0? A : -A same as -abs (A)
4620 None of these transformations work for modes with signed
4621 zeros. If A is +/-0, the first two transformations will
4622 change the sign of the result (from +0 to -0, or vice
4623 versa). The last four will fix the sign of the result,
4624 even though the original expressions could be positive or
4625 negative, depending on the sign of A.
4627 Note that all these transformations are correct if A is
4628 NaN, since the two alternatives (A and -A) are also NaNs. */
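/* Concrete instance (illustrative): "x > 0 ? x : -x" reaches the
   GT_EXPR arm below and is rewritten as ABS_EXPR <x>.  */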
4629 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4630 ? real_zerop (arg01)
4631 : integer_zerop (arg01))
4632 && ((TREE_CODE (arg2) == NEGATE_EXPR
4633 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4634 /* In the case that A is of the form X-Y, '-A' (arg2) may
4635 have already been folded to Y-X, check for that. */
4636 || (TREE_CODE (arg1) == MINUS_EXPR
4637 && TREE_CODE (arg2) == MINUS_EXPR
4638 && operand_equal_p (TREE_OPERAND (arg1, 0),
4639 TREE_OPERAND (arg2, 1), 0)
4640 && operand_equal_p (TREE_OPERAND (arg1, 1),
4641 TREE_OPERAND (arg2, 0), 0))))
4642 switch (comp_code)
4644 case EQ_EXPR:
4645 case UNEQ_EXPR:
4646 tem = fold_convert (arg1_type, arg1);
4647 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4648 case NE_EXPR:
4649 case LTGT_EXPR:
4650 return pedantic_non_lvalue (fold_convert (type, arg1));
4651 case UNGE_EXPR:
4652 case UNGT_EXPR:
4653 if (flag_trapping_math)
4654 break;
4655 /* Fall through. */
4656 case GE_EXPR:
4657 case GT_EXPR:
4658 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4659 arg1 = fold_convert (lang_hooks.types.signed_type
4660 (TREE_TYPE (arg1)), arg1);
4661 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4662 return pedantic_non_lvalue (fold_convert (type, tem));
4663 case UNLE_EXPR:
4664 case UNLT_EXPR:
4665 if (flag_trapping_math)
4666 break;
/* Fall through. */
4667 case LE_EXPR:
4668 case LT_EXPR:
4669 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4670 arg1 = fold_convert (lang_hooks.types.signed_type
4671 (TREE_TYPE (arg1)), arg1);
4672 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4673 return negate_expr (fold_convert (type, tem));
4674 default:
4675 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4676 break;
4679 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4680 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4681 both transformations are correct when A is NaN: A != 0
4682 is then true, and A == 0 is false. */
4684 if (integer_zerop (arg01) && integer_zerop (arg2))
4686 if (comp_code == NE_EXPR)
4687 return pedantic_non_lvalue (fold_convert (type, arg1));
4688 else if (comp_code == EQ_EXPR)
4689 return build_int_cst (type, 0);
4692 /* Try some transformations of A op B ? A : B.
4694 A == B? A : B same as B
4695 A != B? A : B same as A
4696 A >= B? A : B same as max (A, B)
4697 A > B? A : B same as max (B, A)
4698 A <= B? A : B same as min (A, B)
4699 A < B? A : B same as min (B, A)
4701 As above, these transformations don't work in the presence
4702 of signed zeros. For example, if A and B are zeros of
4703 opposite sign, the first two transformations will change
4704 the sign of the result. In the last four, the original
4705 expressions give different results for (A=+0, B=-0) and
4706 (A=-0, B=+0), but the transformed expressions do not.
4708 The first two transformations are correct if either A or B
4709 is a NaN. In the first transformation, the condition will
4710 be false, and B will indeed be chosen. In the case of the
4711 second transformation, the condition A != B will be true,
4712 and A will be chosen.
4714 The conversions to max() and min() are not correct if B is
4715 a number and A is not. The conditions in the original
4716 expressions will be false, so all four give B. The min()
4717 and max() versions would give a NaN instead. */
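/* Concrete instance (illustrative): "a < b ? a : b" becomes
   MIN_EXPR <a, b>, and "a > b ? a : b" becomes MAX_EXPR <b, a>,
   subject to the NaN caveats above.  */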
4718 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4719 /* Avoid these transformations if the COND_EXPR may be used
4720 as an lvalue in the C++ front-end. PR c++/19199. */
4721 && (in_gimple_form
4722 || (strcmp (lang_hooks.name, "GNU C++") != 0
4723 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4724 || ! maybe_lvalue_p (arg1)
4725 || ! maybe_lvalue_p (arg2)))
4727 tree comp_op0 = arg00;
4728 tree comp_op1 = arg01;
4729 tree comp_type = TREE_TYPE (comp_op0);
4731 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4732 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4734 comp_type = type;
4735 comp_op0 = arg1;
4736 comp_op1 = arg2;
4739 switch (comp_code)
4741 case EQ_EXPR:
4742 return pedantic_non_lvalue (fold_convert (type, arg2));
4743 case NE_EXPR:
4744 return pedantic_non_lvalue (fold_convert (type, arg1));
4745 case LE_EXPR:
4746 case LT_EXPR:
4747 case UNLE_EXPR:
4748 case UNLT_EXPR:
4749 /* In C++ a ?: expression can be an lvalue, so put the
4750 operand which will be used if they are equal first
4751 so that we can convert this back to the
4752 corresponding COND_EXPR. */
4753 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4755 comp_op0 = fold_convert (comp_type, comp_op0);
4756 comp_op1 = fold_convert (comp_type, comp_op1);
4757 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4758 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4759 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4760 return pedantic_non_lvalue (fold_convert (type, tem));
4762 break;
4763 case GE_EXPR:
4764 case GT_EXPR:
4765 case UNGE_EXPR:
4766 case UNGT_EXPR:
4767 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4769 comp_op0 = fold_convert (comp_type, comp_op0);
4770 comp_op1 = fold_convert (comp_type, comp_op1);
4771 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4772 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4773 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4774 return pedantic_non_lvalue (fold_convert (type, tem));
4776 break;
4777 case UNEQ_EXPR:
4778 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4779 return pedantic_non_lvalue (fold_convert (type, arg2));
4780 break;
4781 case LTGT_EXPR:
4782 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4783 return pedantic_non_lvalue (fold_convert (type, arg1));
4784 break;
4785 default:
4786 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4787 break;
4791 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4792 we might still be able to simplify this. For example,
4793 if C1 is one less or one more than C2, this might have started
4794 out as a MIN or MAX and been transformed by this function.
4795 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
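/* E.g. (illustrative): "x < 6 ? x : 5" may have started out as
   MIN (x, 5); since C1 == C2 + 1 there, the LT_EXPR arm below
   rebuilds the MIN_EXPR.  */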
4797 if (INTEGRAL_TYPE_P (type)
4798 && TREE_CODE (arg01) == INTEGER_CST
4799 && TREE_CODE (arg2) == INTEGER_CST)
4800 switch (comp_code)
4802 case EQ_EXPR:
4803 /* We can replace A with C1 in this case. */
4804 arg1 = fold_convert (type, arg01);
4805 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4807 case LT_EXPR:
4808 /* If C1 is C2 + 1, this is min(A, C2). */
4809 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4810 OEP_ONLY_CONST)
4811 && operand_equal_p (arg01,
4812 const_binop (PLUS_EXPR, arg2,
4813 integer_one_node, 0),
4814 OEP_ONLY_CONST))
4815 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4816 type, arg1, arg2));
4817 break;
4819 case LE_EXPR:
4820 /* If C1 is C2 - 1, this is min(A, C2). */
4821 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4822 OEP_ONLY_CONST)
4823 && operand_equal_p (arg01,
4824 const_binop (MINUS_EXPR, arg2,
4825 integer_one_node, 0),
4826 OEP_ONLY_CONST))
4827 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4828 type, arg1, arg2));
4829 break;
4831 case GT_EXPR:
4832 /* If C1 is C2 - 1, this is max(A, C2). */
4833 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4834 OEP_ONLY_CONST)
4835 && operand_equal_p (arg01,
4836 const_binop (MINUS_EXPR, arg2,
4837 integer_one_node, 0),
4838 OEP_ONLY_CONST))
4839 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4840 type, arg1, arg2));
4841 break;
4843 case GE_EXPR:
4844 /* If C1 is C2 + 1, this is max(A, C2). */
4845 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4846 OEP_ONLY_CONST)
4847 && operand_equal_p (arg01,
4848 const_binop (PLUS_EXPR, arg2,
4849 integer_one_node, 0),
4850 OEP_ONLY_CONST))
4851 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4852 type, arg1, arg2));
4853 break;
4854 case NE_EXPR:
4855 break;
4856 default:
4857 gcc_unreachable ();
4860 return NULL_TREE;
4865 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4866 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4867 #endif
4869 /* EXP is some logical combination of boolean tests. See if we can
4870 merge it into some range test. Return the new tree if so. */
4872 static tree
4873 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4875 int or_op = (code == TRUTH_ORIF_EXPR
4876 || code == TRUTH_OR_EXPR);
4877 int in0_p, in1_p, in_p;
4878 tree low0, low1, low, high0, high1, high;
4879 bool strict_overflow_p = false;
4880 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4881 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4882 tree tem;
4883 const char * const warnmsg = G_("assuming signed overflow does not occur "
4884 "when simplifying range test");
4886 /* If this is an OR operation, invert both sides; we will invert
4887 again at the end. */
4888 if (or_op)
4889 in0_p = ! in0_p, in1_p = ! in1_p;
4891 /* If both expressions are the same, if we can merge the ranges, and we
4892 can build the range test, return it or it inverted. If one of the
4893 ranges is always true or always false, consider it to be the same
4894 expression as the other. */
4895 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4896 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4897 in1_p, low1, high1)
4898 && 0 != (tem = (build_range_check (type,
4899 lhs != 0 ? lhs
4900 : rhs != 0 ? rhs : integer_zero_node,
4901 in_p, low, high))))
4903 if (strict_overflow_p)
4904 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4905 return or_op ? invert_truthvalue (tem) : tem;
4908 /* On machines where the branch cost is expensive, if this is a
4909 short-circuited branch and the underlying object on both sides
4910 is the same, make a non-short-circuit operation. */
4911 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4912 && lhs != 0 && rhs != 0
4913 && (code == TRUTH_ANDIF_EXPR
4914 || code == TRUTH_ORIF_EXPR)
4915 && operand_equal_p (lhs, rhs, 0))
4917 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4918 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4919 which cases we can't do this. */
4920 if (simple_operand_p (lhs))
4921 return build2 (code == TRUTH_ANDIF_EXPR
4922 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4923 type, op0, op1);
4925 else if (lang_hooks.decls.global_bindings_p () == 0
4926 && ! CONTAINS_PLACEHOLDER_P (lhs))
4928 tree common = save_expr (lhs);
4930 if (0 != (lhs = build_range_check (type, common,
4931 or_op ? ! in0_p : in0_p,
4932 low0, high0))
4933 && (0 != (rhs = build_range_check (type, common,
4934 or_op ? ! in1_p : in1_p,
4935 low1, high1))))
4937 if (strict_overflow_p)
4938 fold_overflow_warning (warnmsg,
4939 WARN_STRICT_OVERFLOW_COMPARISON);
4940 return build2 (code == TRUTH_ANDIF_EXPR
4941 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4942 type, lhs, rhs);
4947 return 0;
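/* Illustrative sketch (not part of GCC; the helper name is invented):
   what the routine above does to a short-circuit chain at the source
   level.  */
static int
lower_letter_demo (int x)
{
  /* The && below merges into a single branchless range test, roughly
     ((unsigned) x - 'a') <= ('z' - 'a').  */
  return x >= 'a' && x <= 'z';
}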
4950 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4951 bit value. Arrange things so the extra bits will be set to zero if and
4952 only if C is sign-extended to its full width. If MASK is nonzero,
4953 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4955 static tree
4956 unextend (tree c, int p, int unsignedp, tree mask)
4958 tree type = TREE_TYPE (c);
4959 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4960 tree temp;
4962 if (p == modesize || unsignedp)
4963 return c;
4965 /* We work by getting just the sign bit into the low-order bit, then
4966 into the high-order bit, then sign-extend. We then XOR that value
4967 with C. */
4968 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4969 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4971 /* We must use a signed type in order to get an arithmetic right shift.
4972 However, we must also avoid introducing accidental overflows, so that
4973 a subsequent call to integer_zerop will work. Hence we must
4974 do the type conversion here. At this point, the constant is either
4975 zero or one, and the conversion to a signed type can never overflow.
4976 We could get an overflow if this conversion is done anywhere else. */
4977 if (TYPE_UNSIGNED (type))
4978 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4980 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4981 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4982 if (mask != 0)
4983 temp = const_binop (BIT_AND_EXPR, temp,
4984 fold_convert (TREE_TYPE (c), mask), 0);
4985 /* If necessary, convert the type back to match the type of C. */
4986 if (TYPE_UNSIGNED (type))
4987 temp = fold_convert (type, temp);
4989 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
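/* Worked example (illustrative): with P == 8 and MODESIZE == 32,
   unextend maps C == 0xffffff80 (-128 sign-extended) to 0x00000080:
   TEMP becomes 0xffffff00 and the XOR clears the extra bits.  For
   C == 0x00000080, not sign-extended, the XOR sets them instead.  */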
4992 /* Find ways of folding logical expressions of LHS and RHS:
4993 Try to merge two comparisons to the same innermost item.
4994 Look for range tests like "ch >= '0' && ch <= '9'".
4995 Look for combinations of simple terms on machines with expensive branches
4996 and evaluate the RHS unconditionally.
4998 For example, if we have p->a == 2 && p->b == 4 and we can make an
4999 object large enough to span both A and B, we can do this with a comparison
5000 against the object ANDed with the a mask.
5002 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5003 operations to do this with one comparison.
5005 We check for both normal comparisons and the BIT_AND_EXPRs made by
5006 this function and the one above.
5008 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5009 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5011 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5012 two operands.
5014 We return the simplified tree or 0 if no optimization is possible. */
5016 static tree
5017 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5019 /* If this is the "or" of two comparisons, we can do something if
5020 the comparisons are NE_EXPR. If this is the "and", we can do something
5021 if the comparisons are EQ_EXPR. I.e.,
5022 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5024 WANTED_CODE is this operation code. For single bit fields, we can
5025 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5026 comparison for one-bit fields. */
5028 enum tree_code wanted_code;
5029 enum tree_code lcode, rcode;
5030 tree ll_arg, lr_arg, rl_arg, rr_arg;
5031 tree ll_inner, lr_inner, rl_inner, rr_inner;
5032 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5033 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5034 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5035 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5036 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5037 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5038 enum machine_mode lnmode, rnmode;
5039 tree ll_mask, lr_mask, rl_mask, rr_mask;
5040 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5041 tree l_const, r_const;
5042 tree lntype, rntype, result;
5043 int first_bit, end_bit;
5044 int volatilep;
5045 tree orig_lhs = lhs, orig_rhs = rhs;
5046 enum tree_code orig_code = code;
5048 /* Start by getting the comparison codes. Fail if anything is volatile.
5049 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5050 it were surrounded with a NE_EXPR. */
5052 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5053 return 0;
5055 lcode = TREE_CODE (lhs);
5056 rcode = TREE_CODE (rhs);
5058 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5060 lhs = build2 (NE_EXPR, truth_type, lhs,
5061 build_int_cst (TREE_TYPE (lhs), 0));
5062 lcode = NE_EXPR;
5065 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5067 rhs = build2 (NE_EXPR, truth_type, rhs,
5068 build_int_cst (TREE_TYPE (rhs), 0));
5069 rcode = NE_EXPR;
5072 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5073 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5074 return 0;
5076 ll_arg = TREE_OPERAND (lhs, 0);
5077 lr_arg = TREE_OPERAND (lhs, 1);
5078 rl_arg = TREE_OPERAND (rhs, 0);
5079 rr_arg = TREE_OPERAND (rhs, 1);
5081 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5082 if (simple_operand_p (ll_arg)
5083 && simple_operand_p (lr_arg))
5085 tree result;
5086 if (operand_equal_p (ll_arg, rl_arg, 0)
5087 && operand_equal_p (lr_arg, rr_arg, 0))
5089 result = combine_comparisons (code, lcode, rcode,
5090 truth_type, ll_arg, lr_arg);
5091 if (result)
5092 return result;
5094 else if (operand_equal_p (ll_arg, rr_arg, 0)
5095 && operand_equal_p (lr_arg, rl_arg, 0))
5097 result = combine_comparisons (code, lcode,
5098 swap_tree_comparison (rcode),
5099 truth_type, ll_arg, lr_arg);
5100 if (result)
5101 return result;
5105 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5106 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5108 /* If the RHS can be evaluated unconditionally and its operands are
5109 simple, it wins to evaluate the RHS unconditionally on machines
5110 with expensive branches. In this case, this isn't a comparison
5111 that can be merged. Avoid doing this if the RHS is a floating-point
5112 comparison since those can trap. */
5114 if (BRANCH_COST >= 2
5115 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5116 && simple_operand_p (rl_arg)
5117 && simple_operand_p (rr_arg))
5119 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5120 if (code == TRUTH_OR_EXPR
5121 && lcode == NE_EXPR && integer_zerop (lr_arg)
5122 && rcode == NE_EXPR && integer_zerop (rr_arg)
5123 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5124 return build2 (NE_EXPR, truth_type,
5125 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5126 ll_arg, rl_arg),
5127 build_int_cst (TREE_TYPE (ll_arg), 0));
5129 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5130 if (code == TRUTH_AND_EXPR
5131 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5132 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5133 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5134 return build2 (EQ_EXPR, truth_type,
5135 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5136 ll_arg, rl_arg),
5137 build_int_cst (TREE_TYPE (ll_arg), 0));
5139 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5141 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5142 return build2 (code, truth_type, lhs, rhs);
5143 return NULL_TREE;
5147 /* See if the comparisons can be merged. Then get all the parameters for
5148 each side. */
5150 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5151 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5152 return 0;
5154 volatilep = 0;
5155 ll_inner = decode_field_reference (ll_arg,
5156 &ll_bitsize, &ll_bitpos, &ll_mode,
5157 &ll_unsignedp, &volatilep, &ll_mask,
5158 &ll_and_mask);
5159 lr_inner = decode_field_reference (lr_arg,
5160 &lr_bitsize, &lr_bitpos, &lr_mode,
5161 &lr_unsignedp, &volatilep, &lr_mask,
5162 &lr_and_mask);
5163 rl_inner = decode_field_reference (rl_arg,
5164 &rl_bitsize, &rl_bitpos, &rl_mode,
5165 &rl_unsignedp, &volatilep, &rl_mask,
5166 &rl_and_mask);
5167 rr_inner = decode_field_reference (rr_arg,
5168 &rr_bitsize, &rr_bitpos, &rr_mode,
5169 &rr_unsignedp, &volatilep, &rr_mask,
5170 &rr_and_mask);
5172 /* It must be true that the inner operation on the lhs of each
5173 comparison must be the same if we are to be able to do anything.
5174 Then see if we have constants. If not, the same must be true for
5175 the rhs's. */
5176 if (volatilep || ll_inner == 0 || rl_inner == 0
5177 || ! operand_equal_p (ll_inner, rl_inner, 0))
5178 return 0;
5180 if (TREE_CODE (lr_arg) == INTEGER_CST
5181 && TREE_CODE (rr_arg) == INTEGER_CST)
5182 l_const = lr_arg, r_const = rr_arg;
5183 else if (lr_inner == 0 || rr_inner == 0
5184 || ! operand_equal_p (lr_inner, rr_inner, 0))
5185 return 0;
5186 else
5187 l_const = r_const = 0;
5189 /* If either comparison code is not correct for our logical operation,
5190 fail. However, we can convert a one-bit comparison against zero into
5191 the opposite comparison against that bit being set in the field. */
5193 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5194 if (lcode != wanted_code)
5196 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5198 /* Make the left operand unsigned, since we are only interested
5199 in the value of one bit. Otherwise we are doing the wrong
5200 thing below. */
5201 ll_unsignedp = 1;
5202 l_const = ll_mask;
5204 else
5205 return 0;
5208 /* This is analogous to the code for l_const above. */
5209 if (rcode != wanted_code)
5211 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5213 rl_unsignedp = 1;
5214 r_const = rl_mask;
5216 else
5217 return 0;
5220 /* After this point all optimizations will generate bit-field
5221 references, which we might not want. */
5222 if (! lang_hooks.can_use_bit_fields_p ())
5223 return 0;
5225 /* See if we can find a mode that contains both fields being compared on
5226 the left. If we can't, fail. Otherwise, update all constants and masks
5227 to be relative to a field of that size. */
5228 first_bit = MIN (ll_bitpos, rl_bitpos);
5229 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5230 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5231 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5232 volatilep);
5233 if (lnmode == VOIDmode)
5234 return 0;
5236 lnbitsize = GET_MODE_BITSIZE (lnmode);
5237 lnbitpos = first_bit & ~ (lnbitsize - 1);
5238 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5239 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5241 if (BYTES_BIG_ENDIAN)
5243 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5244 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5247 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5248 size_int (xll_bitpos), 0);
5249 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5250 size_int (xrl_bitpos), 0);
5252 if (l_const)
5254 l_const = fold_convert (lntype, l_const);
5255 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5256 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5257 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5258 fold_build1 (BIT_NOT_EXPR,
5259 lntype, ll_mask),
5260 0)))
5262 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5264 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5267 if (r_const)
5269 r_const = fold_convert (lntype, r_const);
5270 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5271 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5272 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5273 fold_build1 (BIT_NOT_EXPR,
5274 lntype, rl_mask),
5275 0)))
5277 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5279 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5283 /* If the right sides are not constant, do the same for them. Also,
5284 disallow this optimization if a size or signedness mismatch occurs
5285 between the left and right sides. */
5286 if (l_const == 0)
5288 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5289 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5290 /* Make sure the two fields on the right
5291 correspond to the left without being swapped. */
5292 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5293 return 0;
5295 first_bit = MIN (lr_bitpos, rr_bitpos);
5296 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5297 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5298 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5299 volatilep);
5300 if (rnmode == VOIDmode)
5301 return 0;
5303 rnbitsize = GET_MODE_BITSIZE (rnmode);
5304 rnbitpos = first_bit & ~ (rnbitsize - 1);
5305 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5306 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5308 if (BYTES_BIG_ENDIAN)
5310 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5311 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5314 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5315 size_int (xlr_bitpos), 0);
5316 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5317 size_int (xrr_bitpos), 0);
5319 /* Make a mask that corresponds to both fields being compared.
5320 Do this for both items being compared. If the operands are the
5321 same size and the bits being compared are in the same position
5322 then we can do this by masking both and comparing the masked
5323 results. */
5324 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5325 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5326 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5328 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5329 ll_unsignedp || rl_unsignedp);
5330 if (! all_ones_mask_p (ll_mask, lnbitsize))
5331 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5333 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5334 lr_unsignedp || rr_unsignedp);
5335 if (! all_ones_mask_p (lr_mask, rnbitsize))
5336 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5338 return build2 (wanted_code, truth_type, lhs, rhs);
5341 /* There is still another way we can do something: If both pairs of
5342 fields being compared are adjacent, we may be able to make a wider
5343 field containing them both.
5345 Note that we still must mask the lhs/rhs expressions. Furthermore,
5346 the mask must be shifted to account for the shift done by
5347 make_bit_field_ref. */
5348 if ((ll_bitsize + ll_bitpos == rl_bitpos
5349 && lr_bitsize + lr_bitpos == rr_bitpos)
5350 || (ll_bitpos == rl_bitpos + rl_bitsize
5351 && lr_bitpos == rr_bitpos + rr_bitsize))
5353 tree type;
5355 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5356 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5357 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5358 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5360 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5361 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5362 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5363 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5365 /* Convert to the smaller type before masking out unwanted bits. */
5366 type = lntype;
5367 if (lntype != rntype)
5369 if (lnbitsize > rnbitsize)
5371 lhs = fold_convert (rntype, lhs);
5372 ll_mask = fold_convert (rntype, ll_mask);
5373 type = rntype;
5375 else if (lnbitsize < rnbitsize)
5377 rhs = fold_convert (lntype, rhs);
5378 lr_mask = fold_convert (lntype, lr_mask);
5379 type = lntype;
5383 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5384 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5386 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5387 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5389 return build2 (wanted_code, truth_type, lhs, rhs);
5392 return 0;
5395 /* Handle the case of comparisons with constants. If there is something in
5396 common between the masks, those bits of the constants must be the same.
5397 If not, the condition is always false. Test for this to avoid generating
5398 incorrect code below. */
5399 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5400 if (! integer_zerop (result)
5401 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5402 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5404 if (wanted_code == NE_EXPR)
5406 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5407 return constant_boolean_node (true, truth_type);
5409 else
5411 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5412 return constant_boolean_node (false, truth_type);
5416 /* Construct the expression we will return. First get the component
5417 reference we will make. Unless the mask is all ones for the width of
5418 that field, perform the mask operation. Then compare with the
5419 merged constant. */
5420 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5421 ll_unsignedp || rl_unsignedp);
5423 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5424 if (! all_ones_mask_p (ll_mask, lnbitsize))
5425 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5427 return build2 (wanted_code, truth_type, result,
5428 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
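/* Illustrative sketch (not part of GCC; the helper name is invented):
   one of the branch-saving rewrites above, at the source level.  */
static int
ior_flags_demo (int a, int b)
{
  /* On targets with costly branches this is evaluated as
     (a | b) != 0: one test instead of two.  */
  return a != 0 || b != 0;
}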
5431 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5432 constant. */
5434 static tree
5435 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5437 tree arg0 = op0;
5438 enum tree_code op_code;
5439 tree comp_const = op1;
5440 tree minmax_const;
5441 int consts_equal, consts_lt;
5442 tree inner;
5444 STRIP_SIGN_NOPS (arg0);
5446 op_code = TREE_CODE (arg0);
5447 minmax_const = TREE_OPERAND (arg0, 1);
5448 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5449 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5450 inner = TREE_OPERAND (arg0, 0);
5452 /* If something does not permit us to optimize, return NULL_TREE. */
5453 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5454 || TREE_CODE (comp_const) != INTEGER_CST
5455 || TREE_CONSTANT_OVERFLOW (comp_const)
5456 || TREE_CODE (minmax_const) != INTEGER_CST
5457 || TREE_CONSTANT_OVERFLOW (minmax_const))
5458 return NULL_TREE;
5460 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5461 and GT_EXPR, doing the rest with recursive calls using logical
5462 simplifications. */
5463 switch (code)
5465 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5467 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5468 type, op0, op1);
5469 if (tem)
5470 return invert_truthvalue (tem);
5471 return NULL_TREE;
5474 case GE_EXPR:
5475 return
5476 fold_build2 (TRUTH_ORIF_EXPR, type,
5477 optimize_minmax_comparison
5478 (EQ_EXPR, type, arg0, comp_const),
5479 optimize_minmax_comparison
5480 (GT_EXPR, type, arg0, comp_const));
5482 case EQ_EXPR:
5483 if (op_code == MAX_EXPR && consts_equal)
5484 /* MAX (X, 0) == 0 -> X <= 0 */
5485 return fold_build2 (LE_EXPR, type, inner, comp_const);
5487 else if (op_code == MAX_EXPR && consts_lt)
5488 /* MAX (X, 0) == 5 -> X == 5 */
5489 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5491 else if (op_code == MAX_EXPR)
5492 /* MAX (X, 0) == -1 -> false */
5493 return omit_one_operand (type, integer_zero_node, inner);
5495 else if (consts_equal)
5496 /* MIN (X, 0) == 0 -> X >= 0 */
5497 return fold_build2 (GE_EXPR, type, inner, comp_const);
5499 else if (consts_lt)
5500 /* MIN (X, 0) == 5 -> false */
5501 return omit_one_operand (type, integer_zero_node, inner);
5503 else
5504 /* MIN (X, 0) == -1 -> X == -1 */
5505 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5507 case GT_EXPR:
5508 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5509 /* MAX (X, 0) > 0 -> X > 0
5510 MAX (X, 0) > 5 -> X > 5 */
5511 return fold_build2 (GT_EXPR, type, inner, comp_const);
5513 else if (op_code == MAX_EXPR)
5514 /* MAX (X, 0) > -1 -> true */
5515 return omit_one_operand (type, integer_one_node, inner);
5517 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5518 /* MIN (X, 0) > 0 -> false
5519 MIN (X, 0) > 5 -> false */
5520 return omit_one_operand (type, integer_zero_node, inner);
5522 else
5523 /* MIN (X, 0) > -1 -> X > -1 */
5524 return fold_build2 (GT_EXPR, type, inner, comp_const);
5526 default:
5527 return NULL_TREE;
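/* Illustrative sketch (not part of GCC; the helper name is invented):
   two of the simplifications derived above.  */
static int
minmax_demo (int x)
{
  int m = x > 0 ? x : 0;	/* MAX (x, 0) */
  return m > 5;			/* folds to x > 5; MIN (x, 0) == 5
				   would fold to constant false.  */
}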
5531 /* T is an integer expression that is being multiplied, divided, or taken a
5532 modulus (CODE says which and what kind of divide or modulus) by a
5533 constant C. See if we can eliminate that operation by folding it with
5534 other operations already in T. WIDE_TYPE, if non-null, is a type that
5535 should be used for the computation if wider than our type.
5537 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5538 (X * 2) + (Y * 4). We must, however, be assured that either the original
5539 expression would not overflow or that overflow is undefined for the type
5540 in the language in question.
5542 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5543 the machine has a multiply-accumulate insn or that this is part of an
5544 addressing calculation.
5546 If we return a non-null expression, it is an equivalent form of the
5547 original computation, but need not be in the original type.
5549 We set *STRICT_OVERFLOW_P to true if the return value depends on
5550 signed overflow being undefined. Otherwise we do not change
5551 *STRICT_OVERFLOW_P. */
5553 static tree
5554 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5555 bool *strict_overflow_p)
5557 /* To avoid exponential search depth, refuse to allow recursion past
5558 three levels. Beyond that (1) it's highly unlikely that we'll find
5559 something interesting and (2) we've probably processed it before
5560 when we built the inner expression. */
5562 static int depth;
5563 tree ret;
5565 if (depth > 3)
5566 return NULL;
5568 depth++;
5569 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5570 depth--;
5572 return ret;
5575 static tree
5576 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5577 bool *strict_overflow_p)
5579 tree type = TREE_TYPE (t);
5580 enum tree_code tcode = TREE_CODE (t);
5581 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5582 > GET_MODE_SIZE (TYPE_MODE (type)))
5583 ? wide_type : type);
5584 tree t1, t2;
5585 int same_p = tcode == code;
5586 tree op0 = NULL_TREE, op1 = NULL_TREE;
5587 bool sub_strict_overflow_p;
5589 /* Don't deal with constants of zero here; they confuse the code below. */
5590 if (integer_zerop (c))
5591 return NULL_TREE;
5593 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5594 op0 = TREE_OPERAND (t, 0);
5596 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5597 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5599 /* Note that we need not handle conditional operations here since fold
5600 already handles those cases. So just do arithmetic here. */
5601 switch (tcode)
5603 case INTEGER_CST:
5604 /* For a constant, we can always simplify if we are a multiply
5605 or (for divide and modulus) if it is a multiple of our constant. */
5606 if (code == MULT_EXPR
5607 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5608 return const_binop (code, fold_convert (ctype, t),
5609 fold_convert (ctype, c), 0);
5610 break;
5612 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5613 /* If op0 is an expression ... */
5614 if ((COMPARISON_CLASS_P (op0)
5615 || UNARY_CLASS_P (op0)
5616 || BINARY_CLASS_P (op0)
5617 || EXPRESSION_CLASS_P (op0))
5618 /* ... and is unsigned, and its type is smaller than ctype,
5619 then we cannot pass through as widening. */
5620 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5621 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5622 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5623 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5624 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5625 /* ... or this is a truncation (t is narrower than op0),
5626 then we cannot pass through this narrowing. */
5627 || (GET_MODE_SIZE (TYPE_MODE (type))
5628 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5629 /* ... or signedness changes for division or modulus,
5630 then we cannot pass through this conversion. */
5631 || (code != MULT_EXPR
5632 && (TYPE_UNSIGNED (ctype)
5633 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5634 break;
5636 /* Pass the constant down and see if we can make a simplification. If
5637 we can, replace this expression with the inner simplification for
5638 possible later conversion to our or some other type. */
5639 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5640 && TREE_CODE (t2) == INTEGER_CST
5641 && ! TREE_CONSTANT_OVERFLOW (t2)
5642 && (0 != (t1 = extract_muldiv (op0, t2, code,
5643 code == MULT_EXPR
5644 ? ctype : NULL_TREE,
5645 strict_overflow_p))))
5646 return t1;
5647 break;
5649 case ABS_EXPR:
5650 /* If widening the type changes it from signed to unsigned, then we
5651 must avoid building ABS_EXPR itself as unsigned. */
5652 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5654 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5655 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5656 != 0)
5658 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5659 return fold_convert (ctype, t1);
5661 break;
5663 /* If the constant is negative, we cannot simplify this. */
5664 if (tree_int_cst_sgn (c) == -1)
5665 break;
5666 /* FALLTHROUGH */
5667 case NEGATE_EXPR:
5668 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5669 != 0)
5670 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5671 break;
5673 case MIN_EXPR: case MAX_EXPR:
5674 /* If widening the type changes the signedness, then we can't perform
5675 this optimization as that changes the result. */
5676 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5677 break;
5679 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5680 sub_strict_overflow_p = false;
5681 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5682 &sub_strict_overflow_p)) != 0
5683 && (t2 = extract_muldiv (op1, c, code, wide_type,
5684 &sub_strict_overflow_p)) != 0)
5686 if (tree_int_cst_sgn (c) < 0)
5687 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5688 if (sub_strict_overflow_p)
5689 *strict_overflow_p = true;
5690 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5691 fold_convert (ctype, t2));
5693 break;
5695 case LSHIFT_EXPR: case RSHIFT_EXPR:
5696 /* If the second operand is constant, this is a multiplication
5697 or floor division by a power of two, so we can treat it that
5698 way unless the multiplier or divisor overflows. Signed
5699 left-shift overflow is implementation-defined rather than
5700 undefined in C90, so do not convert signed left shift into
5701 multiplication. */
5702 if (TREE_CODE (op1) == INTEGER_CST
5703 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5704 /* const_binop may not detect overflow correctly,
5705 so check for it explicitly here. */
5706 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5707 && TREE_INT_CST_HIGH (op1) == 0
5708 && 0 != (t1 = fold_convert (ctype,
5709 const_binop (LSHIFT_EXPR,
5710 size_one_node,
5711 op1, 0)))
5712 && ! TREE_OVERFLOW (t1))
5713 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5714 ? MULT_EXPR : FLOOR_DIV_EXPR,
5715 ctype, fold_convert (ctype, op0), t1),
5716 c, code, wide_type, strict_overflow_p);
5717 break;
5719 case PLUS_EXPR: case MINUS_EXPR:
5720 /* See if we can eliminate the operation on both sides. If we can, we
5721 can return a new PLUS or MINUS. If we can't, the only remaining
5722 case where we can do anything is when the second operand is a
5723 constant. */
5724 sub_strict_overflow_p = false;
5725 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5726 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5727 if (t1 != 0 && t2 != 0
5728 && (code == MULT_EXPR
5729 /* If not multiplication, we can only do this if both operands
5730 are divisible by c. */
5731 || (multiple_of_p (ctype, op0, c)
5732 && multiple_of_p (ctype, op1, c))))
5734 if (sub_strict_overflow_p)
5735 *strict_overflow_p = true;
5736 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5737 fold_convert (ctype, t2));
5740 /* If this was a subtraction, negate OP1 and set it to be an addition.
5741 This simplifies the logic below. */
5742 if (tcode == MINUS_EXPR)
5743 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5745 if (TREE_CODE (op1) != INTEGER_CST)
5746 break;
5748 /* If either OP1 or C is negative, this optimization is not safe for
5749 some of the division and remainder types, while for others we need
5750 to change the code. */
5751 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5753 if (code == CEIL_DIV_EXPR)
5754 code = FLOOR_DIV_EXPR;
5755 else if (code == FLOOR_DIV_EXPR)
5756 code = CEIL_DIV_EXPR;
5757 else if (code != MULT_EXPR
5758 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5759 break;
5762 /* If it's a multiply or a division/modulus operation of a multiple
5763 of our constant, do the operation and verify it doesn't overflow. */
5764 if (code == MULT_EXPR
5765 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5767 op1 = const_binop (code, fold_convert (ctype, op1),
5768 fold_convert (ctype, c), 0);
5769 /* We allow the constant to overflow with wrapping semantics. */
5770 if (op1 == 0
5771 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5772 break;
5774 else
5775 break;
5777 /* If we have an unsigned type that is not a sizetype, we cannot widen
5778 the operation since it will change the result if the original
5779 computation overflowed. */
5780 if (TYPE_UNSIGNED (ctype)
5781 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5782 && ctype != type)
5783 break;
5785 /* If we were able to eliminate our operation from the first side,
5786 apply our operation to the second side and reform the PLUS. */
5787 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5788 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5790 /* The last case is if we are a multiply. In that case, we can
5791 apply the distributive law to commute the multiply and addition
5792 if the multiplication of the constants doesn't overflow. */
5793 if (code == MULT_EXPR)
5794 return fold_build2 (tcode, ctype,
5795 fold_build2 (code, ctype,
5796 fold_convert (ctype, op0),
5797 fold_convert (ctype, c)),
5798 op1);
5800 break;
5802 case MULT_EXPR:
5803 /* We have a special case here if we are doing something like
5804 (C * 8) % 4 since we know that's zero. */
5805 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5806 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5807 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5808 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5809 return omit_one_operand (type, integer_zero_node, op0);
5811 /* ... fall through ... */
5813 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5814 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5815 /* If we can extract our operation from the LHS, do so and return a
5816 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5817 do something only if the second operand is a constant. */
5818 if (same_p
5819 && (t1 = extract_muldiv (op0, c, code, wide_type,
5820 strict_overflow_p)) != 0)
5821 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5822 fold_convert (ctype, op1));
5823 else if (tcode == MULT_EXPR && code == MULT_EXPR
5824 && (t1 = extract_muldiv (op1, c, code, wide_type,
5825 strict_overflow_p)) != 0)
5826 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5827 fold_convert (ctype, t1));
5828 else if (TREE_CODE (op1) != INTEGER_CST)
5829 return 0;
5831 /* If these are the same operation types, we can associate them
5832 assuming no overflow. */
5833 if (tcode == code
5834 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5835 fold_convert (ctype, c), 0))
5836 && ! TREE_OVERFLOW (t1))
5837 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5839 /* If these operations "cancel" each other, we have the main
5840 optimizations of this pass, which occur when either constant is a
5841 multiple of the other, in which case we replace this with an
5842 operation of either CODE or TCODE.
5844 If we have an unsigned type that is not a sizetype, we cannot do
5845 this since it will change the result if the original computation
5846 overflowed. */
5847 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5848 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5849 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5850 || (tcode == MULT_EXPR
5851 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5852 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5854 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5856 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5857 *strict_overflow_p = true;
5858 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5859 fold_convert (ctype,
5860 const_binop (TRUNC_DIV_EXPR,
5861 op1, c, 0)));
5863 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5865 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5866 *strict_overflow_p = true;
5867 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5868 fold_convert (ctype,
5869 const_binop (TRUNC_DIV_EXPR,
5870 c, op1, 0)));
5873 break;
5875 default:
5876 break;
5879 return 0;
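/* An illustrative sketch of extract_muldiv (hypothetical example,
   with C == 4 and CODE an exact division):

     (x * 8 + y * 16) / 4   ->   x * 2 + y * 4

   This is valid only when the inner additions cannot overflow, or
   when signed overflow is undefined for the type, in which case
   *STRICT_OVERFLOW_P is set.  */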
5882 /* Return a node which has the indicated constant VALUE (either 0 or
5883 1), and is of the indicated TYPE. */
5885 tree
5886 constant_boolean_node (int value, tree type)
5888 if (type == integer_type_node)
5889 return value ? integer_one_node : integer_zero_node;
5890 else if (type == boolean_type_node)
5891 return value ? boolean_true_node : boolean_false_node;
5892 else
5893 return build_int_cst (type, value);
5897 /* Return true if EXPR looks like an ARRAY_REF and set BASE and
5898 OFFSET to the appropriate trees. If there is no offset,
5899 OFFSET is set to NULL_TREE. BASE will be canonicalized to
5900 something you can get the element type from using
5901 TREE_TYPE (TREE_TYPE (BASE)). OFFSET will be the offset
5902 in bytes relative to the base. */
5904 static bool
5905 extract_array_ref (tree expr, tree *base, tree *offset)
5907 /* One canonical form is a PLUS_EXPR with the first
5908 argument being an ADDR_EXPR with a possible NOP_EXPR
5909 attached. */
5910 if (TREE_CODE (expr) == PLUS_EXPR)
5912 tree op0 = TREE_OPERAND (expr, 0);
5913 tree inner_base, dummy1;
5914 /* Strip NOP_EXPRs here because the C frontends and/or
5915 folders may present us with (int *)&x.a + 4B. */
5916 STRIP_NOPS (op0);
5917 if (extract_array_ref (op0, &inner_base, &dummy1))
5919 *base = inner_base;
5920 if (dummy1 == NULL_TREE)
5921 *offset = TREE_OPERAND (expr, 1);
5922 else
5923 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5924 dummy1, TREE_OPERAND (expr, 1));
5925 return true;
5928 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5929 which we transform into an ADDR_EXPR with appropriate
5930 offset. For other arguments to the ADDR_EXPR we assume
5931 zero offset and as such do not care about the ADDR_EXPR
5932 type and strip possible nops from it. */
5933 else if (TREE_CODE (expr) == ADDR_EXPR)
5935 tree op0 = TREE_OPERAND (expr, 0);
5936 if (TREE_CODE (op0) == ARRAY_REF)
5938 tree idx = TREE_OPERAND (op0, 1);
5939 *base = TREE_OPERAND (op0, 0);
5940 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5941 array_ref_element_size (op0));
5943 else
5945 /* Handle array-to-pointer decay as &a. */
5946 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5947 *base = TREE_OPERAND (expr, 0);
5948 else
5949 *base = expr;
5950 *offset = NULL_TREE;
5952 return true;
5954 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5955 else if (SSA_VAR_P (expr)
5956 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5958 *base = expr;
5959 *offset = NULL_TREE;
5960 return true;
5963 return false;
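/* An illustrative sketch of extract_array_ref (hypothetical
   example): given "int a[10];", the tree for &a[3] decomposes
   into BASE a and OFFSET 3 * sizeof (int); a byte-offset form
   such as p + 16B (p a pointer) yields BASE p and OFFSET 16; and
   a plain pointer p yields BASE p with OFFSET NULL_TREE.  */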
5967 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5968 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5969 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5970 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5971 COND is the first argument to CODE; otherwise (as in the example
5972 given here), it is the second argument. TYPE is the type of the
5973 original expression. Return NULL_TREE if no simplification is
5974 possible. */
5976 static tree
5977 fold_binary_op_with_conditional_arg (enum tree_code code,
5978 tree type, tree op0, tree op1,
5979 tree cond, tree arg, int cond_first_p)
5981 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5982 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5983 tree test, true_value, false_value;
5984 tree lhs = NULL_TREE;
5985 tree rhs = NULL_TREE;
5987 /* This transformation is only worthwhile if we don't have to wrap
5988 ARG in a SAVE_EXPR, and the operation can be simplified on at least
5989 one of the branches once it's pushed inside the COND_EXPR. */
5990 if (!TREE_CONSTANT (arg))
5991 return NULL_TREE;
5993 if (TREE_CODE (cond) == COND_EXPR)
5995 test = TREE_OPERAND (cond, 0);
5996 true_value = TREE_OPERAND (cond, 1);
5997 false_value = TREE_OPERAND (cond, 2);
5998 /* If this operand is an expression that throws, then it does
5999 not make sense to try to perform a logical or arithmetic
6000 operation involving it. */
6001 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6002 lhs = true_value;
6003 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6004 rhs = false_value;
6006 else
6008 tree testtype = TREE_TYPE (cond);
6009 test = cond;
6010 true_value = constant_boolean_node (true, testtype);
6011 false_value = constant_boolean_node (false, testtype);
6014 arg = fold_convert (arg_type, arg);
6015 if (lhs == 0)
6017 true_value = fold_convert (cond_type, true_value);
6018 if (cond_first_p)
6019 lhs = fold_build2 (code, type, true_value, arg);
6020 else
6021 lhs = fold_build2 (code, type, arg, true_value);
6023 if (rhs == 0)
6025 false_value = fold_convert (cond_type, false_value);
6026 if (cond_first_p)
6027 rhs = fold_build2 (code, type, false_value, arg);
6028 else
6029 rhs = fold_build2 (code, type, arg, false_value);
6032 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6033 return fold_convert (type, test);
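/* An illustrative sketch of the transformation above
   (hypothetical example, CODE == PLUS_EXPR and a constant ARG):

     5 + (b ? 1 : 2)   ->   b ? 6 : 7
     5 + (x < y)       ->   (x < y) ? 6 : 5

   The addition is pushed into both arms, where it folds with the
   constant operands.  */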
6037 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6039 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6040 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6041 ADDEND is the same as X.
6043 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6044 and finite. The problematic cases are when X is zero, and its mode
6045 has signed zeros. In the case of rounding towards -infinity,
6046 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6047 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6049 static bool
6050 fold_real_zero_addition_p (tree type, tree addend, int negate)
6052 if (!real_zerop (addend))
6053 return false;
6055 /* Don't allow the fold with -fsignaling-nans. */
6056 if (HONOR_SNANS (TYPE_MODE (type)))
6057 return false;
6059 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6060 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6061 return true;
6063 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6064 if (TREE_CODE (addend) == REAL_CST
6065 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6066 negate = !negate;
6068 /* The mode has signed zeros, and we have to honor their sign.
6069 In this situation, there is only one case we can return true for.
6070 X - 0 is the same as X unless rounding towards -infinity is
6071 supported. */
6072 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
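/* An illustrative sketch for IEEE double with signed zeros
   honored (hypothetical example): x - 0.0 and x + (-0.0) fold to
   x only when sign-dependent rounding is not in effect, because
   under round-toward-negative-infinity (+0.0) - 0.0 is -0.0;
   x + 0.0 never folds in that situation, since (-0.0) + 0.0 is
   +0.0 in the other rounding modes.  */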
6075 /* Subroutine of fold() that checks comparisons of built-in math
6076 functions against real constants.
6078 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6079 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6080 is the type of the result and ARG0 and ARG1 are the operands of the
6081 comparison. ARG1 must be a TREE_REAL_CST.
6083 The function returns the constant folded tree if a simplification
6084 can be made, and NULL_TREE otherwise. */
6086 static tree
6087 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6088 tree type, tree arg0, tree arg1)
6090 REAL_VALUE_TYPE c;
6092 if (BUILTIN_SQRT_P (fcode))
6094 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
6095 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6097 c = TREE_REAL_CST (arg1);
6098 if (REAL_VALUE_NEGATIVE (c))
6100 /* sqrt(x) < y is always false, if y is negative. */
6101 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6102 return omit_one_operand (type, integer_zero_node, arg);
6104 /* sqrt(x) > y is always true, if y is negative and we
6105 don't care about NaNs, i.e. negative values of x. */
6106 if (code == NE_EXPR || !HONOR_NANS (mode))
6107 return omit_one_operand (type, integer_one_node, arg);
6109 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6110 return fold_build2 (GE_EXPR, type, arg,
6111 build_real (TREE_TYPE (arg), dconst0));
6113 else if (code == GT_EXPR || code == GE_EXPR)
6115 REAL_VALUE_TYPE c2;
6117 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6118 real_convert (&c2, mode, &c2);
6120 if (REAL_VALUE_ISINF (c2))
6122 /* sqrt(x) > y is x == +Inf, when y is very large. */
6123 if (HONOR_INFINITIES (mode))
6124 return fold_build2 (EQ_EXPR, type, arg,
6125 build_real (TREE_TYPE (arg), c2));
6127 /* sqrt(x) > y is always false, when y is very large
6128 and we don't care about infinities. */
6129 return omit_one_operand (type, integer_zero_node, arg);
6132 /* sqrt(x) > c is the same as x > c*c. */
6133 return fold_build2 (code, type, arg,
6134 build_real (TREE_TYPE (arg), c2));
6136 else if (code == LT_EXPR || code == LE_EXPR)
6138 REAL_VALUE_TYPE c2;
6140 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6141 real_convert (&c2, mode, &c2);
6143 if (REAL_VALUE_ISINF (c2))
6145 /* sqrt(x) < y is always true, when y is a very large
6146 value and we don't care about NaNs or Infinities. */
6147 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6148 return omit_one_operand (type, integer_one_node, arg);
6150 /* sqrt(x) < y is x != +Inf when y is very large and we
6151 don't care about NaNs. */
6152 if (! HONOR_NANS (mode))
6153 return fold_build2 (NE_EXPR, type, arg,
6154 build_real (TREE_TYPE (arg), c2));
6156 /* sqrt(x) < y is x >= 0 when y is very large and we
6157 don't care about Infinities. */
6158 if (! HONOR_INFINITIES (mode))
6159 return fold_build2 (GE_EXPR, type, arg,
6160 build_real (TREE_TYPE (arg), dconst0));
6162 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6163 if (lang_hooks.decls.global_bindings_p () != 0
6164 || CONTAINS_PLACEHOLDER_P (arg))
6165 return NULL_TREE;
6167 arg = save_expr (arg);
6168 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6169 fold_build2 (GE_EXPR, type, arg,
6170 build_real (TREE_TYPE (arg),
6171 dconst0)),
6172 fold_build2 (NE_EXPR, type, arg,
6173 build_real (TREE_TYPE (arg),
6174 c2)));
6177 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6178 if (! HONOR_NANS (mode))
6179 return fold_build2 (code, type, arg,
6180 build_real (TREE_TYPE (arg), c2));
6182 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6183 if (lang_hooks.decls.global_bindings_p () == 0
6184 && ! CONTAINS_PLACEHOLDER_P (arg))
6186 arg = save_expr (arg);
6187 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6188 fold_build2 (GE_EXPR, type, arg,
6189 build_real (TREE_TYPE (arg),
6190 dconst0)),
6191 fold_build2 (code, type, arg,
6192 build_real (TREE_TYPE (arg),
6193 c2)));
6198 return NULL_TREE;
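/* An illustrative sketch of fold_mathfn_compare (hypothetical
   example for sqrt):

     sqrt (x) > 2.0    ->   x > 4.0
     sqrt (x) < -1.0   ->   false  (sqrt is never negative)
     sqrt (x) < 2.0    ->   x >= 0.0 && x < 4.0  (NaNs honored)

   The last form keeps negative x, whose square root is a NaN,
   comparing false.  */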
6201 /* Subroutine of fold() that optimizes comparisons against Infinities,
6202 either +Inf or -Inf.
6204 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6205 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6206 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6208 The function returns the constant folded tree if a simplification
6209 can be made, and NULL_TREE otherwise. */
6211 static tree
6212 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6214 enum machine_mode mode;
6215 REAL_VALUE_TYPE max;
6216 tree temp;
6217 bool neg;
6219 mode = TYPE_MODE (TREE_TYPE (arg0));
6221 /* For negative infinity swap the sense of the comparison. */
6222 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6223 if (neg)
6224 code = swap_tree_comparison (code);
6226 switch (code)
6228 case GT_EXPR:
6229 /* x > +Inf is always false, if we ignore sNaNs. */
6230 if (HONOR_SNANS (mode))
6231 return NULL_TREE;
6232 return omit_one_operand (type, integer_zero_node, arg0);
6234 case LE_EXPR:
6235 /* x <= +Inf is always true, if we don't care about NaNs. */
6236 if (! HONOR_NANS (mode))
6237 return omit_one_operand (type, integer_one_node, arg0);
6239 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6240 if (lang_hooks.decls.global_bindings_p () == 0
6241 && ! CONTAINS_PLACEHOLDER_P (arg0))
6243 arg0 = save_expr (arg0);
6244 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6246 break;
6248 case EQ_EXPR:
6249 case GE_EXPR:
6250 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6251 real_maxval (&max, neg, mode);
6252 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6253 arg0, build_real (TREE_TYPE (arg0), max));
6255 case LT_EXPR:
6256 /* x < +Inf is always equal to x <= DBL_MAX. */
6257 real_maxval (&max, neg, mode);
6258 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6259 arg0, build_real (TREE_TYPE (arg0), max));
6261 case NE_EXPR:
6262 /* x != +Inf is always equal to !(x > DBL_MAX). */
6263 real_maxval (&max, neg, mode);
6264 if (! HONOR_NANS (mode))
6265 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6266 arg0, build_real (TREE_TYPE (arg0), max));
6268 /* The transformation below creates non-gimple code and thus is
6269 not appropriate if we are in gimple form. */
6270 if (in_gimple_form)
6271 return NULL_TREE;
6273 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6274 arg0, build_real (TREE_TYPE (arg0), max));
6275 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6277 default:
6278 break;
6281 return NULL_TREE;
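/* An illustrative sketch of fold_inf_compare for IEEE double
   (hypothetical example):

     x > +Inf    ->   false         (no signaling NaNs)
     x <= +Inf   ->   x == x        (NaNs honored)
     x == +Inf   ->   x > DBL_MAX
     x < +Inf    ->   x <= DBL_MAX

   For -Inf the constant is negative and the senses are swapped
   first.  */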
6284 /* Subroutine of fold() that optimizes comparisons of a division by
6285 a nonzero integer constant against an integer constant, i.e.
6286 X/C1 op C2.
6288 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6289 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6290 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6292 The function returns the constant folded tree if a simplification
6293 can be made, and NULL_TREE otherwise. */
6295 static tree
6296 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6298 tree prod, tmp, hi, lo;
6299 tree arg00 = TREE_OPERAND (arg0, 0);
6300 tree arg01 = TREE_OPERAND (arg0, 1);
6301 unsigned HOST_WIDE_INT lpart;
6302 HOST_WIDE_INT hpart;
6303 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6304 bool neg_overflow;
6305 int overflow;
6307 /* We have to do this the hard way to detect unsigned overflow.
6308 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6309 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6310 TREE_INT_CST_HIGH (arg01),
6311 TREE_INT_CST_LOW (arg1),
6312 TREE_INT_CST_HIGH (arg1),
6313 &lpart, &hpart, unsigned_p);
6314 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6315 prod = force_fit_type (prod, -1, overflow, false);
6316 neg_overflow = false;
6318 if (unsigned_p)
6320 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6321 lo = prod;
6323 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6324 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6325 TREE_INT_CST_HIGH (prod),
6326 TREE_INT_CST_LOW (tmp),
6327 TREE_INT_CST_HIGH (tmp),
6328 &lpart, &hpart, unsigned_p);
6329 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6330 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6331 TREE_CONSTANT_OVERFLOW (prod));
6333 else if (tree_int_cst_sgn (arg01) >= 0)
6335 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6336 switch (tree_int_cst_sgn (arg1))
6338 case -1:
6339 neg_overflow = true;
6340 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6341 hi = prod;
6342 break;
6344 case 0:
6345 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6346 hi = tmp;
6347 break;
6349 case 1:
6350 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6351 lo = prod;
6352 break;
6354 default:
6355 gcc_unreachable ();
6358 else
6360 /* A negative divisor reverses the relational operators. */
6361 code = swap_tree_comparison (code);
6363 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6364 switch (tree_int_cst_sgn (arg1))
6366 case -1:
6367 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6368 lo = prod;
6369 break;
6371 case 0:
6372 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6373 lo = tmp;
6374 break;
6376 case 1:
6377 neg_overflow = true;
6378 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6379 hi = prod;
6380 break;
6382 default:
6383 gcc_unreachable ();
6387 switch (code)
6389 case EQ_EXPR:
6390 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6391 return omit_one_operand (type, integer_zero_node, arg00);
6392 if (TREE_OVERFLOW (hi))
6393 return fold_build2 (GE_EXPR, type, arg00, lo);
6394 if (TREE_OVERFLOW (lo))
6395 return fold_build2 (LE_EXPR, type, arg00, hi);
6396 return build_range_check (type, arg00, 1, lo, hi);
6398 case NE_EXPR:
6399 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6400 return omit_one_operand (type, integer_one_node, arg00);
6401 if (TREE_OVERFLOW (hi))
6402 return fold_build2 (LT_EXPR, type, arg00, lo);
6403 if (TREE_OVERFLOW (lo))
6404 return fold_build2 (GT_EXPR, type, arg00, hi);
6405 return build_range_check (type, arg00, 0, lo, hi);
6407 case LT_EXPR:
6408 if (TREE_OVERFLOW (lo))
6410 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6411 return omit_one_operand (type, tmp, arg00);
6413 return fold_build2 (LT_EXPR, type, arg00, lo);
6415 case LE_EXPR:
6416 if (TREE_OVERFLOW (hi))
6418 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6419 return omit_one_operand (type, tmp, arg00);
6421 return fold_build2 (LE_EXPR, type, arg00, hi);
6423 case GT_EXPR:
6424 if (TREE_OVERFLOW (hi))
6426 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6427 return omit_one_operand (type, tmp, arg00);
6429 return fold_build2 (GT_EXPR, type, arg00, hi);
6431 case GE_EXPR:
6432 if (TREE_OVERFLOW (lo))
6434 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6435 return omit_one_operand (type, tmp, arg00);
6437 return fold_build2 (GE_EXPR, type, arg00, lo);
6439 default:
6440 break;
6443 return NULL_TREE;
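/* An illustrative sketch of fold_div_compare (hypothetical
   example, signed x): x / 4 == 2 holds exactly for x in [8, 11],
   so it folds to the range check 8 <= x && x <= 11, and
   x / 4 < 2 folds to x < 8. The overflow checks above catch
   bounds that fall outside the type's range.  */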
6447 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6448 equality/inequality test, then return a simplified form of the test
6449 using a sign test. Otherwise return NULL_TREE. RESULT_TYPE is the
6450 desired result type. */
6452 static tree
6453 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6454 tree result_type)
6456 /* If this is testing a single bit, we can optimize the test. */
6457 if ((code == NE_EXPR || code == EQ_EXPR)
6458 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6459 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6461 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6462 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6463 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6465 if (arg00 != NULL_TREE
6466 /* This is only a win if casting to a signed type is cheap,
6467 i.e. when arg00's type is not a partial mode. */
6468 && TYPE_PRECISION (TREE_TYPE (arg00))
6469 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6471 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6472 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6473 result_type, fold_convert (stype, arg00),
6474 build_int_cst (stype, 0));
6478 return NULL_TREE;
6481 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6482 equality/inequality test, then return a simplified form of
6483 the test using shifts and logical operations. Otherwise return
6484 NULL_TREE. RESULT_TYPE is the desired result type. */
6486 tree
6487 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6488 tree result_type)
6490 /* If this is testing a single bit, we can optimize the test. */
6491 if ((code == NE_EXPR || code == EQ_EXPR)
6492 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6493 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6495 tree inner = TREE_OPERAND (arg0, 0);
6496 tree type = TREE_TYPE (arg0);
6497 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6498 enum machine_mode operand_mode = TYPE_MODE (type);
6499 int ops_unsigned;
6500 tree signed_type, unsigned_type, intermediate_type;
6501 tree tem;
6503 /* First, see if we can fold the single bit test into a sign-bit
6504 test. */
6505 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6506 result_type);
6507 if (tem)
6508 return tem;
6510 /* Otherwise we have (A & C) != 0 where C is a single bit,
6511 convert that into ((A >> C2) & 1), where C2 = log2(C).
6512 Similarly for (A & C) == 0. */
6514 /* If INNER is a right shift by a constant and the shift count plus
6515 BITNUM does not overflow, adjust BITNUM and INNER. */
6516 if (TREE_CODE (inner) == RSHIFT_EXPR
6517 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6518 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6519 && bitnum < TYPE_PRECISION (type)
6520 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6521 bitnum - TYPE_PRECISION (type)))
6523 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6524 inner = TREE_OPERAND (inner, 0);
6527 /* If we are going to be able to omit the AND below, we must do our
6528 operations as unsigned. If we must use the AND, we have a choice.
6529 Normally unsigned is faster, but for some machines signed is. */
6530 #ifdef LOAD_EXTEND_OP
6531 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6532 && !flag_syntax_only) ? 0 : 1;
6533 #else
6534 ops_unsigned = 1;
6535 #endif
6537 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6538 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6539 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6540 inner = fold_convert (intermediate_type, inner);
6542 if (bitnum != 0)
6543 inner = build2 (RSHIFT_EXPR, intermediate_type,
6544 inner, size_int (bitnum));
6546 if (code == EQ_EXPR)
6547 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6548 inner, integer_one_node);
6550 /* Put the AND last so it can combine with more things. */
6551 inner = build2 (BIT_AND_EXPR, intermediate_type,
6552 inner, integer_one_node);
6554 /* Make sure to return the proper type. */
6555 inner = fold_convert (result_type, inner);
6557 return inner;
6559 return NULL_TREE;
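/* An illustrative sketch of the two single-bit folds
   (hypothetical example, 32-bit int x):

     (x & 0x80000000) != 0   ->   x < 0               (sign test)
     (x & 8) != 0            ->   (x >> 3) & 1        (shift form)
     (x & 8) == 0            ->   ((x >> 3) ^ 1) & 1

   The shift form is computed in a suitable unsigned intermediate
   type, as chosen above.  */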
6562 /* Check whether we are allowed to reorder operands ARG0 and ARG1,
6563 such that the evaluation of ARG1 occurs before ARG0. */
6565 static bool
6566 reorder_operands_p (tree arg0, tree arg1)
6568 if (! flag_evaluation_order)
6569 return true;
6570 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6571 return true;
6572 return ! TREE_SIDE_EFFECTS (arg0)
6573 && ! TREE_SIDE_EFFECTS (arg1);
6576 /* Test whether it is preferable to swap two operands, ARG0 and
6577 ARG1, for example because ARG0 is an integer constant and ARG1
6578 isn't. If REORDER is true, only recommend swapping if we can
6579 evaluate the operands in reverse order. */
6581 bool
6582 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6584 STRIP_SIGN_NOPS (arg0);
6585 STRIP_SIGN_NOPS (arg1);
6587 if (TREE_CODE (arg1) == INTEGER_CST)
6588 return 0;
6589 if (TREE_CODE (arg0) == INTEGER_CST)
6590 return 1;
6592 if (TREE_CODE (arg1) == REAL_CST)
6593 return 0;
6594 if (TREE_CODE (arg0) == REAL_CST)
6595 return 1;
6597 if (TREE_CODE (arg1) == COMPLEX_CST)
6598 return 0;
6599 if (TREE_CODE (arg0) == COMPLEX_CST)
6600 return 1;
6602 if (TREE_CONSTANT (arg1))
6603 return 0;
6604 if (TREE_CONSTANT (arg0))
6605 return 1;
6607 if (optimize_size)
6608 return 0;
6610 if (reorder && flag_evaluation_order
6611 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6612 return 0;
6614 if (DECL_P (arg1))
6615 return 0;
6616 if (DECL_P (arg0))
6617 return 1;
6619 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6620 for commutative and comparison operators. Ensuring a canonical
6621 form allows the optimizers to find additional redundancies without
6622 having to explicitly check for both orderings. */
6623 if (TREE_CODE (arg0) == SSA_NAME
6624 && TREE_CODE (arg1) == SSA_NAME
6625 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6626 return 1;
6628 return 0;
6631 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6632 ARG0 is extended to a wider type. */
6634 static tree
6635 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6637 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6638 tree arg1_unw;
6639 tree shorter_type, outer_type;
6640 tree min, max;
6641 bool above, below;
6643 if (arg0_unw == arg0)
6644 return NULL_TREE;
6645 shorter_type = TREE_TYPE (arg0_unw);
6647 #ifdef HAVE_canonicalize_funcptr_for_compare
6648 /* Disable this optimization if we're casting a function pointer
6649 type on targets that require function pointer canonicalization. */
6650 if (HAVE_canonicalize_funcptr_for_compare
6651 && TREE_CODE (shorter_type) == POINTER_TYPE
6652 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6653 return NULL_TREE;
6654 #endif
6656 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6657 return NULL_TREE;
6659 arg1_unw = get_unwidened (arg1, shorter_type);
6661 /* If possible, express the comparison in the shorter mode. */
6662 if ((code == EQ_EXPR || code == NE_EXPR
6663 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6664 && (TREE_TYPE (arg1_unw) == shorter_type
6665 || (TREE_CODE (arg1_unw) == INTEGER_CST
6666 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6667 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6668 && int_fits_type_p (arg1_unw, shorter_type))))
6669 return fold_build2 (code, type, arg0_unw,
6670 fold_convert (shorter_type, arg1_unw));
6672 if (TREE_CODE (arg1_unw) != INTEGER_CST
6673 || TREE_CODE (shorter_type) != INTEGER_TYPE
6674 || int_fits_type_p (arg1_unw, shorter_type))
6675 return NULL_TREE;
6677 /* If we are comparing with an integer that does not fit into the range
6678 of the shorter type, the result is known. */
6679 outer_type = TREE_TYPE (arg1_unw);
6680 min = lower_bound_in_type (outer_type, shorter_type);
6681 max = upper_bound_in_type (outer_type, shorter_type);
6683 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6684 max, arg1_unw));
6685 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6686 arg1_unw, min));
6688 switch (code)
6690 case EQ_EXPR:
6691 if (above || below)
6692 return omit_one_operand (type, integer_zero_node, arg0);
6693 break;
6695 case NE_EXPR:
6696 if (above || below)
6697 return omit_one_operand (type, integer_one_node, arg0);
6698 break;
6700 case LT_EXPR:
6701 case LE_EXPR:
6702 if (above)
6703 return omit_one_operand (type, integer_one_node, arg0);
6704 else if (below)
6705 return omit_one_operand (type, integer_zero_node, arg0);
6707 case GT_EXPR:
6708 case GE_EXPR:
6709 if (above)
6710 return omit_one_operand (type, integer_zero_node, arg0);
6711 else if (below)
6712 return omit_one_operand (type, integer_one_node, arg0);
6714 default:
6715 break;
6718 return NULL_TREE;
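/* An illustrative sketch of fold_widened_comparison (hypothetical
   example): with "unsigned char c;", the comparison
   (int) c == 300 can never hold because 300 does not fit in the
   shorter type, so it folds to 0, while (int) c == 42 folds to
   the narrower c == 42.  */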
6721 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6722 ARG0 just the signedness is changed. */
6724 static tree
6725 fold_sign_changed_comparison (enum tree_code code, tree type,
6726 tree arg0, tree arg1)
6728 tree arg0_inner, tmp;
6729 tree inner_type, outer_type;
6731 if (TREE_CODE (arg0) != NOP_EXPR
6732 && TREE_CODE (arg0) != CONVERT_EXPR)
6733 return NULL_TREE;
6735 outer_type = TREE_TYPE (arg0);
6736 arg0_inner = TREE_OPERAND (arg0, 0);
6737 inner_type = TREE_TYPE (arg0_inner);
6739 #ifdef HAVE_canonicalize_funcptr_for_compare
6740 /* Disable this optimization if we're casting a function pointer
6741 type on targets that require function pointer canonicalization. */
6742 if (HAVE_canonicalize_funcptr_for_compare
6743 && TREE_CODE (inner_type) == POINTER_TYPE
6744 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6745 return NULL_TREE;
6746 #endif
6748 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6749 return NULL_TREE;
6751 if (TREE_CODE (arg1) != INTEGER_CST
6752 && !((TREE_CODE (arg1) == NOP_EXPR
6753 || TREE_CODE (arg1) == CONVERT_EXPR)
6754 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6755 return NULL_TREE;
6757 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6758 && code != NE_EXPR
6759 && code != EQ_EXPR)
6760 return NULL_TREE;
6762 if (TREE_CODE (arg1) == INTEGER_CST)
6764 tmp = build_int_cst_wide (inner_type,
6765 TREE_INT_CST_LOW (arg1),
6766 TREE_INT_CST_HIGH (arg1));
6767 arg1 = force_fit_type (tmp, 0,
6768 TREE_OVERFLOW (arg1),
6769 TREE_CONSTANT_OVERFLOW (arg1));
6771 else
6772 arg1 = fold_convert (inner_type, arg1);
6774 return fold_build2 (code, type, arg0_inner, arg1);
6777 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6778 the step of the array. Reconstructs s and delta in the case of s * delta
6779 being an integer constant (and thus already folded).
6780 ADDR is the address. OP1 is the multiplicative expression.
6781 If the function succeeds, the new address expression is returned. Otherwise
6782 NULL_TREE is returned. */
6784 static tree
6785 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6787 tree s, delta, step;
6788 tree ref = TREE_OPERAND (addr, 0), pref;
6789 tree ret, pos;
6790 tree itype;
6792 /* Canonicalize op1 into a possibly non-constant delta
6793 and an INTEGER_CST s. */
6794 if (TREE_CODE (op1) == MULT_EXPR)
6796 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6798 STRIP_NOPS (arg0);
6799 STRIP_NOPS (arg1);
6801 if (TREE_CODE (arg0) == INTEGER_CST)
6803 s = arg0;
6804 delta = arg1;
6806 else if (TREE_CODE (arg1) == INTEGER_CST)
6808 s = arg1;
6809 delta = arg0;
6811 else
6812 return NULL_TREE;
6814 else if (TREE_CODE (op1) == INTEGER_CST)
6816 delta = op1;
6817 s = NULL_TREE;
6819 else
6821 /* Treat op1 as delta * 1. */
6822 delta = op1;
6823 s = integer_one_node;
6826 for (;; ref = TREE_OPERAND (ref, 0))
6828 if (TREE_CODE (ref) == ARRAY_REF)
6830 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6831 if (! itype)
6832 continue;
6834 step = array_ref_element_size (ref);
6835 if (TREE_CODE (step) != INTEGER_CST)
6836 continue;
6838 if (s)
6840 if (! tree_int_cst_equal (step, s))
6841 continue;
6843 else
6845 /* Check whether delta is a multiple of the step. */
6846 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6847 if (! tmp)
6848 continue;
6849 delta = tmp;
6852 break;
6855 if (!handled_component_p (ref))
6856 return NULL_TREE;
6859 /* We found a suitable array reference. Copy everything up to it,
6860 and replace the index. */
6862 pref = TREE_OPERAND (addr, 0);
6863 ret = copy_node (pref);
6864 pos = ret;
6866 while (pref != ref)
6868 pref = TREE_OPERAND (pref, 0);
6869 TREE_OPERAND (pos, 0) = copy_node (pref);
6870 pos = TREE_OPERAND (pos, 0);
6873 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6874 fold_convert (itype,
6875 TREE_OPERAND (pos, 1)),
6876 fold_convert (itype, delta));
6878 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
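/* An illustrative sketch of try_move_mult_to_index (hypothetical
   example): for "int a[100];" with element size 4,

     &a[i] + j * 4   ->   &a[i + j]

   because the multiplier matches the array step; a constant
   delta such as &a[i] + 8, being a multiple of the step, gives
   &a[i + 2].  */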
6882 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6883 means A >= Y && A != MAX, but in this case we know that
6884 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6886 static tree
6887 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6889 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6891 if (TREE_CODE (bound) == LT_EXPR)
6892 a = TREE_OPERAND (bound, 0);
6893 else if (TREE_CODE (bound) == GT_EXPR)
6894 a = TREE_OPERAND (bound, 1);
6895 else
6896 return NULL_TREE;
6898 typea = TREE_TYPE (a);
6899 if (!INTEGRAL_TYPE_P (typea)
6900 && !POINTER_TYPE_P (typea))
6901 return NULL_TREE;
6903 if (TREE_CODE (ineq) == LT_EXPR)
6905 a1 = TREE_OPERAND (ineq, 1);
6906 y = TREE_OPERAND (ineq, 0);
6908 else if (TREE_CODE (ineq) == GT_EXPR)
6910 a1 = TREE_OPERAND (ineq, 0);
6911 y = TREE_OPERAND (ineq, 1);
6913 else
6914 return NULL_TREE;
6916 if (TREE_TYPE (a1) != typea)
6917 return NULL_TREE;
6919 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6920 if (!integer_onep (diff))
6921 return NULL_TREE;
6923 return fold_build2 (GE_EXPR, type, a, y);
6926 /* Fold a sum or difference in which at least one operand is a multiplication.
6927 Returns the folded tree or NULL_TREE if no simplification could be made. */
6929 static tree
6930 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6932 tree arg00, arg01, arg10, arg11;
6933 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6935 /* (A * C) +- (B * C) -> (A+-B) * C.
6936 (A * C) +- A -> A * (C+-1).
6937 We are most concerned about the case where C is a constant,
6938 but other combinations show up during loop reduction. Since
6939 it is not difficult, try all four possibilities. */
6941 if (TREE_CODE (arg0) == MULT_EXPR)
6943 arg00 = TREE_OPERAND (arg0, 0);
6944 arg01 = TREE_OPERAND (arg0, 1);
6946 else
6948 arg00 = arg0;
6949 arg01 = build_one_cst (type);
6951 if (TREE_CODE (arg1) == MULT_EXPR)
6953 arg10 = TREE_OPERAND (arg1, 0);
6954 arg11 = TREE_OPERAND (arg1, 1);
6956 else
6958 arg10 = arg1;
6959 arg11 = build_one_cst (type);
6961 same = NULL_TREE;
6963 if (operand_equal_p (arg01, arg11, 0))
6964 same = arg01, alt0 = arg00, alt1 = arg10;
6965 else if (operand_equal_p (arg00, arg10, 0))
6966 same = arg00, alt0 = arg01, alt1 = arg11;
6967 else if (operand_equal_p (arg00, arg11, 0))
6968 same = arg00, alt0 = arg01, alt1 = arg10;
6969 else if (operand_equal_p (arg01, arg10, 0))
6970 same = arg01, alt0 = arg00, alt1 = arg11;
6972 /* No identical multiplicands; see if we can find a common
6973 power-of-two factor in non-power-of-two multiplies. This
6974 can help in multi-dimensional array access. */
6975 else if (host_integerp (arg01, 0)
6976 && host_integerp (arg11, 0))
6978 HOST_WIDE_INT int01, int11, tmp;
6979 bool swap = false;
6980 tree maybe_same;
6981 int01 = TREE_INT_CST_LOW (arg01);
6982 int11 = TREE_INT_CST_LOW (arg11);
6984 /* Move min of absolute values to int11. */
6985 if ((int01 >= 0 ? int01 : -int01)
6986 < (int11 >= 0 ? int11 : -int11))
6988 tmp = int01, int01 = int11, int11 = tmp;
6989 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6990 maybe_same = arg01;
6991 swap = true;
6993 else
6994 maybe_same = arg11;
6996 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6998 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6999 build_int_cst (TREE_TYPE (arg00),
7000 int01 / int11));
7001 alt1 = arg10;
7002 same = maybe_same;
7003 if (swap)
7004 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7008 if (same)
7009 return fold_build2 (MULT_EXPR, type,
7010 fold_build2 (code, type,
7011 fold_convert (type, alt0),
7012 fold_convert (type, alt1)),
7013 fold_convert (type, same));
7015 return NULL_TREE;
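/* An illustrative sketch of fold_plusminus_mult_expr
   (hypothetical example):

     x * 3 + y * 3    ->   (x + y) * 3       common multiplicand
     x * 3 + x        ->   x * 4             implicit factor of 1
     x * 12 + y * 4   ->   (x * 3 + y) * 4   power-of-two factor

   The last case fires because 4 is a power of two that divides
   12.  */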
7018 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7019 specified by EXPR into the buffer PTR of length LEN bytes.
7020 Return the number of bytes placed in the buffer, or zero
7021 upon failure. */
7023 static int
7024 native_encode_int (tree expr, unsigned char *ptr, int len)
7026 tree type = TREE_TYPE (expr);
7027 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7028 int byte, offset, word, words;
7029 unsigned char value;
7031 if (total_bytes > len)
7032 return 0;
7033 words = total_bytes / UNITS_PER_WORD;
7035 for (byte = 0; byte < total_bytes; byte++)
7037 int bitpos = byte * BITS_PER_UNIT;
7038 if (bitpos < HOST_BITS_PER_WIDE_INT)
7039 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7040 else
7041 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7042 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7044 if (total_bytes > UNITS_PER_WORD)
7046 word = byte / UNITS_PER_WORD;
7047 if (WORDS_BIG_ENDIAN)
7048 word = (words - 1) - word;
7049 offset = word * UNITS_PER_WORD;
7050 if (BYTES_BIG_ENDIAN)
7051 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7052 else
7053 offset += byte % UNITS_PER_WORD;
7055 else
7056 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7057 ptr[offset] = value;
7059 return total_bytes;
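/* An illustrative sketch of native_encode_int (hypothetical
   example): on a little-endian target with 32-bit int, encoding
   the INTEGER_CST 0x01020304 stores

     ptr[0] = 0x04, ptr[1] = 0x03, ptr[2] = 0x02, ptr[3] = 0x01

   and returns 4; a big-endian target stores the bytes in the
   opposite order.  */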
7063 /* Subroutine of native_encode_expr. Encode the REAL_CST
7064 specified by EXPR into the buffer PTR of length LEN bytes.
7065 Return the number of bytes placed in the buffer, or zero
7066 upon failure. */
7068 static int
7069 native_encode_real (tree expr, unsigned char *ptr, int len)
7071 tree type = TREE_TYPE (expr);
7072 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7073 int byte, offset, word, words, bitpos;
7074 unsigned char value;
7076 /* There are always 32 bits in each long, no matter the size of
7077 the host's long. We handle floating point representations with
7078 up to 192 bits. */
7079 long tmp[6];
7081 if (total_bytes > len)
7082 return 0;
7083 words = 32 / UNITS_PER_WORD;
7085 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7087 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7088 bitpos += BITS_PER_UNIT)
7090 byte = (bitpos / BITS_PER_UNIT) & 3;
7091 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7093 if (UNITS_PER_WORD < 4)
7095 word = byte / UNITS_PER_WORD;
7096 if (WORDS_BIG_ENDIAN)
7097 word = (words - 1) - word;
7098 offset = word * UNITS_PER_WORD;
7099 if (BYTES_BIG_ENDIAN)
7100 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7101 else
7102 offset += byte % UNITS_PER_WORD;
7104 else
7105 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7106 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7108 return total_bytes;
7111 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7112 specified by EXPR into the buffer PTR of length LEN bytes.
7113 Return the number of bytes placed in the buffer, or zero
7114 upon failure. */
7116 static int
7117 native_encode_complex (tree expr, unsigned char *ptr, int len)
7119 int rsize, isize;
7120 tree part;
7122 part = TREE_REALPART (expr);
7123 rsize = native_encode_expr (part, ptr, len);
7124 if (rsize == 0)
7125 return 0;
7126 part = TREE_IMAGPART (expr);
7127 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7128 if (isize != rsize)
7129 return 0;
7130 return rsize + isize;
7134 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7135 specified by EXPR into the buffer PTR of length LEN bytes.
7136 Return the number of bytes placed in the buffer, or zero
7137 upon failure. */
7139 static int
7140 native_encode_vector (tree expr, unsigned char *ptr, int len)
7142 int i, size, offset, count;
7143 tree itype, elem, elements;
7145 offset = 0;
7146 elements = TREE_VECTOR_CST_ELTS (expr);
7147 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7148 itype = TREE_TYPE (TREE_TYPE (expr));
7149 size = GET_MODE_SIZE (TYPE_MODE (itype));
7150 for (i = 0; i < count; i++)
7152 if (elements)
7154 elem = TREE_VALUE (elements);
7155 elements = TREE_CHAIN (elements);
7157 else
7158 elem = NULL_TREE;
7160 if (elem)
7162 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7163 return 0;
7165 else
7167 if (offset + size > len)
7168 return 0;
7169 memset (ptr+offset, 0, size);
7171 offset += size;
7173 return offset;
7177 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7178 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7179 buffer PTR of length LEN bytes. Return the number of bytes
7180 placed in the buffer, or zero upon failure. */
7182 static int
7183 native_encode_expr (tree expr, unsigned char *ptr, int len)
7185 switch (TREE_CODE (expr))
7187 case INTEGER_CST:
7188 return native_encode_int (expr, ptr, len);
7190 case REAL_CST:
7191 return native_encode_real (expr, ptr, len);
7193 case COMPLEX_CST:
7194 return native_encode_complex (expr, ptr, len);
7196 case VECTOR_CST:
7197 return native_encode_vector (expr, ptr, len);
7199 default:
7200 return 0;
7205 /* Subroutine of native_interpret_expr. Interpret the contents of
7206 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7207 If the buffer cannot be interpreted, return NULL_TREE. */
7209 static tree
7210 native_interpret_int (tree type, unsigned char *ptr, int len)
7212 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7213 int byte, offset, word, words;
7214 unsigned char value;
7215 unsigned int HOST_WIDE_INT lo = 0;
7216 HOST_WIDE_INT hi = 0;
7218 if (total_bytes > len)
7219 return NULL_TREE;
7220 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7221 return NULL_TREE;
7222 words = total_bytes / UNITS_PER_WORD;
7224 for (byte = 0; byte < total_bytes; byte++)
7226 int bitpos = byte * BITS_PER_UNIT;
7227 if (total_bytes > UNITS_PER_WORD)
7229 word = byte / UNITS_PER_WORD;
7230 if (WORDS_BIG_ENDIAN)
7231 word = (words - 1) - word;
7232 offset = word * UNITS_PER_WORD;
7233 if (BYTES_BIG_ENDIAN)
7234 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7235 else
7236 offset += byte % UNITS_PER_WORD;
7238 else
7239 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7240 value = ptr[offset];
7242 if (bitpos < HOST_BITS_PER_WIDE_INT)
7243 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7244 else
7245 hi |= (unsigned HOST_WIDE_INT) value
7246 << (bitpos - HOST_BITS_PER_WIDE_INT);
7249 return force_fit_type (build_int_cst_wide (type, lo, hi),
7250 0, false, false);
7254 /* Subroutine of native_interpret_expr. Interpret the contents of
7255 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7256 If the buffer cannot be interpreted, return NULL_TREE. */
7258 static tree
7259 native_interpret_real (tree type, unsigned char *ptr, int len)
7261 enum machine_mode mode = TYPE_MODE (type);
7262 int total_bytes = GET_MODE_SIZE (mode);
7263 int byte, offset, word, words, bitpos;
7264 unsigned char value;
7265 /* There are always 32 bits in each long, no matter the size of
7266 the host's long. We handle floating point representations with
7267 up to 192 bits. */
7268 REAL_VALUE_TYPE r;
7269 long tmp[6];
7271 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7272 if (total_bytes > len || total_bytes > 24)
7273 return NULL_TREE;
7274 words = 32 / UNITS_PER_WORD;
7276 memset (tmp, 0, sizeof (tmp));
7277 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7278 bitpos += BITS_PER_UNIT)
7280 byte = (bitpos / BITS_PER_UNIT) & 3;
7281 if (UNITS_PER_WORD < 4)
7283 word = byte / UNITS_PER_WORD;
7284 if (WORDS_BIG_ENDIAN)
7285 word = (words - 1) - word;
7286 offset = word * UNITS_PER_WORD;
7287 if (BYTES_BIG_ENDIAN)
7288 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7289 else
7290 offset += byte % UNITS_PER_WORD;
7292 else
7293 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7294 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7296 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7299 real_from_target (&r, tmp, mode);
7300 return build_real (type, r);
7304 /* Subroutine of native_interpret_expr. Interpret the contents of
7305 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7306 If the buffer cannot be interpreted, return NULL_TREE. */
7308 static tree
7309 native_interpret_complex (tree type, unsigned char *ptr, int len)
7311 tree etype, rpart, ipart;
7312 int size;
7314 etype = TREE_TYPE (type);
7315 size = GET_MODE_SIZE (TYPE_MODE (etype));
7316 if (size * 2 > len)
7317 return NULL_TREE;
7318 rpart = native_interpret_expr (etype, ptr, size);
7319 if (!rpart)
7320 return NULL_TREE;
7321 ipart = native_interpret_expr (etype, ptr+size, size);
7322 if (!ipart)
7323 return NULL_TREE;
7324 return build_complex (type, rpart, ipart);
7328 /* Subroutine of native_interpret_expr. Interpret the contents of
7329 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7330 If the buffer cannot be interpreted, return NULL_TREE. */
7332 static tree
7333 native_interpret_vector (tree type, unsigned char *ptr, int len)
7335 tree etype, elem, elements;
7336 int i, size, count;
7338 etype = TREE_TYPE (type);
7339 size = GET_MODE_SIZE (TYPE_MODE (etype));
7340 count = TYPE_VECTOR_SUBPARTS (type);
7341 if (size * count > len)
7342 return NULL_TREE;
7344 elements = NULL_TREE;
7345 for (i = count - 1; i >= 0; i--)
7347 elem = native_interpret_expr (etype, ptr+(i*size), size);
7348 if (!elem)
7349 return NULL_TREE;
7350 elements = tree_cons (NULL_TREE, elem, elements);
7352 return build_vector (type, elements);
7356 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7357 the buffer PTR of length LEN as a constant of type TYPE. For
7358 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7359 we return a REAL_CST, etc. If the buffer cannot be interpreted,
7360 return NULL_TREE. */
7362 static tree
7363 native_interpret_expr (tree type, unsigned char *ptr, int len)
7365 switch (TREE_CODE (type))
7367 case INTEGER_TYPE:
7368 case ENUMERAL_TYPE:
7369 case BOOLEAN_TYPE:
7370 return native_interpret_int (type, ptr, len);
7372 case REAL_TYPE:
7373 return native_interpret_real (type, ptr, len);
7375 case COMPLEX_TYPE:
7376 return native_interpret_complex (type, ptr, len);
7378 case VECTOR_TYPE:
7379 return native_interpret_vector (type, ptr, len);
7381 default:
7382 return NULL_TREE;
7387 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7388 TYPE at compile-time. If we're unable to perform the conversion
7389 return NULL_TREE. */
7391 static tree
7392 fold_view_convert_expr (tree type, tree expr)
7394 /* We support up to 512-bit values (for V8DFmode). */
7395 unsigned char buffer[64];
7396 int len;
7398 /* Check that the host and target are sane. */
7399 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7400 return NULL_TREE;
7402 len = native_encode_expr (expr, buffer, sizeof (buffer));
7403 if (len == 0)
7404 return NULL_TREE;
7406 return native_interpret_expr (type, buffer, len);
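/* Round-trip sketch (assuming an IEEE single-precision target of
   matching endianness): folding VIEW_CONVERT_EXPR<float> applied to
   the 32-bit integer constant 0x3f800000 encodes the integer into
   BUFFER and reinterprets those bytes, yielding the REAL_CST 1.0f.  */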
7410 /* Fold a unary expression of code CODE and type TYPE with operand
7411 OP0. Return the folded expression if folding is successful.
7412 Otherwise, return NULL_TREE. */
7414 tree
7415 fold_unary (enum tree_code code, tree type, tree op0)
7417 tree tem;
7418 tree arg0;
7419 enum tree_code_class kind = TREE_CODE_CLASS (code);
7421 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7422 && TREE_CODE_LENGTH (code) == 1);
7424 arg0 = op0;
7425 if (arg0)
7427 if (code == NOP_EXPR || code == CONVERT_EXPR
7428 || code == FLOAT_EXPR || code == ABS_EXPR)
7430 /* Don't use STRIP_NOPS, because signedness of argument type
7431 matters. */
7432 STRIP_SIGN_NOPS (arg0);
7434 else
7436 /* Strip any conversions that don't change the mode. This
7437 is safe for every expression, except for a comparison
7438 expression because its signedness is derived from its
7439 operands.
7441 Note that this is done as an internal manipulation within
7442 the constant folder, in order to find the simplest
7443 representation of the arguments so that their form can be
7444 studied.  In any case, the appropriate type conversions
7445 should be put back in the tree that will get out of the
7446 constant folder. */
7447 STRIP_NOPS (arg0);
7451 if (TREE_CODE_CLASS (code) == tcc_unary)
7453 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7454 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7455 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7456 else if (TREE_CODE (arg0) == COND_EXPR)
7458 tree arg01 = TREE_OPERAND (arg0, 1);
7459 tree arg02 = TREE_OPERAND (arg0, 2);
7460 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7461 arg01 = fold_build1 (code, type, arg01);
7462 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7463 arg02 = fold_build1 (code, type, arg02);
7464 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7465 arg01, arg02);
7467 /* If this was a conversion, and all we did was to move it
7468 inside the COND_EXPR, bring it back out. But leave it if
7469 it is a conversion from integer to integer and the
7470 result precision is no wider than a word since such a
7471 conversion is cheap and may be optimized away by combine,
7472 while it couldn't if it were outside the COND_EXPR. Then return
7473 so we don't get into an infinite recursion loop taking the
7474 conversion out and then back in. */
7476 if ((code == NOP_EXPR || code == CONVERT_EXPR
7477 || code == NON_LVALUE_EXPR)
7478 && TREE_CODE (tem) == COND_EXPR
7479 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7480 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7481 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7482 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7483 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7484 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7485 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7486 && (INTEGRAL_TYPE_P
7487 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7488 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7489 || flag_syntax_only))
7490 tem = build1 (code, type,
7491 build3 (COND_EXPR,
7492 TREE_TYPE (TREE_OPERAND
7493 (TREE_OPERAND (tem, 1), 0)),
7494 TREE_OPERAND (tem, 0),
7495 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7496 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7497 return tem;
7499 else if (COMPARISON_CLASS_P (arg0))
7501 if (TREE_CODE (type) == BOOLEAN_TYPE)
7503 arg0 = copy_node (arg0);
7504 TREE_TYPE (arg0) = type;
7505 return arg0;
7507 else if (TREE_CODE (type) != INTEGER_TYPE)
7508 return fold_build3 (COND_EXPR, type, arg0,
7509 fold_build1 (code, type,
7510 integer_one_node),
7511 fold_build1 (code, type,
7512 integer_zero_node));
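/* For instance, the case above rewrites (float) (a < b), whose type
   is neither boolean nor integer, as a < b ? (float) 1 : (float) 0,
   so the conversion can fold against the constant arms.  */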
7516 switch (code)
7518 case NOP_EXPR:
7519 case FLOAT_EXPR:
7520 case CONVERT_EXPR:
7521 case FIX_TRUNC_EXPR:
7522 case FIX_CEIL_EXPR:
7523 case FIX_FLOOR_EXPR:
7524 case FIX_ROUND_EXPR:
7525 if (TREE_TYPE (op0) == type)
7526 return op0;
7528 /* If we have (type) (a CMP b) and type is an integral type, return
7529 a new expression involving the new type. */
7530 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7531 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7532 TREE_OPERAND (op0, 1));
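/* e.g. (long) (a < b) becomes the comparison a < b computed
   directly with result type long.  */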
7534 /* Handle cases of two conversions in a row. */
7535 if (TREE_CODE (op0) == NOP_EXPR
7536 || TREE_CODE (op0) == CONVERT_EXPR)
7538 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7539 tree inter_type = TREE_TYPE (op0);
7540 int inside_int = INTEGRAL_TYPE_P (inside_type);
7541 int inside_ptr = POINTER_TYPE_P (inside_type);
7542 int inside_float = FLOAT_TYPE_P (inside_type);
7543 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7544 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7545 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7546 int inter_int = INTEGRAL_TYPE_P (inter_type);
7547 int inter_ptr = POINTER_TYPE_P (inter_type);
7548 int inter_float = FLOAT_TYPE_P (inter_type);
7549 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7550 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7551 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7552 int final_int = INTEGRAL_TYPE_P (type);
7553 int final_ptr = POINTER_TYPE_P (type);
7554 int final_float = FLOAT_TYPE_P (type);
7555 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7556 unsigned int final_prec = TYPE_PRECISION (type);
7557 int final_unsignedp = TYPE_UNSIGNED (type);
7559 /* In addition to the cases of two conversions in a row
7560 handled below, if we are converting something to its own
7561 type via an object of identical or wider precision, neither
7562 conversion is needed. */
7563 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7564 && (((inter_int || inter_ptr) && final_int)
7565 || (inter_float && final_float))
7566 && inter_prec >= final_prec)
7567 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7569 /* Likewise, if the intermediate and final types are either both
7570 float or both integer, we don't need the middle conversion if
7571 it is wider than the final type and doesn't change the signedness
7572 (for integers). Avoid this if the final type is a pointer
7573 since then we sometimes need the inner conversion. Likewise if
7574 the outer has a precision not equal to the size of its mode. */
7575 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7576 || (inter_float && inside_float)
7577 || (inter_vec && inside_vec))
7578 && inter_prec >= inside_prec
7579 && (inter_float || inter_vec
7580 || inter_unsignedp == inside_unsignedp)
7581 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7582 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7583 && ! final_ptr
7584 && (! final_vec || inter_prec == inside_prec))
7585 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7587 /* If we have a sign-extension of a zero-extended value, we can
7588 replace that by a single zero-extension. */
7589 if (inside_int && inter_int && final_int
7590 && inside_prec < inter_prec && inter_prec < final_prec
7591 && inside_unsignedp && !inter_unsignedp)
7592 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
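/* Example (assuming 8-bit chars, 16-bit shorts and 32-bit ints): in
   (int) (short) (unsigned char) c the inner widening is a zero
   extension, so the sign bit seen by the outer extension is never
   set and (int) (unsigned char) c is equivalent.  */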
7594 /* Two conversions in a row are not needed unless:
7595 - some conversion is floating-point (overstrict for now), or
7596 - some conversion is a vector (overstrict for now), or
7597 - the intermediate type is narrower than both initial and
7598 final, or
7599 - the intermediate type and innermost type differ in signedness,
7600 and the outermost type is wider than the intermediate, or
7601 - the initial type is a pointer type and the precisions of the
7602 intermediate and final types differ, or
7603 - the final type is a pointer type and the precisions of the
7604 initial and intermediate types differ, or
7605 - the final type is a pointer type and the initial type is not, or
7606 - the initial type is a pointer to an array and the final type
7607 is not (see the example below). */
7608 /* Java pointer type conversions generate checks in some
7609 cases, so we explicitly disallow this optimization. */
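/* A concrete case passing all of the tests above (assuming 32-bit
   int and 64-bit long): for an int I, (int) (long) I widens and then
   narrows back with no change of signedness, so it folds to plain
   I.  */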
7610 if (! inside_float && ! inter_float && ! final_float
7611 && ! inside_vec && ! inter_vec && ! final_vec
7612 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7613 && ! (inside_int && inter_int
7614 && inter_unsignedp != inside_unsignedp
7615 && inter_prec < final_prec)
7616 && ((inter_unsignedp && inter_prec > inside_prec)
7617 == (final_unsignedp && final_prec > inter_prec))
7618 && ! (inside_ptr && inter_prec != final_prec)
7619 && ! (final_ptr && inside_prec != inter_prec)
7620 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7621 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7622 && final_ptr == inside_ptr
7623 && ! (inside_ptr
7624 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7625 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE)
7626 && ! ((strcmp (lang_hooks.name, "GNU Java") == 0)
7627 && final_ptr))
7628 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7631 /* Handle (T *)&A.B.C for A being of type T and B and C
7632 living at offset zero. This occurs frequently in
7633 C++ upcasting and then accessing the base. */
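/* Sketch: for struct T { struct B b; } a; the cast (struct T *) &a.b
   refers to offset zero of an object whose type matches the
   pointed-to type, so it folds to &a converted to struct T *.  */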
7634 if (TREE_CODE (op0) == ADDR_EXPR
7635 && POINTER_TYPE_P (type)
7636 && handled_component_p (TREE_OPERAND (op0, 0)))
7638 HOST_WIDE_INT bitsize, bitpos;
7639 tree offset;
7640 enum machine_mode mode;
7641 int unsignedp, volatilep;
7642 tree base = TREE_OPERAND (op0, 0);
7643 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7644 &mode, &unsignedp, &volatilep, false);
7645 /* If the reference was to a (constant) zero offset, we can use
7646 the address of the base if it has the same base type
7647 as the result type. */
7648 if (! offset && bitpos == 0
7649 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7650 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7651 return fold_convert (type, build_fold_addr_expr (base));
7654 if (TREE_CODE (op0) == MODIFY_EXPR
7655 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7656 /* Detect assigning a bitfield. */
7657 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7658 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7660 /* Don't leave an assignment inside a conversion
7661 unless assigning a bitfield. */
7662 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7663 /* First do the assignment, then return converted constant. */
7664 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7665 TREE_NO_WARNING (tem) = 1;
7666 TREE_USED (tem) = 1;
7667 return tem;
7670 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7671 constant (if x has signed type, the sign bit cannot be set
7672 in c). This folds extension into the BIT_AND_EXPR. */
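/* e.g. for unsigned char C, (unsigned int) (C & 0x7f) becomes
   (unsigned int) C & 0x7f, moving the widening past the mask.  */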
7673 if (INTEGRAL_TYPE_P (type)
7674 && TREE_CODE (type) != BOOLEAN_TYPE
7675 && TREE_CODE (op0) == BIT_AND_EXPR
7676 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7678 tree and = op0;
7679 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7680 int change = 0;
7682 if (TYPE_UNSIGNED (TREE_TYPE (and))
7683 || (TYPE_PRECISION (type)
7684 <= TYPE_PRECISION (TREE_TYPE (and))))
7685 change = 1;
7686 else if (TYPE_PRECISION (TREE_TYPE (and1))
7687 <= HOST_BITS_PER_WIDE_INT
7688 && host_integerp (and1, 1))
7690 unsigned HOST_WIDE_INT cst;
7692 cst = tree_low_cst (and1, 1);
7693 cst &= (HOST_WIDE_INT) -1
7694 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7695 change = (cst == 0);
7696 #ifdef LOAD_EXTEND_OP
7697 if (change
7698 && !flag_syntax_only
7699 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7700 == ZERO_EXTEND))
7702 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7703 and0 = fold_convert (uns, and0);
7704 and1 = fold_convert (uns, and1);
7706 #endif
7708 if (change)
7710 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7711 TREE_INT_CST_HIGH (and1));
7712 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7713 TREE_CONSTANT_OVERFLOW (and1));
7714 return fold_build2 (BIT_AND_EXPR, type,
7715 fold_convert (type, and0), tem);
7719 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7720 T2 being pointers to types of the same size. */
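/* Illustration (assuming int and float have the same size): in
   (float *) ((int *) p + i) both pointed-to types have equal
   TYPE_SIZE, so the scaling of I is unaffected and the expression
   can become (float *) p + i.  */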
7721 if (POINTER_TYPE_P (type)
7722 && BINARY_CLASS_P (arg0)
7723 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7724 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7726 tree arg00 = TREE_OPERAND (arg0, 0);
7727 tree t0 = type;
7728 tree t1 = TREE_TYPE (arg00);
7729 tree tt0 = TREE_TYPE (t0);
7730 tree tt1 = TREE_TYPE (t1);
7731 tree s0 = TYPE_SIZE (tt0);
7732 tree s1 = TYPE_SIZE (tt1);
7734 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7735 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7736 TREE_OPERAND (arg0, 1));
7739 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7740 of the same precision, and X is an integer type not narrower than
7741 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
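/* e.g. for an int Y, (int) ~ (unsigned) Y satisfies all of the
   precision checks and folds to ~Y.  */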
7742 if (INTEGRAL_TYPE_P (type)
7743 && TREE_CODE (op0) == BIT_NOT_EXPR
7744 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7745 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7746 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7747 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7749 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7750 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7751 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7752 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7755 tem = fold_convert_const (code, type, op0);
7756 return tem ? tem : NULL_TREE;
7758 case VIEW_CONVERT_EXPR:
7759 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7760 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7761 return fold_view_convert_expr (type, op0);
7763 case NEGATE_EXPR:
7764 tem = fold_negate_expr (arg0);
7765 if (tem)
7766 return fold_convert (type, tem);
7767 return NULL_TREE;
7769 case ABS_EXPR:
7770 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7771 return fold_abs_const (arg0, type);
7772 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7773 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7774 /* Convert fabs((double)float) into (double)fabsf(float). */
7775 else if (TREE_CODE (arg0) == NOP_EXPR
7776 && TREE_CODE (type) == REAL_TYPE)
7778 tree targ0 = strip_float_extensions (arg0);
7779 if (targ0 != arg0)
7780 return fold_convert (type, fold_build1 (ABS_EXPR,
7781 TREE_TYPE (targ0),
7782 targ0));
7784 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7785 else if (TREE_CODE (arg0) == ABS_EXPR)
7786 return arg0;
7787 else if (tree_expr_nonnegative_p (arg0))
7788 return arg0;
7790 /* Strip sign ops from argument. */
7791 if (TREE_CODE (type) == REAL_TYPE)
7793 tem = fold_strip_sign_ops (arg0);
7794 if (tem)
7795 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7797 return NULL_TREE;
7799 case CONJ_EXPR:
7800 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7801 return fold_convert (type, arg0);
7802 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7804 tree itype = TREE_TYPE (type);
7805 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7806 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7807 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7809 if (TREE_CODE (arg0) == COMPLEX_CST)
7811 tree itype = TREE_TYPE (type);
7812 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7813 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7814 return build_complex (type, rpart, negate_expr (ipart));
7816 if (TREE_CODE (arg0) == CONJ_EXPR)
7817 return fold_convert (type, TREE_OPERAND (arg0, 0));
7818 return NULL_TREE;
7820 case BIT_NOT_EXPR:
7821 if (TREE_CODE (arg0) == INTEGER_CST)
7822 return fold_not_const (arg0, type);
7823 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7824 return TREE_OPERAND (arg0, 0);
7825 /* Convert ~ (-A) to A - 1. */
7826 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7827 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7828 build_int_cst (type, 1));
7829 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7830 else if (INTEGRAL_TYPE_P (type)
7831 && ((TREE_CODE (arg0) == MINUS_EXPR
7832 && integer_onep (TREE_OPERAND (arg0, 1)))
7833 || (TREE_CODE (arg0) == PLUS_EXPR
7834 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7835 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7836 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
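/* e.g. in ~(x ^ 0xff) the subexpression ~0xff folds to a constant,
   so the whole expression becomes x ^ ~0xff instead of keeping the
   outer BIT_NOT.  */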
7837 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7838 && (tem = fold_unary (BIT_NOT_EXPR, type,
7839 fold_convert (type,
7840 TREE_OPERAND (arg0, 0)))))
7841 return fold_build2 (BIT_XOR_EXPR, type, tem,
7842 fold_convert (type, TREE_OPERAND (arg0, 1)));
7843 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7844 && (tem = fold_unary (BIT_NOT_EXPR, type,
7845 fold_convert (type,
7846 TREE_OPERAND (arg0, 1)))))
7847 return fold_build2 (BIT_XOR_EXPR, type,
7848 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7850 return NULL_TREE;
7852 case TRUTH_NOT_EXPR:
7853 /* The argument to invert_truthvalue must have Boolean type. */
7854 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7855 arg0 = fold_convert (boolean_type_node, arg0);
7857 /* Note that the operand of this must be an int
7858 and its values must be 0 or 1.
7859 ("true" is a fixed value perhaps depending on the language,
7860 but we don't handle values other than 1 correctly yet.) */
7861 tem = fold_truth_not_expr (arg0);
7862 if (!tem)
7863 return NULL_TREE;
7864 return fold_convert (type, tem);
7866 case REALPART_EXPR:
7867 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7868 return fold_convert (type, arg0);
7869 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7870 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7871 TREE_OPERAND (arg0, 1));
7872 if (TREE_CODE (arg0) == COMPLEX_CST)
7873 return fold_convert (type, TREE_REALPART (arg0));
7874 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7876 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7877 tem = fold_build2 (TREE_CODE (arg0), itype,
7878 fold_build1 (REALPART_EXPR, itype,
7879 TREE_OPERAND (arg0, 0)),
7880 fold_build1 (REALPART_EXPR, itype,
7881 TREE_OPERAND (arg0, 1)));
7882 return fold_convert (type, tem);
7884 if (TREE_CODE (arg0) == CONJ_EXPR)
7886 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7887 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7888 return fold_convert (type, tem);
7890 return NULL_TREE;
7892 case IMAGPART_EXPR:
7893 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7894 return fold_convert (type, integer_zero_node);
7895 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7896 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7897 TREE_OPERAND (arg0, 0));
7898 if (TREE_CODE (arg0) == COMPLEX_CST)
7899 return fold_convert (type, TREE_IMAGPART (arg0));
7900 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7902 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7903 tem = fold_build2 (TREE_CODE (arg0), itype,
7904 fold_build1 (IMAGPART_EXPR, itype,
7905 TREE_OPERAND (arg0, 0)),
7906 fold_build1 (IMAGPART_EXPR, itype,
7907 TREE_OPERAND (arg0, 1)));
7908 return fold_convert (type, tem);
7910 if (TREE_CODE (arg0) == CONJ_EXPR)
7912 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7913 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7914 return fold_convert (type, negate_expr (tem));
7916 return NULL_TREE;
7918 default:
7919 return NULL_TREE;
7920 } /* switch (code) */
7923 /* Fold a binary expression of code CODE and type TYPE with operands
7924 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7925 Return the folded expression if folding is successful. Otherwise,
7926 return NULL_TREE. */
7928 static tree
7929 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7931 enum tree_code compl_code;
7933 if (code == MIN_EXPR)
7934 compl_code = MAX_EXPR;
7935 else if (code == MAX_EXPR)
7936 compl_code = MIN_EXPR;
7937 else
7938 gcc_unreachable ();
7940 /* MIN (MAX (a, b), b) == b. */
7941 if (TREE_CODE (op0) == compl_code
7942 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7943 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7945 /* MIN (MAX (b, a), b) == b. */
7946 if (TREE_CODE (op0) == compl_code
7947 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7948 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7949 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7951 /* MIN (a, MAX (a, b)) == a. */
7952 if (TREE_CODE (op1) == compl_code
7953 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7954 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7955 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7957 /* MIN (a, MAX (b, a)) == a. */
7958 if (TREE_CODE (op1) == compl_code
7959 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7960 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7961 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7963 return NULL_TREE;
7966 /* Subroutine of fold_binary. This routine performs all of the
7967 transformations that are common to the equality/inequality
7968 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7969 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7970 fold_binary itself should use fold_binary instead. Fold a comparison with
7971 tree code CODE and type TYPE with operands OP0 and OP1. Return
7972 the folded comparison or NULL_TREE. */
7974 static tree
7975 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7977 tree arg0, arg1, tem;
7979 arg0 = op0;
7980 arg1 = op1;
7982 STRIP_SIGN_NOPS (arg0);
7983 STRIP_SIGN_NOPS (arg1);
7985 tem = fold_relational_const (code, type, arg0, arg1);
7986 if (tem != NULL_TREE)
7987 return tem;
7989 /* If one arg is a real or integer constant, put it last. */
7990 if (tree_swap_operands_p (arg0, arg1, true))
7991 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7993 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
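/* e.g. when signed overflow is undefined, x + 5 < 10 is rewritten
   below as x < 10 - 5, i.e. x < 5; a strict-overflow warning is
   issued since the rewrite assumes x + 5 does not wrap.  */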
7994 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7995 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7996 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7997 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
7998 && (TREE_CODE (arg1) == INTEGER_CST
7999 && !TREE_OVERFLOW (arg1)))
8001 tree const1 = TREE_OPERAND (arg0, 1);
8002 tree const2 = arg1;
8003 tree variable = TREE_OPERAND (arg0, 0);
8004 tree lhs;
8005 int lhs_add;
8006 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8008 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8009 TREE_TYPE (arg1), const2, const1);
8010 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8011 && (TREE_CODE (lhs) != INTEGER_CST
8012 || !TREE_OVERFLOW (lhs)))
8014 fold_overflow_warning (("assuming signed overflow does not occur "
8015 "when changing X +- C1 cmp C2 to "
8016 "X cmp C1 +- C2"),
8017 WARN_STRICT_OVERFLOW_COMPARISON);
8018 return fold_build2 (code, type, variable, lhs);
8022 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8023 same object, then we can fold this to a comparison of the two offsets in
8024 signed size type. This is possible because pointer arithmetic is
8025 restricted to remain within an object and overflow on pointer differences
8026 is undefined as of C99 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8028 We check flag_wrapv directly because pointer types are unsigned,
8029 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8030 normally what we want, to avoid certain odd overflow cases, but not
8031 here. */
8032 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8033 && !flag_wrapv
8034 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8036 tree base0, offset0, base1, offset1;
8038 if (extract_array_ref (arg0, &base0, &offset0)
8039 && extract_array_ref (arg1, &base1, &offset1)
8040 && operand_equal_p (base0, base1, 0))
8042 tree signed_size_type_node;
8043 signed_size_type_node = signed_type_for (size_type_node);
8045 /* By converting to signed size type we cover middle-end pointer
8046 arithmetic, which operates on unsigned pointer types of size-type
8047 width, as well as ARRAY_REF offsets, which are properly sign- or
8048 zero-extended from their type in case it is narrower than
8049 size type. */
8050 if (offset0 == NULL_TREE)
8051 offset0 = build_int_cst (signed_size_type_node, 0);
8052 else
8053 offset0 = fold_convert (signed_size_type_node, offset0);
8054 if (offset1 == NULL_TREE)
8055 offset1 = build_int_cst (signed_size_type_node, 0);
8056 else
8057 offset1 = fold_convert (signed_size_type_node, offset1);
8059 return fold_build2 (code, type, offset0, offset1);
8063 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8065 tree targ0 = strip_float_extensions (arg0);
8066 tree targ1 = strip_float_extensions (arg1);
8067 tree newtype = TREE_TYPE (targ0);
8069 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8070 newtype = TREE_TYPE (targ1);
8072 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8073 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8074 return fold_build2 (code, type, fold_convert (newtype, targ0),
8075 fold_convert (newtype, targ1));
8077 /* (-a) CMP (-b) -> b CMP a */
8078 if (TREE_CODE (arg0) == NEGATE_EXPR
8079 && TREE_CODE (arg1) == NEGATE_EXPR)
8080 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8081 TREE_OPERAND (arg0, 0));
8083 if (TREE_CODE (arg1) == REAL_CST)
8085 REAL_VALUE_TYPE cst;
8086 cst = TREE_REAL_CST (arg1);
8088 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8089 if (TREE_CODE (arg0) == NEGATE_EXPR)
8090 return fold_build2 (swap_tree_comparison (code), type,
8091 TREE_OPERAND (arg0, 0),
8092 build_real (TREE_TYPE (arg1),
8093 REAL_VALUE_NEGATE (cst)));
8095 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8096 /* a CMP (-0) -> a CMP 0 */
8097 if (REAL_VALUE_MINUS_ZERO (cst))
8098 return fold_build2 (code, type, arg0,
8099 build_real (TREE_TYPE (arg1), dconst0));
8101 /* x != NaN is always true, other ops are always false. */
8102 if (REAL_VALUE_ISNAN (cst)
8103 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8105 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8106 return omit_one_operand (type, tem, arg0);
8109 /* Fold comparisons against infinity. */
8110 if (REAL_VALUE_ISINF (cst))
8112 tem = fold_inf_compare (code, type, arg0, arg1);
8113 if (tem != NULL_TREE)
8114 return tem;
8118 /* If this is a comparison of a real constant with a PLUS_EXPR
8119 or a MINUS_EXPR of a real constant, we can convert it into a
8120 comparison with a revised real constant as long as no overflow
8121 occurs when unsafe_math_optimizations are enabled. */
8122 if (flag_unsafe_math_optimizations
8123 && TREE_CODE (arg1) == REAL_CST
8124 && (TREE_CODE (arg0) == PLUS_EXPR
8125 || TREE_CODE (arg0) == MINUS_EXPR)
8126 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8127 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8128 ? MINUS_EXPR : PLUS_EXPR,
8129 arg1, TREE_OPERAND (arg0, 1), 0))
8130 && ! TREE_CONSTANT_OVERFLOW (tem))
8131 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8133 /* Likewise, we can simplify a comparison of a real constant with
8134 a MINUS_EXPR whose first operand is also a real constant, i.e.
8135 (c1 - x) < c2 becomes x > c1-c2. */
8136 if (flag_unsafe_math_optimizations
8137 && TREE_CODE (arg1) == REAL_CST
8138 && TREE_CODE (arg0) == MINUS_EXPR
8139 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8140 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8141 arg1, 0))
8142 && ! TREE_CONSTANT_OVERFLOW (tem))
8143 return fold_build2 (swap_tree_comparison (code), type,
8144 TREE_OPERAND (arg0, 1), tem);
8146 /* Fold comparisons against built-in math functions. */
8147 if (TREE_CODE (arg1) == REAL_CST
8148 && flag_unsafe_math_optimizations
8149 && ! flag_errno_math)
8151 enum built_in_function fcode = builtin_mathfn_code (arg0);
8153 if (fcode != END_BUILTINS)
8155 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8156 if (tem != NULL_TREE)
8157 return tem;
8162 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8163 if (TREE_CONSTANT (arg1)
8164 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8165 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8166 /* This optimization is invalid for ordered comparisons
8167 if CONST+INCR overflows or if foo+incr might overflow.
8168 This optimization is invalid for floating point due to rounding.
8169 For pointer types we assume overflow doesn't happen. */
8170 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8171 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8172 && (code == EQ_EXPR || code == NE_EXPR))))
8174 tree varop, newconst;
8176 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8178 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8179 arg1, TREE_OPERAND (arg0, 1));
8180 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8181 TREE_OPERAND (arg0, 0),
8182 TREE_OPERAND (arg0, 1));
8184 else
8186 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8187 arg1, TREE_OPERAND (arg0, 1));
8188 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8189 TREE_OPERAND (arg0, 0),
8190 TREE_OPERAND (arg0, 1));
8194 /* If VAROP is a reference to a bitfield, we must mask
8195 the constant by the width of the field. */
8196 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8197 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8198 && host_integerp (DECL_SIZE (TREE_OPERAND
8199 (TREE_OPERAND (varop, 0), 1)), 1))
8201 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8202 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8203 tree folded_compare, shift;
8205 /* First check whether the comparison would come out
8206 always the same. If we don't do that we would
8207 change the meaning with the masking. */
8208 folded_compare = fold_build2 (code, type,
8209 TREE_OPERAND (varop, 0), arg1);
8210 if (TREE_CODE (folded_compare) == INTEGER_CST)
8211 return omit_one_operand (type, folded_compare, varop);
8213 shift = build_int_cst (NULL_TREE,
8214 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8215 shift = fold_convert (TREE_TYPE (varop), shift);
8216 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8217 newconst, shift);
8218 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8219 newconst, shift);
8222 return fold_build2 (code, type, varop, newconst);
8225 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8226 && (TREE_CODE (arg0) == NOP_EXPR
8227 || TREE_CODE (arg0) == CONVERT_EXPR))
8229 /* If we are widening one operand of an integer comparison,
8230 see if the other operand is similarly being widened. Perhaps we
8231 can do the comparison in the narrower type. */
8232 tem = fold_widened_comparison (code, type, arg0, arg1);
8233 if (tem)
8234 return tem;
8236 /* Or if we are changing signedness. */
8237 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8238 if (tem)
8239 return tem;
8242 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8243 constant, we can simplify it. */
8244 if (TREE_CODE (arg1) == INTEGER_CST
8245 && (TREE_CODE (arg0) == MIN_EXPR
8246 || TREE_CODE (arg0) == MAX_EXPR)
8247 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8249 tem = optimize_minmax_comparison (code, type, op0, op1);
8250 if (tem)
8251 return tem;
8254 /* Simplify comparison of something with itself. (For IEEE
8255 floating-point, we can only do some of these simplifications.) */
8256 if (operand_equal_p (arg0, arg1, 0))
8258 switch (code)
8260 case EQ_EXPR:
8261 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8262 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8263 return constant_boolean_node (1, type);
8264 break;
8266 case GE_EXPR:
8267 case LE_EXPR:
8268 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8269 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8270 return constant_boolean_node (1, type);
8271 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8273 case NE_EXPR:
8274 /* For NE, we can only do this simplification if the type is
8275 integer or we don't honor IEEE floating point NaNs. */
8276 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8277 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8278 break;
8279 /* ... fall through ... */
8280 case GT_EXPR:
8281 case LT_EXPR:
8282 return constant_boolean_node (0, type);
8283 default:
8284 gcc_unreachable ();
8288 /* If we are comparing an expression that just has comparisons
8289 of two integer values, arithmetic expressions of those comparisons,
8290 and constants, we can simplify it. There are only three cases
8291 to check: the two values can either be equal, the first can be
8292 greater, or the second can be greater. Fold the expression for
8293 those three values. Since each value must be 0 or 1, we have
8294 eight possibilities, each of which corresponds to the constant 0
8295 or 1 or one of the six possible comparisons.
8297 This handles common cases like (a > b) == 0 but also handles
8298 expressions like ((x > y) - (y > x)) > 0, which supposedly
8299 occur in macroized code. */
8301 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8303 tree cval1 = 0, cval2 = 0;
8304 int save_p = 0;
8306 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8307 /* Don't handle degenerate cases here; they should already
8308 have been handled anyway. */
8309 && cval1 != 0 && cval2 != 0
8310 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8311 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8312 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8313 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8314 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8315 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8316 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8318 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8319 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8321 /* We can't just pass T to eval_subst in case cval1 or cval2
8322 was the same as ARG1. */
8324 tree high_result
8325 = fold_build2 (code, type,
8326 eval_subst (arg0, cval1, maxval,
8327 cval2, minval),
8328 arg1);
8329 tree equal_result
8330 = fold_build2 (code, type,
8331 eval_subst (arg0, cval1, maxval,
8332 cval2, maxval),
8333 arg1);
8334 tree low_result
8335 = fold_build2 (code, type,
8336 eval_subst (arg0, cval1, minval,
8337 cval2, maxval),
8338 arg1);
8340 /* All three of these results should be 0 or 1. Confirm they are.
8341 Then use those values to select the proper code to use. */
8343 if (TREE_CODE (high_result) == INTEGER_CST
8344 && TREE_CODE (equal_result) == INTEGER_CST
8345 && TREE_CODE (low_result) == INTEGER_CST)
8347 /* Make a 3-bit mask with the high-order bit being the
8348 value for `>', the next for '=', and the low for '<'. */
8349 switch ((integer_onep (high_result) * 4)
8350 + (integer_onep (equal_result) * 2)
8351 + integer_onep (low_result))
8353 case 0:
8354 /* Always false. */
8355 return omit_one_operand (type, integer_zero_node, arg0);
8356 case 1:
8357 code = LT_EXPR;
8358 break;
8359 case 2:
8360 code = EQ_EXPR;
8361 break;
8362 case 3:
8363 code = LE_EXPR;
8364 break;
8365 case 4:
8366 code = GT_EXPR;
8367 break;
8368 case 5:
8369 code = NE_EXPR;
8370 break;
8371 case 6:
8372 code = GE_EXPR;
8373 break;
8374 case 7:
8375 /* Always true. */
8376 return omit_one_operand (type, integer_one_node, arg0);
8379 if (save_p)
8380 return save_expr (build2 (code, type, cval1, cval2));
8381 return fold_build2 (code, type, cval1, cval2);
8386 /* Fold a comparison of the address of COMPONENT_REFs with the same
8387 type and component to a comparison of the address of the base
8388 object. In short, &x->a OP &y->a to x OP y and
8389 &x->a OP &y.a to x OP &y */
8390 if (TREE_CODE (arg0) == ADDR_EXPR
8391 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8392 && TREE_CODE (arg1) == ADDR_EXPR
8393 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8395 tree cref0 = TREE_OPERAND (arg0, 0);
8396 tree cref1 = TREE_OPERAND (arg1, 0);
8397 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8399 tree op0 = TREE_OPERAND (cref0, 0);
8400 tree op1 = TREE_OPERAND (cref1, 0);
8401 return fold_build2 (code, type,
8402 build_fold_addr_expr (op0),
8403 build_fold_addr_expr (op1));
8407 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8408 into a single range test. */
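/* e.g. for an unsigned X, X / 4 == 2 holds exactly when
   8 <= X && X <= 11, which fold_div_compare can express as a
   range test.  */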
8409 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8410 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8411 && TREE_CODE (arg1) == INTEGER_CST
8412 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8413 && !integer_zerop (TREE_OPERAND (arg0, 1))
8414 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8415 && !TREE_OVERFLOW (arg1))
8417 tem = fold_div_compare (code, type, arg0, arg1);
8418 if (tem != NULL_TREE)
8419 return tem;
8422 return NULL_TREE;
8426 /* Subroutine of fold_binary. Optimize complex multiplications of the
8427 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8428 argument EXPR represents the expression "z" of type TYPE. */
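/* The identity used below: for z = a + b*i,
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b, with a zero
   imaginary part.  */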
8430 static tree
8431 fold_mult_zconjz (tree type, tree expr)
8433 tree itype = TREE_TYPE (type);
8434 tree rpart, ipart, tem;
8436 if (TREE_CODE (expr) == COMPLEX_EXPR)
8438 rpart = TREE_OPERAND (expr, 0);
8439 ipart = TREE_OPERAND (expr, 1);
8441 else if (TREE_CODE (expr) == COMPLEX_CST)
8443 rpart = TREE_REALPART (expr);
8444 ipart = TREE_IMAGPART (expr);
8446 else
8448 expr = save_expr (expr);
8449 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8450 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8453 rpart = save_expr (rpart);
8454 ipart = save_expr (ipart);
8455 tem = fold_build2 (PLUS_EXPR, itype,
8456 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8457 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8458 return fold_build2 (COMPLEX_EXPR, type, tem,
8459 fold_convert (itype, integer_zero_node));
8463 /* Fold a binary expression of code CODE and type TYPE with operands
8464 OP0 and OP1. Return the folded expression if folding is
8465 successful. Otherwise, return NULL_TREE. */
8467 tree
8468 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8470 enum tree_code_class kind = TREE_CODE_CLASS (code);
8471 tree arg0, arg1, tem;
8472 tree t1 = NULL_TREE;
8473 bool strict_overflow_p;
8475 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8476 && TREE_CODE_LENGTH (code) == 2
8477 && op0 != NULL_TREE
8478 && op1 != NULL_TREE);
8480 arg0 = op0;
8481 arg1 = op1;
8483 /* Strip any conversions that don't change the mode. This is
8484 safe for every expression, except for a comparison expression
8485 because its signedness is derived from its operands. So, in
8486 the latter case, only strip conversions that don't change the
8487 signedness.
8489 Note that this is done as an internal manipulation within the
8490 constant folder, in order to find the simplest representation
8491 of the arguments so that their form can be studied. In any
8492 case, the appropriate type conversions should be put back in
8493 the tree that will get out of the constant folder. */
8495 if (kind == tcc_comparison)
8497 STRIP_SIGN_NOPS (arg0);
8498 STRIP_SIGN_NOPS (arg1);
8500 else
8502 STRIP_NOPS (arg0);
8503 STRIP_NOPS (arg1);
8506 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8507 constant but we can't do arithmetic on them. */
8508 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8509 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8510 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8511 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8513 if (kind == tcc_binary)
8514 tem = const_binop (code, arg0, arg1, 0);
8515 else if (kind == tcc_comparison)
8516 tem = fold_relational_const (code, type, arg0, arg1);
8517 else
8518 tem = NULL_TREE;
8520 if (tem != NULL_TREE)
8522 if (TREE_TYPE (tem) != type)
8523 tem = fold_convert (type, tem);
8524 return tem;
8528 /* If this is a commutative operation, and ARG0 is a constant, move it
8529 to ARG1 to reduce the number of tests below. */
8530 if (commutative_tree_code (code)
8531 && tree_swap_operands_p (arg0, arg1, true))
8532 return fold_build2 (code, type, op1, op0);
8534 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8536 First check for cases where an arithmetic operation is applied to a
8537 compound, conditional, or comparison operation. Push the arithmetic
8538 operation inside the compound or conditional to see if any folding
8539 can then be done. Convert comparison to conditional for this purpose.
8540 The also optimizes non-constant cases that used to be done in
8541 expand_expr.
8543 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8544 one of the operands is a comparison and the other is a comparison, a
8545 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8546 code below would make the expression more complex. Change it to a
8547 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8548 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
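/* e.g. (a < b) == (c < d) is first rewritten as
   TRUTH_XOR_EXPR (a < b, c < d) and then inverted, i.e. it becomes
   the equivalent of !((a < b) ^ (c < d)).  */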
8550 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8551 || code == EQ_EXPR || code == NE_EXPR)
8552 && ((truth_value_p (TREE_CODE (arg0))
8553 && (truth_value_p (TREE_CODE (arg1))
8554 || (TREE_CODE (arg1) == BIT_AND_EXPR
8555 && integer_onep (TREE_OPERAND (arg1, 1)))))
8556 || (truth_value_p (TREE_CODE (arg1))
8557 && (truth_value_p (TREE_CODE (arg0))
8558 || (TREE_CODE (arg0) == BIT_AND_EXPR
8559 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8561 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8562 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8563 : TRUTH_XOR_EXPR,
8564 boolean_type_node,
8565 fold_convert (boolean_type_node, arg0),
8566 fold_convert (boolean_type_node, arg1));
8568 if (code == EQ_EXPR)
8569 tem = invert_truthvalue (tem);
8571 return fold_convert (type, tem);
8574 if (TREE_CODE_CLASS (code) == tcc_binary
8575 || TREE_CODE_CLASS (code) == tcc_comparison)
8577 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8578 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8579 fold_build2 (code, type,
8580 TREE_OPERAND (arg0, 1), op1));
8581 if (TREE_CODE (arg1) == COMPOUND_EXPR
8582 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8583 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8584 fold_build2 (code, type,
8585 op0, TREE_OPERAND (arg1, 1)));
8587 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8589 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8590 arg0, arg1,
8591 /*cond_first_p=*/1);
8592 if (tem != NULL_TREE)
8593 return tem;
8596 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8598 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8599 arg1, arg0,
8600 /*cond_first_p=*/0);
8601 if (tem != NULL_TREE)
8602 return tem;
8606 switch (code)
8608 case PLUS_EXPR:
8609 /* A + (-B) -> A - B */
8610 if (TREE_CODE (arg1) == NEGATE_EXPR)
8611 return fold_build2 (MINUS_EXPR, type,
8612 fold_convert (type, arg0),
8613 fold_convert (type, TREE_OPERAND (arg1, 0)));
8614 /* (-A) + B -> B - A */
8615 if (TREE_CODE (arg0) == NEGATE_EXPR
8616 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8617 return fold_build2 (MINUS_EXPR, type,
8618 fold_convert (type, arg1),
8619 fold_convert (type, TREE_OPERAND (arg0, 0)));
8620 /* Convert ~A + 1 to -A. */
8621 if (INTEGRAL_TYPE_P (type)
8622 && TREE_CODE (arg0) == BIT_NOT_EXPR
8623 && integer_onep (arg1))
8624 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8626 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8627 same or one. */
8628 if ((TREE_CODE (arg0) == MULT_EXPR
8629 || TREE_CODE (arg1) == MULT_EXPR)
8630 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8632 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8633 if (tem)
8634 return tem;
8637 if (! FLOAT_TYPE_P (type))
8639 if (integer_zerop (arg1))
8640 return non_lvalue (fold_convert (type, arg0));
8642 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8643 with a constant, and the two constants have no bits in common,
8644 we should treat this as a BIT_IOR_EXPR since this may produce more
8645 simplifications. */
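/* e.g. (x & 1) + (y & 4): the two masks share no bits, so the
   addition can never carry and is equivalent to (x & 1) | (y & 4).  */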
8646 if (TREE_CODE (arg0) == BIT_AND_EXPR
8647 && TREE_CODE (arg1) == BIT_AND_EXPR
8648 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8649 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8650 && integer_zerop (const_binop (BIT_AND_EXPR,
8651 TREE_OPERAND (arg0, 1),
8652 TREE_OPERAND (arg1, 1), 0)))
8654 code = BIT_IOR_EXPR;
8655 goto bit_ior;
8658 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8659 (plus (plus (mult) (mult)) (foo)) so that we can
8660 take advantage of the factoring cases below. */
8661 if (((TREE_CODE (arg0) == PLUS_EXPR
8662 || TREE_CODE (arg0) == MINUS_EXPR)
8663 && TREE_CODE (arg1) == MULT_EXPR)
8664 || ((TREE_CODE (arg1) == PLUS_EXPR
8665 || TREE_CODE (arg1) == MINUS_EXPR)
8666 && TREE_CODE (arg0) == MULT_EXPR))
8668 tree parg0, parg1, parg, marg;
8669 enum tree_code pcode;
8671 if (TREE_CODE (arg1) == MULT_EXPR)
8672 parg = arg0, marg = arg1;
8673 else
8674 parg = arg1, marg = arg0;
8675 pcode = TREE_CODE (parg);
8676 parg0 = TREE_OPERAND (parg, 0);
8677 parg1 = TREE_OPERAND (parg, 1);
8678 STRIP_NOPS (parg0);
8679 STRIP_NOPS (parg1);
8681 if (TREE_CODE (parg0) == MULT_EXPR
8682 && TREE_CODE (parg1) != MULT_EXPR)
8683 return fold_build2 (pcode, type,
8684 fold_build2 (PLUS_EXPR, type,
8685 fold_convert (type, parg0),
8686 fold_convert (type, marg)),
8687 fold_convert (type, parg1));
8688 if (TREE_CODE (parg0) != MULT_EXPR
8689 && TREE_CODE (parg1) == MULT_EXPR)
8690 return fold_build2 (PLUS_EXPR, type,
8691 fold_convert (type, parg0),
8692 fold_build2 (pcode, type,
8693 fold_convert (type, marg),
8694 fold_convert (type,
8695 parg1)));
8698 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
8699 of the array. The loop optimizer sometimes produces this type of
8700 expression. */
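/* e.g. with 4-byte array elements, &a[1] + 4 * i can become
   &a[1 + i], folding the scaled offset back into the index.  */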
8701 if (TREE_CODE (arg0) == ADDR_EXPR)
8703 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8704 if (tem)
8705 return fold_convert (type, tem);
8707 else if (TREE_CODE (arg1) == ADDR_EXPR)
8709 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8710 if (tem)
8711 return fold_convert (type, tem);
8714 else
8716 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8717 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8718 return non_lvalue (fold_convert (type, arg0));
8720 /* Likewise if the operands are reversed. */
8721 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8722 return non_lvalue (fold_convert (type, arg1));
8724 /* Convert X + -C into X - C. */
8725 if (TREE_CODE (arg1) == REAL_CST
8726 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8728 tem = fold_negate_const (arg1, type);
8729 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8730 return fold_build2 (MINUS_EXPR, type,
8731 fold_convert (type, arg0),
8732 fold_convert (type, tem));
8735 if (flag_unsafe_math_optimizations
8736 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8737 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8738 && (tem = distribute_real_division (code, type, arg0, arg1)))
8739 return tem;
8741 /* Convert x+x into x*2.0. */
8742 if (operand_equal_p (arg0, arg1, 0)
8743 && SCALAR_FLOAT_TYPE_P (type))
8744 return fold_build2 (MULT_EXPR, type, arg0,
8745 build_real (type, dconst2));
8747 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8748 if (flag_unsafe_math_optimizations
8749 && TREE_CODE (arg1) == PLUS_EXPR
8750 && TREE_CODE (arg0) != MULT_EXPR)
8752 tree tree10 = TREE_OPERAND (arg1, 0);
8753 tree tree11 = TREE_OPERAND (arg1, 1);
8754 if (TREE_CODE (tree11) == MULT_EXPR
8755 && TREE_CODE (tree10) == MULT_EXPR)
8757 tree tree0;
8758 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8759 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8762 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
8763 if (flag_unsafe_math_optimizations
8764 && TREE_CODE (arg0) == PLUS_EXPR
8765 && TREE_CODE (arg1) != MULT_EXPR)
8767 tree tree00 = TREE_OPERAND (arg0, 0);
8768 tree tree01 = TREE_OPERAND (arg0, 1);
8769 if (TREE_CODE (tree01) == MULT_EXPR
8770 && TREE_CODE (tree00) == MULT_EXPR)
8772 tree tree0;
8773 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8774 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8779 bit_rotate:
8780 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8781 is a rotate of A by C1 bits. */
8782 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8783 is a rotate of A by B bits. */
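/* e.g. for a 32-bit unsigned A, (A << 3) + (A >> 29) is recognized
   here as a left rotate of A by 3: the shift counts sum to the
   precision, so the two shifted halves cannot overlap.  */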
8785 enum tree_code code0, code1;
8786 code0 = TREE_CODE (arg0);
8787 code1 = TREE_CODE (arg1);
8788 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8789 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8790 && operand_equal_p (TREE_OPERAND (arg0, 0),
8791 TREE_OPERAND (arg1, 0), 0)
8792 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8794 tree tree01, tree11;
8795 enum tree_code code01, code11;
8797 tree01 = TREE_OPERAND (arg0, 1);
8798 tree11 = TREE_OPERAND (arg1, 1);
8799 STRIP_NOPS (tree01);
8800 STRIP_NOPS (tree11);
8801 code01 = TREE_CODE (tree01);
8802 code11 = TREE_CODE (tree11);
8803 if (code01 == INTEGER_CST
8804 && code11 == INTEGER_CST
8805 && TREE_INT_CST_HIGH (tree01) == 0
8806 && TREE_INT_CST_HIGH (tree11) == 0
8807 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8808 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8809 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8810 code0 == LSHIFT_EXPR ? tree01 : tree11);
8811 else if (code11 == MINUS_EXPR)
8813 tree tree110, tree111;
8814 tree110 = TREE_OPERAND (tree11, 0);
8815 tree111 = TREE_OPERAND (tree11, 1);
8816 STRIP_NOPS (tree110);
8817 STRIP_NOPS (tree111);
8818 if (TREE_CODE (tree110) == INTEGER_CST
8819 && 0 == compare_tree_int (tree110,
8820 TYPE_PRECISION
8821 (TREE_TYPE (TREE_OPERAND
8822 (arg0, 0))))
8823 && operand_equal_p (tree01, tree111, 0))
8824 return build2 ((code0 == LSHIFT_EXPR
8825 ? LROTATE_EXPR
8826 : RROTATE_EXPR),
8827 type, TREE_OPERAND (arg0, 0), tree01);
8829 else if (code01 == MINUS_EXPR)
8831 tree tree010, tree011;
8832 tree010 = TREE_OPERAND (tree01, 0);
8833 tree011 = TREE_OPERAND (tree01, 1);
8834 STRIP_NOPS (tree010);
8835 STRIP_NOPS (tree011);
8836 if (TREE_CODE (tree010) == INTEGER_CST
8837 && 0 == compare_tree_int (tree010,
8838 TYPE_PRECISION
8839 (TREE_TYPE (TREE_OPERAND
8840 (arg0, 0))))
8841 && operand_equal_p (tree11, tree011, 0))
8842 return build2 ((code0 != LSHIFT_EXPR
8843 ? LROTATE_EXPR
8844 : RROTATE_EXPR),
8845 type, TREE_OPERAND (arg0, 0), tree11);
8850 associate:
8851 /* In most languages, we can't associate operations on floats through
8852 parentheses. Rather than remember where the parentheses were, we
8853 don't associate floats at all, unless the user has specified
8854 -funsafe-math-optimizations. */
8856 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8858 tree var0, con0, lit0, minus_lit0;
8859 tree var1, con1, lit1, minus_lit1;
8860 bool ok = true;
8862 /* Split both trees into variables, constants, and literals. Then
8863 associate each group together, the constants with literals,
8864 then the result with variables. This increases the chances of
8865 literals being recombined later and of generating relocatable
8866 expressions for the sum of a constant and literal. */
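/* Sketch of the split for unsigned X and Y in (X + 1) + (Y + 2):
   the variables are X and Y and the literals are 1 and 2; the
   literals are combined first, so the tree is rebuilt as
   (X + Y) + 3.  */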
8867 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8868 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8869 code == MINUS_EXPR);
8871 /* With undefined overflow we can only associate constants
8872 with one variable. */
8873 if ((POINTER_TYPE_P (type)
8874 || (INTEGRAL_TYPE_P (type)
8875 && !(TYPE_UNSIGNED (type) || flag_wrapv)))
8876 && var0 && var1)
8878 tree tmp0 = var0;
8879 tree tmp1 = var1;
8881 if (TREE_CODE (tmp0) == NEGATE_EXPR)
8882 tmp0 = TREE_OPERAND (tmp0, 0);
8883 if (TREE_CODE (tmp1) == NEGATE_EXPR)
8884 tmp1 = TREE_OPERAND (tmp1, 0);
8885 /* The only case we can still associate with two variables
8886 is if they are the same, modulo negation. */
8887 if (!operand_equal_p (tmp0, tmp1, 0))
8888 ok = false;
8891 /* Only do something if we found more than two objects. Otherwise,
8892 nothing has changed and we risk infinite recursion. */
8893 if (ok
8894 && (2 < ((var0 != 0) + (var1 != 0)
8895 + (con0 != 0) + (con1 != 0)
8896 + (lit0 != 0) + (lit1 != 0)
8897 + (minus_lit0 != 0) + (minus_lit1 != 0))))
8899 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8900 if (code == MINUS_EXPR)
8901 code = PLUS_EXPR;
8903 var0 = associate_trees (var0, var1, code, type);
8904 con0 = associate_trees (con0, con1, code, type);
8905 lit0 = associate_trees (lit0, lit1, code, type);
8906 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8908 /* Preserve the MINUS_EXPR if the negative part of the literal is
8909 greater than the positive part. Otherwise, the multiplicative
8910 folding code (i.e. extract_muldiv) may be fooled in case
8911 unsigned constants are subtracted, like in the following
8912 example: ((X*2 + 4) - 8U)/2. */
8913 if (minus_lit0 && lit0)
8915 if (TREE_CODE (lit0) == INTEGER_CST
8916 && TREE_CODE (minus_lit0) == INTEGER_CST
8917 && tree_int_cst_lt (lit0, minus_lit0))
8919 minus_lit0 = associate_trees (minus_lit0, lit0,
8920 MINUS_EXPR, type);
8921 lit0 = 0;
8923 else
8925 lit0 = associate_trees (lit0, minus_lit0,
8926 MINUS_EXPR, type);
8927 minus_lit0 = 0;
8930 if (minus_lit0)
8932 if (con0 == 0)
8933 return fold_convert (type,
8934 associate_trees (var0, minus_lit0,
8935 MINUS_EXPR, type));
8936 else
8938 con0 = associate_trees (con0, minus_lit0,
8939 MINUS_EXPR, type);
8940 return fold_convert (type,
8941 associate_trees (var0, con0,
8942 PLUS_EXPR, type));
8946 con0 = associate_trees (con0, lit0, code, type);
8947 return fold_convert (type, associate_trees (var0, con0,
8948 code, type));
8952 return NULL_TREE;
8954 case MINUS_EXPR:
8955 /* A - (-B) -> A + B */
8956 if (TREE_CODE (arg1) == NEGATE_EXPR)
8957 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8958 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8959 if (TREE_CODE (arg0) == NEGATE_EXPR
8960 && (FLOAT_TYPE_P (type)
8961 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8962 && negate_expr_p (arg1)
8963 && reorder_operands_p (arg0, arg1))
8964 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8965 TREE_OPERAND (arg0, 0));
8966 /* Convert -A - 1 to ~A. */
8967 if (INTEGRAL_TYPE_P (type)
8968 && TREE_CODE (arg0) == NEGATE_EXPR
8969 && integer_onep (arg1))
8970 return fold_build1 (BIT_NOT_EXPR, type,
8971 fold_convert (type, TREE_OPERAND (arg0, 0)));
8973 /* Convert -1 - A to ~A. */
8974 if (INTEGRAL_TYPE_P (type)
8975 && integer_all_onesp (arg0))
8976 return fold_build1 (BIT_NOT_EXPR, type, arg1);
8978 if (! FLOAT_TYPE_P (type))
8980 if (integer_zerop (arg0))
8981 return negate_expr (fold_convert (type, arg1));
8982 if (integer_zerop (arg1))
8983 return non_lvalue (fold_convert (type, arg0));
8985 /* Fold A - (A & B) into ~B & A. */
8986 if (!TREE_SIDE_EFFECTS (arg0)
8987 && TREE_CODE (arg1) == BIT_AND_EXPR)
8989 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8990 return fold_build2 (BIT_AND_EXPR, type,
8991 fold_build1 (BIT_NOT_EXPR, type,
8992 TREE_OPERAND (arg1, 0)),
8993 arg0);
8994 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8995 return fold_build2 (BIT_AND_EXPR, type,
8996 fold_build1 (BIT_NOT_EXPR, type,
8997 TREE_OPERAND (arg1, 1)),
8998 arg0);
9001 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9002 any power of 2 minus 1. */
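/* Spot check with A = 23 and B = 15 (a power of 2 minus 1):
   (23 & ~15) - (23 & 15) = 16 - 7 = 9, and likewise
   (23 ^ 15) - 15 = 24 - 15 = 9.  */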
9003 if (TREE_CODE (arg0) == BIT_AND_EXPR
9004 && TREE_CODE (arg1) == BIT_AND_EXPR
9005 && operand_equal_p (TREE_OPERAND (arg0, 0),
9006 TREE_OPERAND (arg1, 0), 0))
9008 tree mask0 = TREE_OPERAND (arg0, 1);
9009 tree mask1 = TREE_OPERAND (arg1, 1);
9010 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9012 if (operand_equal_p (tem, mask1, 0))
9014 tem = fold_build2 (BIT_XOR_EXPR, type,
9015 TREE_OPERAND (arg0, 0), mask1);
9016 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9021 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9022 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9023 return non_lvalue (fold_convert (type, arg0));
9025 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9026 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9027 (-ARG1 + ARG0) reduces to -ARG1. */
9028 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9029 return negate_expr (fold_convert (type, arg1));
9031 /* Fold &x - &x. This can happen from &x.foo - &x.
9032 This is unsafe for certain floats even in non-IEEE formats.
9033 In IEEE, it is unsafe because it does the wrong thing for NaNs.
9034 Also note that operand_equal_p is always false if an operand
9035 is volatile. */
9037 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9038 && operand_equal_p (arg0, arg1, 0))
9039 return fold_convert (type, integer_zero_node);
9041 /* A - B -> A + (-B) if B is easily negatable. */
9042 if (negate_expr_p (arg1)
9043 && ((FLOAT_TYPE_P (type)
9044 /* Avoid this transformation if B is a positive REAL_CST. */
9045 && (TREE_CODE (arg1) != REAL_CST
9046 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9047 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
9048 return fold_build2 (PLUS_EXPR, type,
9049 fold_convert (type, arg0),
9050 fold_convert (type, negate_expr (arg1)));
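/* For example, "x - (-3.0)" folds to "x + 3.0".  "x - 1.0" is left
   alone: negating a positive REAL_CST would merely trade the
   subtraction for an addition of a negative constant.  */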
9052 /* Try folding difference of addresses. */
9054 HOST_WIDE_INT diff;
9056 if ((TREE_CODE (arg0) == ADDR_EXPR
9057 || TREE_CODE (arg1) == ADDR_EXPR)
9058 && ptr_difference_const (arg0, arg1, &diff))
9059 return build_int_cst_type (type, diff);
9062 /* Fold &a[i] - &a[j] to i-j. */
9063 if (TREE_CODE (arg0) == ADDR_EXPR
9064 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9065 && TREE_CODE (arg1) == ADDR_EXPR
9066 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9068 tree aref0 = TREE_OPERAND (arg0, 0);
9069 tree aref1 = TREE_OPERAND (arg1, 0);
9070 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9071 TREE_OPERAND (aref1, 0), 0))
9073 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9074 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9075 tree esz = array_ref_element_size (aref0);
9076 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9077 return fold_build2 (MULT_EXPR, type, diff,
9078 fold_convert (type, esz));
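/* For example, given "int a[10]", the MINUS_EXPR computing the byte
   difference of &a[i] and &a[j] folds to (i - j) * sizeof (int),
   without materializing either address.  */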
9083 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
9084 of the array. The loop optimizer sometimes produces this kind of
9085 expression. */
9086 if (TREE_CODE (arg0) == ADDR_EXPR)
9088 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9089 if (tem)
9090 return fold_convert (type, tem);
9093 if (flag_unsafe_math_optimizations
9094 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9095 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9096 && (tem = distribute_real_division (code, type, arg0, arg1)))
9097 return tem;
9099 /* Handle (A1 * C1) - (A2 * C2) with A1 and A2, or C1 and C2, being
9100 the same or equal to 1. */
9101 if ((TREE_CODE (arg0) == MULT_EXPR
9102 || TREE_CODE (arg1) == MULT_EXPR)
9103 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9105 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9106 if (tem)
9107 return tem;
9110 goto associate;
9112 case MULT_EXPR:
9113 /* (-A) * (-B) -> A * B */
9114 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9115 return fold_build2 (MULT_EXPR, type,
9116 fold_convert (type, TREE_OPERAND (arg0, 0)),
9117 fold_convert (type, negate_expr (arg1)));
9118 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9119 return fold_build2 (MULT_EXPR, type,
9120 fold_convert (type, negate_expr (arg0)),
9121 fold_convert (type, TREE_OPERAND (arg1, 0)));
9123 if (! FLOAT_TYPE_P (type))
9125 if (integer_zerop (arg1))
9126 return omit_one_operand (type, arg1, arg0);
9127 if (integer_onep (arg1))
9128 return non_lvalue (fold_convert (type, arg0));
9129 /* Transform x * -1 into -x. */
9130 if (integer_all_onesp (arg1))
9131 return fold_convert (type, negate_expr (arg0));
9133 /* (a * (1 << b)) is (a << b) */
9134 if (TREE_CODE (arg1) == LSHIFT_EXPR
9135 && integer_onep (TREE_OPERAND (arg1, 0)))
9136 return fold_build2 (LSHIFT_EXPR, type, arg0,
9137 TREE_OPERAND (arg1, 1));
9138 if (TREE_CODE (arg0) == LSHIFT_EXPR
9139 && integer_onep (TREE_OPERAND (arg0, 0)))
9140 return fold_build2 (LSHIFT_EXPR, type, arg1,
9141 TREE_OPERAND (arg0, 1));
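/* For example, "n * (1 << k)" and "(1 << k) * n" both fold to
   "n << k".  */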
9143 strict_overflow_p = false;
9144 if (TREE_CODE (arg1) == INTEGER_CST
9145 && 0 != (tem = extract_muldiv (op0,
9146 fold_convert (type, arg1),
9147 code, NULL_TREE,
9148 &strict_overflow_p)))
9150 if (strict_overflow_p)
9151 fold_overflow_warning (("assuming signed overflow does not "
9152 "occur when simplifying "
9153 "multiplication"),
9154 WARN_STRICT_OVERFLOW_MISC);
9155 return fold_convert (type, tem);
9158 /* Optimize z * conj(z) for integer complex numbers. */
9159 if (TREE_CODE (arg0) == CONJ_EXPR
9160 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9161 return fold_mult_zconjz (type, arg1);
9162 if (TREE_CODE (arg1) == CONJ_EXPR
9163 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9164 return fold_mult_zconjz (type, arg0);
9166 else
9168 /* Maybe fold x * 0 to 0. The expressions aren't the same
9169 when x is NaN, since x * 0 is also NaN. Nor are they the
9170 same in modes with signed zeros, since multiplying a
9171 negative value by 0 gives -0, not +0. */
9172 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9173 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9174 && real_zerop (arg1))
9175 return omit_one_operand (type, arg1, arg0);
9176 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9177 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9178 && real_onep (arg1))
9179 return non_lvalue (fold_convert (type, arg0));
9181 /* Transform x * -1.0 into -x. */
9182 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9183 && real_minus_onep (arg1))
9184 return fold_convert (type, negate_expr (arg0));
9186 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9187 if (flag_unsafe_math_optimizations
9188 && TREE_CODE (arg0) == RDIV_EXPR
9189 && TREE_CODE (arg1) == REAL_CST
9190 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9192 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9193 arg1, 0);
9194 if (tem)
9195 return fold_build2 (RDIV_EXPR, type, tem,
9196 TREE_OPERAND (arg0, 1));
9199 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9200 if (operand_equal_p (arg0, arg1, 0))
9202 tree tem = fold_strip_sign_ops (arg0);
9203 if (tem != NULL_TREE)
9205 tem = fold_convert (type, tem);
9206 return fold_build2 (MULT_EXPR, type, tem, tem);
9210 /* Optimize z * conj(z) for floating point complex numbers.
9211 Guarded by flag_unsafe_math_optimizations as non-finite
9212 imaginary components don't produce scalar results. */
9213 if (flag_unsafe_math_optimizations
9214 && TREE_CODE (arg0) == CONJ_EXPR
9215 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9216 return fold_mult_zconjz (type, arg1);
9217 if (flag_unsafe_math_optimizations
9218 && TREE_CODE (arg1) == CONJ_EXPR
9219 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9220 return fold_mult_zconjz (type, arg0);
9222 if (flag_unsafe_math_optimizations)
9224 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9225 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9227 /* Optimizations of root(...)*root(...). */
9228 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9230 tree rootfn, arg, arglist;
9231 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9232 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9234 /* Optimize sqrt(x)*sqrt(x) as x. */
9235 if (BUILTIN_SQRT_P (fcode0)
9236 && operand_equal_p (arg00, arg10, 0)
9237 && ! HONOR_SNANS (TYPE_MODE (type)))
9238 return arg00;
9240 /* Optimize root(x)*root(y) as root(x*y). */
9241 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9242 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9243 arglist = build_tree_list (NULL_TREE, arg);
9244 return build_function_call_expr (rootfn, arglist);
9247 /* Optimize expN(x)*expN(y) as expN(x+y). */
9248 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9250 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9251 tree arg = fold_build2 (PLUS_EXPR, type,
9252 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9253 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9254 tree arglist = build_tree_list (NULL_TREE, arg);
9255 return build_function_call_expr (expfn, arglist);
9258 /* Optimizations of pow(...)*pow(...). */
9259 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9260 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9261 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9263 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9264 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9265 1)));
9266 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9267 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9268 1)));
9270 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9271 if (operand_equal_p (arg01, arg11, 0))
9273 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9274 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9275 tree arglist = tree_cons (NULL_TREE, arg,
9276 build_tree_list (NULL_TREE,
9277 arg01));
9278 return build_function_call_expr (powfn, arglist);
9281 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9282 if (operand_equal_p (arg00, arg10, 0))
9284 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9285 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9286 tree arglist = tree_cons (NULL_TREE, arg00,
9287 build_tree_list (NULL_TREE,
9288 arg));
9289 return build_function_call_expr (powfn, arglist);
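/* For example, pow (x, y) * pow (z, y) folds to pow (x * z, y), and
   pow (x, 2.0) * pow (x, 3.0) folds to pow (x, 5.0).  */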
9293 /* Optimize tan(x)*cos(x) as sin(x). */
9294 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9295 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9296 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9297 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9298 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9299 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9300 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9301 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9303 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9305 if (sinfn != NULL_TREE)
9306 return build_function_call_expr (sinfn,
9307 TREE_OPERAND (arg0, 1));
9310 /* Optimize x*pow(x,c) as pow(x,c+1). */
9311 if (fcode1 == BUILT_IN_POW
9312 || fcode1 == BUILT_IN_POWF
9313 || fcode1 == BUILT_IN_POWL)
9315 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9316 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9317 1)));
9318 if (TREE_CODE (arg11) == REAL_CST
9319 && ! TREE_CONSTANT_OVERFLOW (arg11)
9320 && operand_equal_p (arg0, arg10, 0))
9322 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9323 REAL_VALUE_TYPE c;
9324 tree arg, arglist;
9326 c = TREE_REAL_CST (arg11);
9327 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9328 arg = build_real (type, c);
9329 arglist = build_tree_list (NULL_TREE, arg);
9330 arglist = tree_cons (NULL_TREE, arg0, arglist);
9331 return build_function_call_expr (powfn, arglist);
9335 /* Optimize pow(x,c)*x as pow(x,c+1). */
9336 if (fcode0 == BUILT_IN_POW
9337 || fcode0 == BUILT_IN_POWF
9338 || fcode0 == BUILT_IN_POWL)
9340 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9341 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9342 1)));
9343 if (TREE_CODE (arg01) == REAL_CST
9344 && ! TREE_CONSTANT_OVERFLOW (arg01)
9345 && operand_equal_p (arg1, arg00, 0))
9347 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9348 REAL_VALUE_TYPE c;
9349 tree arg, arglist;
9351 c = TREE_REAL_CST (arg01);
9352 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9353 arg = build_real (type, c);
9354 arglist = build_tree_list (NULL_TREE, arg);
9355 arglist = tree_cons (NULL_TREE, arg1, arglist);
9356 return build_function_call_expr (powfn, arglist);
9360 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9361 if (! optimize_size
9362 && operand_equal_p (arg0, arg1, 0))
9364 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9366 if (powfn)
9368 tree arg = build_real (type, dconst2);
9369 tree arglist = build_tree_list (NULL_TREE, arg);
9370 arglist = tree_cons (NULL_TREE, arg0, arglist);
9371 return build_function_call_expr (powfn, arglist);
9376 goto associate;
9378 case BIT_IOR_EXPR:
9379 bit_ior:
9380 if (integer_all_onesp (arg1))
9381 return omit_one_operand (type, arg1, arg0);
9382 if (integer_zerop (arg1))
9383 return non_lvalue (fold_convert (type, arg0));
9384 if (operand_equal_p (arg0, arg1, 0))
9385 return non_lvalue (fold_convert (type, arg0));
9387 /* ~X | X is -1. */
9388 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9389 && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9390 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9392 t1 = build_int_cst (type, -1);
9393 t1 = force_fit_type (t1, 0, false, false);
9394 return omit_one_operand (type, t1, arg1);
9397 /* X | ~X is -1. */
9398 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9399 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9400 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9402 t1 = build_int_cst (type, -1);
9403 t1 = force_fit_type (t1, 0, false, false);
9404 return omit_one_operand (type, t1, arg0);
9407 /* Canonicalize (X & C1) | C2. */
9408 if (TREE_CODE (arg0) == BIT_AND_EXPR
9409 && TREE_CODE (arg1) == INTEGER_CST
9410 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9412 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9413 int width = TYPE_PRECISION (type);
9414 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9415 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9416 hi2 = TREE_INT_CST_HIGH (arg1);
9417 lo2 = TREE_INT_CST_LOW (arg1);
9419 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9420 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9421 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9423 if (width > HOST_BITS_PER_WIDE_INT)
9425 mhi = (unsigned HOST_WIDE_INT) -1
9426 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9427 mlo = -1;
9429 else
9431 mhi = 0;
9432 mlo = (unsigned HOST_WIDE_INT) -1
9433 >> (HOST_BITS_PER_WIDE_INT - width);
9436 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9437 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9438 return fold_build2 (BIT_IOR_EXPR, type,
9439 TREE_OPERAND (arg0, 0), arg1);
9441 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9442 hi1 &= mhi;
9443 lo1 &= mlo;
9444 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9445 return fold_build2 (BIT_IOR_EXPR, type,
9446 fold_build2 (BIT_AND_EXPR, type,
9447 TREE_OPERAND (arg0, 0),
9448 build_int_cst_wide (type,
9449 lo1 & ~lo2,
9450 hi1 & ~hi2)),
9451 arg1);
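/* Worked example, assuming a 32-bit int: in (x & 0xff) | 0x0f,
   C1 & ~C2 == 0xf0 differs from C1, so the result is canonicalized to
   (x & 0xf0) | 0x0f.  With C1 == 0x0f the AND would drop away
   entirely, since (C1 & C2) == C1.  */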
9454 /* (X & Y) | Y is (X, Y). */
9455 if (TREE_CODE (arg0) == BIT_AND_EXPR
9456 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9457 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9458 /* (X & Y) | X is (Y, X). */
9459 if (TREE_CODE (arg0) == BIT_AND_EXPR
9460 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9461 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9462 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9463 /* X | (X & Y) is (Y, X). */
9464 if (TREE_CODE (arg1) == BIT_AND_EXPR
9465 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9466 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9467 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9468 /* X | (Y & X) is (Y, X). */
9469 if (TREE_CODE (arg1) == BIT_AND_EXPR
9470 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9471 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9472 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9474 t1 = distribute_bit_expr (code, type, arg0, arg1);
9475 if (t1 != NULL_TREE)
9476 return t1;
9478 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9480 This results in more efficient code for machines without a NAND
9481 instruction. Combine will canonicalize to the first form
9482 which will allow use of NAND instructions provided by the
9483 backend if they exist. */
9484 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9485 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9487 return fold_build1 (BIT_NOT_EXPR, type,
9488 build2 (BIT_AND_EXPR, type,
9489 TREE_OPERAND (arg0, 0),
9490 TREE_OPERAND (arg1, 0)));
9493 /* See if this can be simplified into a rotate first. If that
9494 is unsuccessful continue in the association code. */
9495 goto bit_rotate;
9497 case BIT_XOR_EXPR:
9498 if (integer_zerop (arg1))
9499 return non_lvalue (fold_convert (type, arg0));
9500 if (integer_all_onesp (arg1))
9501 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9502 if (operand_equal_p (arg0, arg1, 0))
9503 return omit_one_operand (type, integer_zero_node, arg0);
9505 /* ~X ^ X is -1. */
9506 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9507 && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9508 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9510 t1 = build_int_cst (type, -1);
9511 t1 = force_fit_type (t1, 0, false, false);
9512 return omit_one_operand (type, t1, arg1);
9515 /* X ^ ~X is -1. */
9516 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9517 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9518 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9520 t1 = build_int_cst (type, -1);
9521 t1 = force_fit_type (t1, 0, false, false);
9522 return omit_one_operand (type, t1, arg0);
9525 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9526 with a constant, and the two constants have no bits in common,
9527 we should treat this as a BIT_IOR_EXPR since this may produce more
9528 simplifications. */
9529 if (TREE_CODE (arg0) == BIT_AND_EXPR
9530 && TREE_CODE (arg1) == BIT_AND_EXPR
9531 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9532 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9533 && integer_zerop (const_binop (BIT_AND_EXPR,
9534 TREE_OPERAND (arg0, 1),
9535 TREE_OPERAND (arg1, 1), 0)))
9537 code = BIT_IOR_EXPR;
9538 goto bit_ior;
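/* For example, (x & 0xf0) ^ (y & 0x0f) uses disjoint masks, so it is
   re-dispatched as (x & 0xf0) | (y & 0x0f) and benefits from the
   BIT_IOR_EXPR simplifications above.  */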
9541 /* (X | Y) ^ X -> Y & ~X */
9542 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9543 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9545 tree t2 = TREE_OPERAND (arg0, 1);
9546 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9547 arg1);
9548 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9549 fold_convert (type, t1));
9550 return t1;
9553 /* (Y | X) ^ X -> Y & ~X */
9554 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9555 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9557 tree t2 = TREE_OPERAND (arg0, 0);
9558 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9559 arg1);
9560 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9561 fold_convert (type, t1));
9562 return t1;
9565 /* X ^ (X | Y) -> Y & ~X */
9566 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9567 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9569 tree t2 = TREE_OPERAND (arg1, 1);
9570 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9571 arg0);
9572 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9573 fold_convert (type, t1));
9574 return t1;
9577 /* X ^ (Y | X) -> Y & ~X */
9578 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9579 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9581 tree t2 = TREE_OPERAND (arg1, 0);
9582 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9583 arg0);
9584 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9585 fold_convert (type, t1));
9586 return t1;
9589 /* Convert ~X ^ ~Y to X ^ Y. */
9590 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9591 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9592 return fold_build2 (code, type,
9593 fold_convert (type, TREE_OPERAND (arg0, 0)),
9594 fold_convert (type, TREE_OPERAND (arg1, 0)));
9596 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9597 if (TREE_CODE (arg0) == BIT_AND_EXPR
9598 && integer_onep (TREE_OPERAND (arg0, 1))
9599 && integer_onep (arg1))
9600 return fold_build2 (EQ_EXPR, type, arg0,
9601 build_int_cst (TREE_TYPE (arg0), 0));
9603 /* Fold (X & Y) ^ Y as ~X & Y. */
9604 if (TREE_CODE (arg0) == BIT_AND_EXPR
9605 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9607 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9608 return fold_build2 (BIT_AND_EXPR, type,
9609 fold_build1 (BIT_NOT_EXPR, type, tem),
9610 fold_convert (type, arg1));
9612 /* Fold (X & Y) ^ X as ~Y & X. */
9613 if (TREE_CODE (arg0) == BIT_AND_EXPR
9614 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9615 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9617 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9618 return fold_build2 (BIT_AND_EXPR, type,
9619 fold_build1 (BIT_NOT_EXPR, type, tem),
9620 fold_convert (type, arg1));
9622 /* Fold X ^ (X & Y) as X & ~Y. */
9623 if (TREE_CODE (arg1) == BIT_AND_EXPR
9624 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9626 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9627 return fold_build2 (BIT_AND_EXPR, type,
9628 fold_convert (type, arg0),
9629 fold_build1 (BIT_NOT_EXPR, type, tem));
9631 /* Fold X ^ (Y & X) as ~Y & X. */
9632 if (TREE_CODE (arg1) == BIT_AND_EXPR
9633 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9634 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9636 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9637 return fold_build2 (BIT_AND_EXPR, type,
9638 fold_build1 (BIT_NOT_EXPR, type, tem),
9639 fold_convert (type, arg0));
9642 /* See if this can be simplified into a rotate first. If that
9643 is unsuccessful continue in the association code. */
9644 goto bit_rotate;
9646 case BIT_AND_EXPR:
9647 if (integer_all_onesp (arg1))
9648 return non_lvalue (fold_convert (type, arg0));
9649 if (integer_zerop (arg1))
9650 return omit_one_operand (type, arg1, arg0);
9651 if (operand_equal_p (arg0, arg1, 0))
9652 return non_lvalue (fold_convert (type, arg0));
9654 /* ~X & X is always zero. */
9655 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9656 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9657 return omit_one_operand (type, integer_zero_node, arg1);
9659 /* X & ~X is always zero. */
9660 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9661 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9662 return omit_one_operand (type, integer_zero_node, arg0);
9664 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9665 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9666 && TREE_CODE (arg1) == INTEGER_CST
9667 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9668 return fold_build2 (BIT_IOR_EXPR, type,
9669 fold_build2 (BIT_AND_EXPR, type,
9670 TREE_OPERAND (arg0, 0), arg1),
9671 fold_build2 (BIT_AND_EXPR, type,
9672 TREE_OPERAND (arg0, 1), arg1));
9674 /* (X | Y) & Y is (X, Y). */
9675 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9676 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9677 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9678 /* (X | Y) & X is (Y, X). */
9679 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9680 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9681 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9682 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9683 /* X & (X | Y) is (Y, X). */
9684 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9685 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9686 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9687 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9688 /* X & (Y | X) is (Y, X). */
9689 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9690 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9691 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9692 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9694 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9695 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9696 && integer_onep (TREE_OPERAND (arg0, 1))
9697 && integer_onep (arg1))
9699 tem = TREE_OPERAND (arg0, 0);
9700 return fold_build2 (EQ_EXPR, type,
9701 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9702 build_int_cst (TREE_TYPE (tem), 1)),
9703 build_int_cst (TREE_TYPE (tem), 0));
9705 /* Fold ~X & 1 as (X & 1) == 0. */
9706 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9707 && integer_onep (arg1))
9709 tem = TREE_OPERAND (arg0, 0);
9710 return fold_build2 (EQ_EXPR, type,
9711 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9712 build_int_cst (TREE_TYPE (tem), 1)),
9713 build_int_cst (TREE_TYPE (tem), 0));
9716 /* Fold (X ^ Y) & Y as ~X & Y. */
9717 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9718 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9720 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9721 return fold_build2 (BIT_AND_EXPR, type,
9722 fold_build1 (BIT_NOT_EXPR, type, tem),
9723 fold_convert (type, arg1));
9725 /* Fold (X ^ Y) & X as ~Y & X. */
9726 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9727 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9728 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9730 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9731 return fold_build2 (BIT_AND_EXPR, type,
9732 fold_build1 (BIT_NOT_EXPR, type, tem),
9733 fold_convert (type, arg1));
9735 /* Fold X & (X ^ Y) as X & ~Y. */
9736 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9737 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9739 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9740 return fold_build2 (BIT_AND_EXPR, type,
9741 fold_convert (type, arg0),
9742 fold_build1 (BIT_NOT_EXPR, type, tem));
9744 /* Fold X & (Y ^ X) as ~Y & X. */
9745 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9746 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9747 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9749 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9750 return fold_build2 (BIT_AND_EXPR, type,
9751 fold_build1 (BIT_NOT_EXPR, type, tem),
9752 fold_convert (type, arg0));
9755 t1 = distribute_bit_expr (code, type, arg0, arg1);
9756 if (t1 != NULL_TREE)
9757 return t1;
9758 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9759 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9760 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9762 unsigned int prec
9763 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9765 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9766 && (~TREE_INT_CST_LOW (arg1)
9767 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9768 return fold_convert (type, TREE_OPERAND (arg0, 0));
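/* For example, with "unsigned char c", the mask 0377 covers all
   prec == 8 bits that "(int) c" can have set, so the BIT_AND_EXPR
   folds away, leaving just the conversion.  */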
9771 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9773 This results in more efficient code for machines without a NOR
9774 instruction. Combine will canonicalize to the first form
9775 which will allow use of NOR instructions provided by the
9776 backend if they exist. */
9777 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9778 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9780 return fold_build1 (BIT_NOT_EXPR, type,
9781 build2 (BIT_IOR_EXPR, type,
9782 TREE_OPERAND (arg0, 0),
9783 TREE_OPERAND (arg1, 0)));
9786 goto associate;
9788 case RDIV_EXPR:
9789 /* Don't touch a floating-point divide by zero unless the mode
9790 of the constant can represent infinity. */
9791 if (TREE_CODE (arg1) == REAL_CST
9792 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9793 && real_zerop (arg1))
9794 return NULL_TREE;
9796 /* Optimize A / A to 1.0 if we don't care about
9797 NaNs or Infinities. Skip the transformation
9798 for non-real operands. */
9799 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9800 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9801 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9802 && operand_equal_p (arg0, arg1, 0))
9804 tree r = build_real (TREE_TYPE (arg0), dconst1);
9806 return omit_two_operands (type, r, arg0, arg1);
9809 /* The complex version of the above A / A optimization. */
9810 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9811 && operand_equal_p (arg0, arg1, 0))
9813 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9814 if (! HONOR_NANS (TYPE_MODE (elem_type))
9815 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9817 tree r = build_real (elem_type, dconst1);
9818 /* omit_two_operands will call fold_convert for us. */
9819 return omit_two_operands (type, r, arg0, arg1);
9823 /* (-A) / (-B) -> A / B */
9824 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9825 return fold_build2 (RDIV_EXPR, type,
9826 TREE_OPERAND (arg0, 0),
9827 negate_expr (arg1));
9828 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9829 return fold_build2 (RDIV_EXPR, type,
9830 negate_expr (arg0),
9831 TREE_OPERAND (arg1, 0));
9833 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9834 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9835 && real_onep (arg1))
9836 return non_lvalue (fold_convert (type, arg0));
9838 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9839 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9840 && real_minus_onep (arg1))
9841 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9843 /* If ARG1 is a constant, we can convert this to a multiply by the
9844 reciprocal. This does not have the same rounding properties,
9845 so only do this if -funsafe-math-optimizations. We can actually
9846 always safely do it if ARG1 is a power of two, but it's hard to
9847 tell if it is or not in a portable manner. */
9848 if (TREE_CODE (arg1) == REAL_CST)
9850 if (flag_unsafe_math_optimizations
9851 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9852 arg1, 0)))
9853 return fold_build2 (MULT_EXPR, type, arg0, tem);
9854 /* Find the reciprocal if optimizing and the result is exact. */
9855 if (optimize)
9857 REAL_VALUE_TYPE r;
9858 r = TREE_REAL_CST (arg1);
9859 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
9861 tem = build_real (type, r);
9862 return fold_build2 (MULT_EXPR, type,
9863 fold_convert (type, arg0), tem);
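/* For example, x / 4.0 folds to x * 0.25 whenever we are optimizing,
   because 0.25 is the exact reciprocal of 4.0; x / 3.0 is rewritten
   as a multiplication only under -funsafe-math-optimizations, since
   1/3 is not exactly representable.  */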
9867 /* Convert A/B/C to A/(B*C). */
9868 if (flag_unsafe_math_optimizations
9869 && TREE_CODE (arg0) == RDIV_EXPR)
9870 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9871 fold_build2 (MULT_EXPR, type,
9872 TREE_OPERAND (arg0, 1), arg1));
9874 /* Convert A/(B/C) to (A/B)*C. */
9875 if (flag_unsafe_math_optimizations
9876 && TREE_CODE (arg1) == RDIV_EXPR)
9877 return fold_build2 (MULT_EXPR, type,
9878 fold_build2 (RDIV_EXPR, type, arg0,
9879 TREE_OPERAND (arg1, 0)),
9880 TREE_OPERAND (arg1, 1));
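/* For example, under -funsafe-math-optimizations "a/b/c" becomes
   "a/(b*c)", trading a division for a multiplication, and "a/(b/c)"
   becomes "(a/b)*c".  */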
9882 /* Convert C1/(X*C2) into (C1/C2)/X. */
9883 if (flag_unsafe_math_optimizations
9884 && TREE_CODE (arg1) == MULT_EXPR
9885 && TREE_CODE (arg0) == REAL_CST
9886 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9888 tree tem = const_binop (RDIV_EXPR, arg0,
9889 TREE_OPERAND (arg1, 1), 0);
9890 if (tem)
9891 return fold_build2 (RDIV_EXPR, type, tem,
9892 TREE_OPERAND (arg1, 0));
9895 if (flag_unsafe_math_optimizations)
9897 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9898 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9900 /* Optimize sin(x)/cos(x) as tan(x). */
9901 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9902 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9903 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9904 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9905 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9907 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9909 if (tanfn != NULL_TREE)
9910 return build_function_call_expr (tanfn,
9911 TREE_OPERAND (arg0, 1));
9914 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9915 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9916 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9917 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9918 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9919 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9921 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9923 if (tanfn != NULL_TREE)
9925 tree tmp = TREE_OPERAND (arg0, 1);
9926 tmp = build_function_call_expr (tanfn, tmp);
9927 return fold_build2 (RDIV_EXPR, type,
9928 build_real (type, dconst1), tmp);
9932 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9933 NaNs or Infinities. */
9934 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9935 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9936 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9938 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9939 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9941 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9942 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9943 && operand_equal_p (arg00, arg01, 0))
9945 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9947 if (cosfn != NULL_TREE)
9948 return build_function_call_expr (cosfn,
9949 TREE_OPERAND (arg0, 1));
9953 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9954 NaNs or Infinities. */
9955 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9956 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9957 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9959 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9960 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9962 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9963 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9964 && operand_equal_p (arg00, arg01, 0))
9966 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9968 if (cosfn != NULL_TREE)
9970 tree tmp = TREE_OPERAND (arg0, 1);
9971 tmp = build_function_call_expr (cosfn, tmp);
9972 return fold_build2 (RDIV_EXPR, type,
9973 build_real (type, dconst1),
9974 tmp);
9979 /* Optimize pow(x,c)/x as pow(x,c-1). */
9980 if (fcode0 == BUILT_IN_POW
9981 || fcode0 == BUILT_IN_POWF
9982 || fcode0 == BUILT_IN_POWL)
9984 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9985 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9986 if (TREE_CODE (arg01) == REAL_CST
9987 && ! TREE_CONSTANT_OVERFLOW (arg01)
9988 && operand_equal_p (arg1, arg00, 0))
9990 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9991 REAL_VALUE_TYPE c;
9992 tree arg, arglist;
9994 c = TREE_REAL_CST (arg01);
9995 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9996 arg = build_real (type, c);
9997 arglist = build_tree_list (NULL_TREE, arg);
9998 arglist = tree_cons (NULL_TREE, arg1, arglist);
9999 return build_function_call_expr (powfn, arglist);
10003 /* Optimize x/expN(y) into x*expN(-y). */
10004 if (BUILTIN_EXPONENT_P (fcode1))
10006 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10007 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10008 tree arglist = build_tree_list (NULL_TREE,
10009 fold_convert (type, arg));
10010 arg1 = build_function_call_expr (expfn, arglist);
10011 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10014 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10015 if (fcode1 == BUILT_IN_POW
10016 || fcode1 == BUILT_IN_POWF
10017 || fcode1 == BUILT_IN_POWL)
10019 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10020 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10021 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10022 tree neg11 = fold_convert (type, negate_expr (arg11));
10023 tree arglist = tree_cons (NULL_TREE, arg10,
10024 build_tree_list (NULL_TREE, neg11));
10025 arg1 = build_function_call_expr (powfn, arglist);
10026 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10029 return NULL_TREE;
10031 case TRUNC_DIV_EXPR:
10032 case FLOOR_DIV_EXPR:
10033 /* Simplify A / (B << N) where A and B are positive and B is
10034 a power of 2, to A >> (N + log2(B)). */
10035 strict_overflow_p = false;
10036 if (TREE_CODE (arg1) == LSHIFT_EXPR
10037 && (TYPE_UNSIGNED (type)
10038 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10040 tree sval = TREE_OPERAND (arg1, 0);
10041 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10043 tree sh_cnt = TREE_OPERAND (arg1, 1);
10044 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10046 if (strict_overflow_p)
10047 fold_overflow_warning (("assuming signed overflow does not "
10048 "occur when simplifying A / (B << N)"),
10049 WARN_STRICT_OVERFLOW_MISC);
10051 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10052 sh_cnt, build_int_cst (NULL_TREE, pow2));
10053 return fold_build2 (RSHIFT_EXPR, type,
10054 fold_convert (type, arg0), sh_cnt);
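/* For example, for unsigned x, "x / (4 << n)" folds to
   "x >> (n + 2)", since log2 (4) == 2.  */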
10057 /* Fall thru */
10059 case ROUND_DIV_EXPR:
10060 case CEIL_DIV_EXPR:
10061 case EXACT_DIV_EXPR:
10062 if (integer_onep (arg1))
10063 return non_lvalue (fold_convert (type, arg0));
10064 if (integer_zerop (arg1))
10065 return NULL_TREE;
10066 /* X / -1 is -X. */
10067 if (!TYPE_UNSIGNED (type)
10068 && TREE_CODE (arg1) == INTEGER_CST
10069 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10070 && TREE_INT_CST_HIGH (arg1) == -1)
10071 return fold_convert (type, negate_expr (arg0));
10073 /* Convert -A / -B to A / B when the type is signed and overflow is
10074 undefined. */
10075 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10076 && TREE_CODE (arg0) == NEGATE_EXPR
10077 && negate_expr_p (arg1))
10079 if (INTEGRAL_TYPE_P (type))
10080 fold_overflow_warning (("assuming signed overflow does not occur "
10081 "when distributing negation across "
10082 "division"),
10083 WARN_STRICT_OVERFLOW_MISC);
10084 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10085 negate_expr (arg1));
10087 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10088 && TREE_CODE (arg1) == NEGATE_EXPR
10089 && negate_expr_p (arg0))
10091 if (INTEGRAL_TYPE_P (type))
10092 fold_overflow_warning (("assuming signed overflow does not occur "
10093 "when distributing negation across "
10094 "division"),
10095 WARN_STRICT_OVERFLOW_MISC);
10096 return fold_build2 (code, type, negate_expr (arg0),
10097 TREE_OPERAND (arg1, 0));
10100 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10101 operation, EXACT_DIV_EXPR.
10103 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10104 At one time others generated faster code, but it's not clear whether
10105 they still do after the last round of changes to the DIV code in expmed.c. */
10106 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10107 && multiple_of_p (type, arg0, arg1))
10108 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10110 strict_overflow_p = false;
10111 if (TREE_CODE (arg1) == INTEGER_CST
10112 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10113 &strict_overflow_p)))
10115 if (strict_overflow_p)
10116 fold_overflow_warning (("assuming signed overflow does not occur "
10117 "when simplifying division"),
10118 WARN_STRICT_OVERFLOW_MISC);
10119 return fold_convert (type, tem);
10122 return NULL_TREE;
10124 case CEIL_MOD_EXPR:
10125 case FLOOR_MOD_EXPR:
10126 case ROUND_MOD_EXPR:
10127 case TRUNC_MOD_EXPR:
10128 /* X % 1 is always zero, but be sure to preserve any side
10129 effects in X. */
10130 if (integer_onep (arg1))
10131 return omit_one_operand (type, integer_zero_node, arg0);
10133 /* For X % 0, return X % 0 unchanged so that we get the
10134 proper warnings and errors. */
10135 if (integer_zerop (arg1))
10136 return NULL_TREE;
10138 /* 0 % X is always zero, but be sure to preserve any side
10139 effects in X. Place this after checking for X == 0. */
10140 if (integer_zerop (arg0))
10141 return omit_one_operand (type, integer_zero_node, arg1);
10143 /* X % -1 is zero. */
10144 if (!TYPE_UNSIGNED (type)
10145 && TREE_CODE (arg1) == INTEGER_CST
10146 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10147 && TREE_INT_CST_HIGH (arg1) == -1)
10148 return omit_one_operand (type, integer_zero_node, arg0);
10150 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10151 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10152 strict_overflow_p = false;
10153 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10154 && (TYPE_UNSIGNED (type)
10155 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10157 tree c = arg1;
10158 /* Also optimize A % (C << N) where C is a power of 2,
10159 to A & ((C << N) - 1). */
10160 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10161 c = TREE_OPERAND (arg1, 0);
10163 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10165 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
10166 arg1, integer_one_node);
10167 if (strict_overflow_p)
10168 fold_overflow_warning (("assuming signed overflow does not "
10169 "occur when simplifying "
10170 "X % (power of two)"),
10171 WARN_STRICT_OVERFLOW_MISC);
10172 return fold_build2 (BIT_AND_EXPR, type,
10173 fold_convert (type, arg0),
10174 fold_convert (type, mask));
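/* For example, for unsigned x, "x % 8" folds to "x & 7", and
   "x % (2 << n)" folds to "x & ((2 << n) - 1)".  */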
10178 /* X % -C is the same as X % C. */
10179 if (code == TRUNC_MOD_EXPR
10180 && !TYPE_UNSIGNED (type)
10181 && TREE_CODE (arg1) == INTEGER_CST
10182 && !TREE_CONSTANT_OVERFLOW (arg1)
10183 && TREE_INT_CST_HIGH (arg1) < 0
10184 && !TYPE_OVERFLOW_TRAPS (type)
10185 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10186 && !sign_bit_p (arg1, arg1))
10187 return fold_build2 (code, type, fold_convert (type, arg0),
10188 fold_convert (type, negate_expr (arg1)));
10190 /* X % -Y is the same as X % Y. */
10191 if (code == TRUNC_MOD_EXPR
10192 && !TYPE_UNSIGNED (type)
10193 && TREE_CODE (arg1) == NEGATE_EXPR
10194 && !TYPE_OVERFLOW_TRAPS (type))
10195 return fold_build2 (code, type, fold_convert (type, arg0),
10196 fold_convert (type, TREE_OPERAND (arg1, 0)));
10198 if (TREE_CODE (arg1) == INTEGER_CST
10199 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10200 &strict_overflow_p)))
10202 if (strict_overflow_p)
10203 fold_overflow_warning (("assuming signed overflow does not occur "
10204 "when simplifying modulos"),
10205 WARN_STRICT_OVERFLOW_MISC);
10206 return fold_convert (type, tem);
10209 return NULL_TREE;
10211 case LROTATE_EXPR:
10212 case RROTATE_EXPR:
10213 if (integer_all_onesp (arg0))
10214 return omit_one_operand (type, arg0, arg1);
10215 goto shift;
10217 case RSHIFT_EXPR:
10218 /* Optimize -1 >> x for arithmetic right shifts. */
10219 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10220 return omit_one_operand (type, arg0, arg1);
10221 /* ... fall through ... */
10223 case LSHIFT_EXPR:
10224 shift:
10225 if (integer_zerop (arg1))
10226 return non_lvalue (fold_convert (type, arg0));
10227 if (integer_zerop (arg0))
10228 return omit_one_operand (type, arg0, arg1);
10230 /* Since negative shift count is not well-defined,
10231 don't try to compute it in the compiler. */
10232 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10233 return NULL_TREE;
10235 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10236 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10237 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10238 && host_integerp (TREE_OPERAND (arg0, 1), false)
10239 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10241 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10242 + TREE_INT_CST_LOW (arg1));
10244 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10245 being well defined. */
10246 if (low >= TYPE_PRECISION (type))
10248 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10249 low = low % TYPE_PRECISION (type);
10250 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10251 return build_int_cst (type, 0);
10252 else
10253 low = TYPE_PRECISION (type) - 1;
10256 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10257 build_int_cst (type, low));
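/* For example, "(x << 3) << 5" folds to "x << 8".  When the combined
   count reaches the type's precision, rotate counts are reduced
   modulo the precision, left and unsigned shifts fold to 0, and
   signed right shifts clamp to precision - 1.  */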
10260 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10261 into x & ((unsigned)-1 >> c) for unsigned types. */
10262 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10263 || (TYPE_UNSIGNED (type)
10264 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10265 && host_integerp (arg1, false)
10266 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10267 && host_integerp (TREE_OPERAND (arg0, 1), false)
10268 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10270 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10271 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10272 tree lshift;
10273 tree arg00;
10275 if (low0 == low1)
10277 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10279 lshift = build_int_cst (type, -1);
10280 lshift = int_const_binop (code, lshift, arg1, 0);
10282 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
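/* For example, "(x >> 4) << 4" folds to "x & (-1 << 4)", i.e.
   x & -16, and for unsigned x, "(x << 4) >> 4" folds to
   "x & ((unsigned) -1 >> 4)".  */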
10286 /* Rewrite an LROTATE_EXPR by a constant into an
10287 RROTATE_EXPR by a new constant. */
10288 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10290 tree tem = build_int_cst (NULL_TREE,
10291 GET_MODE_BITSIZE (TYPE_MODE (type)));
10292 tem = fold_convert (TREE_TYPE (arg1), tem);
10293 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10294 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
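/* For example, on a 32-bit type a left rotate by 8 is rewritten as a
   right rotate by 24.  */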
10297 /* If we have a rotate of a bit operation with the rotate count and
10298 the second operand of the bit operation both constant,
10299 permute the two operations. */
10300 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10301 && (TREE_CODE (arg0) == BIT_AND_EXPR
10302 || TREE_CODE (arg0) == BIT_IOR_EXPR
10303 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10304 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10305 return fold_build2 (TREE_CODE (arg0), type,
10306 fold_build2 (code, type,
10307 TREE_OPERAND (arg0, 0), arg1),
10308 fold_build2 (code, type,
10309 TREE_OPERAND (arg0, 1), arg1));
10311 /* Two consecutive rotates adding up to the width of the mode can
10312 be ignored. */
10313 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10314 && TREE_CODE (arg0) == RROTATE_EXPR
10315 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10316 && TREE_INT_CST_HIGH (arg1) == 0
10317 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10318 && ((TREE_INT_CST_LOW (arg1)
10319 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10320 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10321 return TREE_OPERAND (arg0, 0);
10323 return NULL_TREE;
10325 case MIN_EXPR:
10326 if (operand_equal_p (arg0, arg1, 0))
10327 return omit_one_operand (type, arg0, arg1);
10328 if (INTEGRAL_TYPE_P (type)
10329 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10330 return omit_one_operand (type, arg1, arg0);
10331 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10332 if (tem)
10333 return tem;
10334 goto associate;
10336 case MAX_EXPR:
10337 if (operand_equal_p (arg0, arg1, 0))
10338 return omit_one_operand (type, arg0, arg1);
10339 if (INTEGRAL_TYPE_P (type)
10340 && TYPE_MAX_VALUE (type)
10341 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10342 return omit_one_operand (type, arg1, arg0);
10343 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10344 if (tem)
10345 return tem;
10346 goto associate;
10348 case TRUTH_ANDIF_EXPR:
10349 /* Note that the operands of this must be ints
10350 and their values must be 0 or 1.
10351 ("true" is a fixed value perhaps depending on the language.) */
10352 /* If first arg is constant zero, return it. */
10353 if (integer_zerop (arg0))
10354 return fold_convert (type, arg0);
10355 case TRUTH_AND_EXPR:
10356 /* If either arg is constant true, drop it. */
10357 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10358 return non_lvalue (fold_convert (type, arg1));
10359 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10360 /* Preserve sequence points. */
10361 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10362 return non_lvalue (fold_convert (type, arg0));
10363 /* If second arg is constant zero, result is zero, but first arg
10364 must be evaluated. */
10365 if (integer_zerop (arg1))
10366 return omit_one_operand (type, arg1, arg0);
10367 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10368 case will be handled here. */
10369 if (integer_zerop (arg0))
10370 return omit_one_operand (type, arg0, arg1);
10372 /* !X && X is always false. */
10373 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10374 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10375 return omit_one_operand (type, integer_zero_node, arg1);
10376 /* X && !X is always false. */
10377 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10378 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10379 return omit_one_operand (type, integer_zero_node, arg0);
10381 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10382 means A >= Y && A != MAX, but in this case we know that
10383 A < X <= MAX. */
10385 if (!TREE_SIDE_EFFECTS (arg0)
10386 && !TREE_SIDE_EFFECTS (arg1))
10388 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10389 if (tem && !operand_equal_p (tem, arg0, 0))
10390 return fold_build2 (code, type, tem, arg1);
10392 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10393 if (tem && !operand_equal_p (tem, arg1, 0))
10394 return fold_build2 (code, type, arg0, tem);
10397 truth_andor:
10398 /* We only do these simplifications if we are optimizing. */
10399 if (!optimize)
10400 return NULL_TREE;
10402 /* Check for things like (A || B) && (A || C). We can convert this
10403 to A || (B && C). Note that either operator can be any of the four
10404 truth and/or operations and the transformation will still be
10405 valid. Also note that we only care about order for the
10406 ANDIF and ORIF operators. If B contains side effects, this
10407 might change the truth-value of A. */
10408 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10409 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10410 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10411 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10412 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10413 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10415 tree a00 = TREE_OPERAND (arg0, 0);
10416 tree a01 = TREE_OPERAND (arg0, 1);
10417 tree a10 = TREE_OPERAND (arg1, 0);
10418 tree a11 = TREE_OPERAND (arg1, 1);
10419 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10420 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10421 && (code == TRUTH_AND_EXPR
10422 || code == TRUTH_OR_EXPR));
10424 if (operand_equal_p (a00, a10, 0))
10425 return fold_build2 (TREE_CODE (arg0), type, a00,
10426 fold_build2 (code, type, a01, a11));
10427 else if (commutative && operand_equal_p (a00, a11, 0))
10428 return fold_build2 (TREE_CODE (arg0), type, a00,
10429 fold_build2 (code, type, a01, a10));
10430 else if (commutative && operand_equal_p (a01, a10, 0))
10431 return fold_build2 (TREE_CODE (arg0), type, a01,
10432 fold_build2 (code, type, a00, a11));
10434 /* This case is tricky because we must either have commutative
10435 operators or else A10 must not have side-effects. */
10437 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10438 && operand_equal_p (a01, a11, 0))
10439 return fold_build2 (TREE_CODE (arg0), type,
10440 fold_build2 (code, type, a00, a10),
10441 a01);
10444 /* See if we can build a range comparison. */
10445 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10446 return tem;
10448 /* Check for the possibility of merging component references. If our
10449 lhs is another similar operation, try to merge its rhs with our
10450 rhs. Then try to merge our lhs and rhs. */
10451 if (TREE_CODE (arg0) == code
10452 && 0 != (tem = fold_truthop (code, type,
10453 TREE_OPERAND (arg0, 1), arg1)))
10454 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10456 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10457 return tem;
10459 return NULL_TREE;
10461 case TRUTH_ORIF_EXPR:
10462 /* Note that the operands of this must be ints
10463 and their values must be 0 or true.
10464 ("true" is a fixed value perhaps depending on the language.) */
10465 /* If first arg is constant true, return it. */
10466 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10467 return fold_convert (type, arg0);
10468 case TRUTH_OR_EXPR:
10469 /* If either arg is constant zero, drop it. */
10470 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10471 return non_lvalue (fold_convert (type, arg1));
10472 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10473 /* Preserve sequence points. */
10474 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10475 return non_lvalue (fold_convert (type, arg0));
10476 /* If second arg is constant true, result is true, but we must
10477 evaluate first arg. */
10478 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10479 return omit_one_operand (type, arg1, arg0);
10480 /* Likewise for first arg, but note this only occurs here for
10481 TRUTH_OR_EXPR. */
10482 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10483 return omit_one_operand (type, arg0, arg1);
10485 /* !X || X is always true. */
10486 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10487 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10488 return omit_one_operand (type, integer_one_node, arg1);
10489 /* X || !X is always true. */
10490 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10491 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10492 return omit_one_operand (type, integer_one_node, arg0);
10494 goto truth_andor;
10496 case TRUTH_XOR_EXPR:
10497 /* If the second arg is constant zero, drop it. */
10498 if (integer_zerop (arg1))
10499 return non_lvalue (fold_convert (type, arg0));
10500 /* If the second arg is constant true, this is a logical inversion. */
10501 if (integer_onep (arg1))
10503 /* Only call invert_truthvalue if operand is a truth value. */
10504 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10505 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10506 else
10507 tem = invert_truthvalue (arg0);
10508 return non_lvalue (fold_convert (type, tem));
10510 /* Identical arguments cancel to zero. */
10511 if (operand_equal_p (arg0, arg1, 0))
10512 return omit_one_operand (type, integer_zero_node, arg0);
10514 /* !X ^ X is always true. */
10515 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10516 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10517 return omit_one_operand (type, integer_one_node, arg1);
10519 /* X ^ !X is always true. */
10520 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10521 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10522 return omit_one_operand (type, integer_one_node, arg0);
10524 return NULL_TREE;
10526 case EQ_EXPR:
10527 case NE_EXPR:
10528 tem = fold_comparison (code, type, op0, op1);
10529 if (tem != NULL_TREE)
10530 return tem;
10532 /* bool_var != 0 becomes bool_var. */
10533 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10534 && code == NE_EXPR)
10535 return non_lvalue (fold_convert (type, arg0));
10537 /* bool_var == 1 becomes bool_var. */
10538 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10539 && code == EQ_EXPR)
10540 return non_lvalue (fold_convert (type, arg0));
10542 /* bool_var != 1 becomes !bool_var. */
10543 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10544 && code == NE_EXPR)
10545 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10547 /* bool_var == 0 becomes !bool_var. */
10548 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10549 && code == EQ_EXPR)
10550 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10552 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
10553 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10554 && TREE_CODE (arg1) == INTEGER_CST)
10556 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
10557 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10558 fold_build1 (BIT_NOT_EXPR, cmp_type,
10559 fold_convert (cmp_type, arg1)));
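/* For example, "~x == 0xff" folds to "x == ~0xff", moving the
   BIT_NOT_EXPR onto the constant, where it folds at compile time.  */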
10562 /* If this is an equality comparison of the address of a non-weak
10563 object against zero, then we know the result. */
10564 if (TREE_CODE (arg0) == ADDR_EXPR
10565 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10566 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10567 && integer_zerop (arg1))
10568 return constant_boolean_node (code != EQ_EXPR, type);
10570 /* If this is an equality comparison of the address of two non-weak,
10571 unaliased symbols neither of which are extern (since we do not
10572 have access to attributes for externs), then we know the result. */
10573 if (TREE_CODE (arg0) == ADDR_EXPR
10574 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10575 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10576 && ! lookup_attribute ("alias",
10577 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10578 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10579 && TREE_CODE (arg1) == ADDR_EXPR
10580 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10581 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10582 && ! lookup_attribute ("alias",
10583 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10584 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10586 /* We know that we're looking at the address of two
10587 non-weak, unaliased, static _DECL nodes.
10589 It is both wasteful and incorrect to call operand_equal_p
10590 to compare the two ADDR_EXPR nodes. It is wasteful in that
10591 all we need to do is test pointer equality for the arguments
10592 to the two ADDR_EXPR nodes. It is incorrect to use
10593 operand_equal_p as that function is NOT equivalent to a
10594 C equality test. It can in fact return false for two
10595 objects which would test as equal using the C equality
10596 operator. */
10597 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10598 return constant_boolean_node (equal
10599 ? code == EQ_EXPR : code != EQ_EXPR,
10600 type);
10603 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10604 a MINUS_EXPR of a constant, we can convert it into a comparison with
10605 a revised constant as long as no overflow occurs. */
10606 if (TREE_CODE (arg1) == INTEGER_CST
10607 && (TREE_CODE (arg0) == PLUS_EXPR
10608 || TREE_CODE (arg0) == MINUS_EXPR)
10609 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10610 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10611 ? MINUS_EXPR : PLUS_EXPR,
10612 fold_convert (TREE_TYPE (arg0), arg1),
10613 TREE_OPERAND (arg0, 1), 0))
10614 && ! TREE_CONSTANT_OVERFLOW (tem))
10615 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10617 /* Similarly for a NEGATE_EXPR. */
10618 if (TREE_CODE (arg0) == NEGATE_EXPR
10619 && TREE_CODE (arg1) == INTEGER_CST
10620 && 0 != (tem = negate_expr (arg1))
10621 && TREE_CODE (tem) == INTEGER_CST
10622 && ! TREE_CONSTANT_OVERFLOW (tem))
10623 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10625 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10626 for !=. Don't do this for ordered comparisons due to overflow. */
10627 if (TREE_CODE (arg0) == MINUS_EXPR
10628 && integer_zerop (arg1))
10629 return fold_build2 (code, type,
10630 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10632 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10633 if (TREE_CODE (arg0) == ABS_EXPR
10634 && (integer_zerop (arg1) || real_zerop (arg1)))
10635 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10637 /* If this is an EQ or NE comparison with zero and ARG0 is
10638 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10639 two operations, but the latter can be done in one less insn
10640 on machines that have only two-operand insns or on which a
10641 constant cannot be the first operand. */
10642 if (TREE_CODE (arg0) == BIT_AND_EXPR
10643 && integer_zerop (arg1))
10645 tree arg00 = TREE_OPERAND (arg0, 0);
10646 tree arg01 = TREE_OPERAND (arg0, 1);
10647 if (TREE_CODE (arg00) == LSHIFT_EXPR
10648 && integer_onep (TREE_OPERAND (arg00, 0)))
10650 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10651 arg01, TREE_OPERAND (arg00, 1));
10652 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10653 build_int_cst (TREE_TYPE (arg0), 1));
10654 return fold_build2 (code, type,
10655 fold_convert (TREE_TYPE (arg1), tem), arg1);
10657 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10658 && integer_onep (TREE_OPERAND (arg01, 0)))
10660 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10661 arg00, TREE_OPERAND (arg01, 1));
10662 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10663 build_int_cst (TREE_TYPE (arg0), 1));
10664 return fold_build2 (code, type,
10665 fold_convert (TREE_TYPE (arg1), tem), arg1);
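/* A concrete sketch of the transformation above: the test
   ((1 << n) & flags) != 0 folds to ((flags >> n) & 1) != 0.
   Both forms test the same single bit, but the latter is the
   cheaper form on the machines described above.  */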
10669 /* If this is an NE or EQ comparison of zero against the result of a
10670 signed MOD operation whose second operand is a power of 2, make
10671 the MOD operation unsigned since it is simpler and equivalent. */
10672 if (integer_zerop (arg1)
10673 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10674 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10675 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10676 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10677 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10678 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10680 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10681 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10682 fold_convert (newtype,
10683 TREE_OPERAND (arg0, 0)),
10684 fold_convert (newtype,
10685 TREE_OPERAND (arg0, 1)));
10687 return fold_build2 (code, type, newmod,
10688 fold_convert (newtype, arg1));
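/* For example, assuming 32-bit int: the signed test x % 4 == 0
   folds to (unsigned) x % 4U == 0.  For x == -5, -5 % 4 == -1
   while 0xfffffffbU % 4U == 3; both are non-zero, so the
   comparison against zero is unaffected by the sign change.  */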
10691 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10692 C1 is a valid shift constant, and C2 is a power of two, i.e.
10693 a single bit. */
10694 if (TREE_CODE (arg0) == BIT_AND_EXPR
10695 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10696 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10697 == INTEGER_CST
10698 && integer_pow2p (TREE_OPERAND (arg0, 1))
10699 && integer_zerop (arg1))
10701 tree itype = TREE_TYPE (arg0);
10702 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10703 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10705 /* Check for a valid shift count. */
10706 if (TREE_INT_CST_HIGH (arg001) == 0
10707 && TREE_INT_CST_LOW (arg001) < prec)
10709 tree arg01 = TREE_OPERAND (arg0, 1);
10710 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10711 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10712 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10713 can be rewritten as (X & (C2 << C1)) != 0. */
10714 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10716 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10717 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10718 return fold_build2 (code, type, tem, arg1);
10720 /* Otherwise, for signed (arithmetic) shifts,
10721 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10722 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10723 else if (!TYPE_UNSIGNED (itype))
10724 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10725 arg000, build_int_cst (itype, 0));
10726 /* Otherwise, for unsigned (logical) shifts,
10727 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10728 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10729 else
10730 return omit_one_operand (type,
10731 code == EQ_EXPR ? integer_one_node
10732 : integer_zero_node,
10733 arg000);
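/* A worked instance of the rewrite above, assuming unsigned char X
   with 8-bit precision: in ((x >> 3) & 4) != 0, C1 == 3 and
   C2 == 4 == 1 << 2; since 2 + 3 == 5 < 8, C2 << C1 does not
   overflow and the test folds to (x & 32) != 0.  */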
10737 /* If this is an NE comparison of zero with an AND of one, remove the
10738 comparison since the AND will give the correct value. */
10739 if (code == NE_EXPR
10740 && integer_zerop (arg1)
10741 && TREE_CODE (arg0) == BIT_AND_EXPR
10742 && integer_onep (TREE_OPERAND (arg0, 1)))
10743 return fold_convert (type, arg0);
10745 /* If we have (A & C) == C where C is a power of 2, convert this into
10746 (A & C) != 0. Similarly for NE_EXPR. */
10747 if (TREE_CODE (arg0) == BIT_AND_EXPR
10748 && integer_pow2p (TREE_OPERAND (arg0, 1))
10749 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10750 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10751 arg0, fold_convert (TREE_TYPE (arg0),
10752 integer_zero_node));
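/* For instance, (flags & 8) == 8 folds to (flags & 8) != 0: with
   C the single bit 8, the masked value can only be 0 or 8, so
   equality with C coincides with inequality with zero.  */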
10754 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10755 bit, then fold the expression into A < 0 or A >= 0. */
10756 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10757 if (tem)
10758 return tem;
10760 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10761 Similarly for NE_EXPR. */
10762 if (TREE_CODE (arg0) == BIT_AND_EXPR
10763 && TREE_CODE (arg1) == INTEGER_CST
10764 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10766 tree notc = fold_build1 (BIT_NOT_EXPR,
10767 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10768 TREE_OPERAND (arg0, 1));
10769 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10770 arg1, notc);
10771 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10772 if (integer_nonzerop (dandnotc))
10773 return omit_one_operand (type, rslt, arg0);
10776 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10777 Similarly for NE_EXPR. */
10778 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10779 && TREE_CODE (arg1) == INTEGER_CST
10780 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10782 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10783 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10784 TREE_OPERAND (arg0, 1), notd);
10785 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10786 if (integer_nonzerop (candnotd))
10787 return omit_one_operand (type, rslt, arg0);
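/* E.g. (x | 5) == 2 folds to false: the OR forces bits 0 and 2 on,
   but 2 has bit 0 clear, so C & ~D == 5 & ~2 == 5 != 0 and no X
   can satisfy the equality.  */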
10790 /* If this is a comparison of a field, we may be able to simplify it. */
10791 if (((TREE_CODE (arg0) == COMPONENT_REF
10792 && lang_hooks.can_use_bit_fields_p ())
10793 || TREE_CODE (arg0) == BIT_FIELD_REF)
10794 /* Handle the constant case even without -O
10795 to make sure the warnings are given. */
10796 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10798 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10799 if (t1)
10800 return t1;
10803 /* Optimize comparisons of strlen vs zero to a compare of the
10804 first character of the string vs zero. To wit,
10805 strlen(ptr) == 0 => *ptr == 0
10806 strlen(ptr) != 0 => *ptr != 0
10807 Other cases should reduce to one of these two (or a constant)
10808 due to the return value of strlen being unsigned. */
10809 if (TREE_CODE (arg0) == CALL_EXPR
10810 && integer_zerop (arg1))
10812 tree fndecl = get_callee_fndecl (arg0);
10813 tree arglist;
10815 if (fndecl
10816 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10817 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10818 && (arglist = TREE_OPERAND (arg0, 1))
10819 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10820 && ! TREE_CHAIN (arglist))
10822 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10823 return fold_build2 (code, type, iref,
10824 build_int_cst (TREE_TYPE (iref), 0));
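/* E.g. for char *p, strlen (p) == 0 folds to *p == 0: the string
   has length zero exactly when its first character is the
   terminating NUL, so the potentially O(n) library call is
   replaced by a single load and compare.  */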
10828 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10829 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10830 if (TREE_CODE (arg0) == RSHIFT_EXPR
10831 && integer_zerop (arg1)
10832 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10834 tree arg00 = TREE_OPERAND (arg0, 0);
10835 tree arg01 = TREE_OPERAND (arg0, 1);
10836 tree itype = TREE_TYPE (arg00);
10837 if (TREE_INT_CST_HIGH (arg01) == 0
10838 && TREE_INT_CST_LOW (arg01)
10839 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10841 if (TYPE_UNSIGNED (itype))
10843 itype = lang_hooks.types.signed_type (itype);
10844 arg00 = fold_convert (itype, arg00);
10846 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10847 type, arg00, build_int_cst (itype, 0));
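/* A sketch, assuming 32-bit int: here C == 31, one less than the
   width, so (x >> 31) != 0 folds to x < 0 and (x >> 31) == 0 folds
   to x >= 0; the shift isolates exactly the sign bit, converting
   to a signed type first when X is unsigned.  */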
10851 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10852 if (integer_zerop (arg1)
10853 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10854 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10855 TREE_OPERAND (arg0, 1));
10857 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10858 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10859 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10860 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10861 build_int_cst (TREE_TYPE (arg1), 0));
10862 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10863 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10864 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10865 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10866 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10867 build_int_cst (TREE_TYPE (arg1), 0));
10869 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10870 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10871 && TREE_CODE (arg1) == INTEGER_CST
10872 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10873 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10874 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10875 TREE_OPERAND (arg0, 1), arg1));
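/* For example, (x ^ 3) == 5 folds to x == (3 ^ 5), i.e. x == 6,
   because XOR with a constant is its own inverse.  */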
10877 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10878 (X & C) == 0 when C is a single bit. */
10879 if (TREE_CODE (arg0) == BIT_AND_EXPR
10880 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10881 && integer_zerop (arg1)
10882 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10884 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10885 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10886 TREE_OPERAND (arg0, 1));
10887 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10888 type, tem, arg1);
10891 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10892 constant C is a power of two, i.e. a single bit. */
10893 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10894 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10895 && integer_zerop (arg1)
10896 && integer_pow2p (TREE_OPERAND (arg0, 1))
10897 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10898 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10900 tree arg00 = TREE_OPERAND (arg0, 0);
10901 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10902 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10905 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10906 when C is a power of two, i.e. a single bit. */
10907 if (TREE_CODE (arg0) == BIT_AND_EXPR
10908 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10909 && integer_zerop (arg1)
10910 && integer_pow2p (TREE_OPERAND (arg0, 1))
10911 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10912 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10914 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10915 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10916 arg000, TREE_OPERAND (arg0, 1));
10917 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10918 tem, build_int_cst (TREE_TYPE (tem), 0));
10921 if (integer_zerop (arg1)
10922 && tree_expr_nonzero_p (arg0))
10924 tree res = constant_boolean_node (code == NE_EXPR, type);
10925 return omit_one_operand (type, res, arg0);
10927 return NULL_TREE;
10929 case LT_EXPR:
10930 case GT_EXPR:
10931 case LE_EXPR:
10932 case GE_EXPR:
10933 tem = fold_comparison (code, type, op0, op1);
10934 if (tem != NULL_TREE)
10935 return tem;
10937 /* Transform comparisons of the form X +- C CMP X. */
10938 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10939 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10940 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10941 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10942 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10943 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10945 tree arg01 = TREE_OPERAND (arg0, 1);
10946 enum tree_code code0 = TREE_CODE (arg0);
10947 int is_positive;
10949 if (TREE_CODE (arg01) == REAL_CST)
10950 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10951 else
10952 is_positive = tree_int_cst_sgn (arg01);
10954 /* (X - c) > X becomes false. */
10955 if (code == GT_EXPR
10956 && ((code0 == MINUS_EXPR && is_positive >= 0)
10957 || (code0 == PLUS_EXPR && is_positive <= 0)))
10959 if (TREE_CODE (arg01) == INTEGER_CST
10960 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10961 fold_overflow_warning (("assuming signed overflow does not "
10962 "occur when assuming that (X - c) > X "
10963 "is always false"),
10964 WARN_STRICT_OVERFLOW_ALL);
10965 return constant_boolean_node (0, type);
10968 /* Likewise (X + c) < X becomes false. */
10969 if (code == LT_EXPR
10970 && ((code0 == PLUS_EXPR && is_positive >= 0)
10971 || (code0 == MINUS_EXPR && is_positive <= 0)))
10973 if (TREE_CODE (arg01) == INTEGER_CST
10974 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10975 fold_overflow_warning (("assuming signed overflow does not "
10976 "occur when assuming that "
10977 "(X + c) < X is always false"),
10978 WARN_STRICT_OVERFLOW_ALL);
10979 return constant_boolean_node (0, type);
10982 /* Convert (X - c) <= X to true. */
10983 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10984 && code == LE_EXPR
10985 && ((code0 == MINUS_EXPR && is_positive >= 0)
10986 || (code0 == PLUS_EXPR && is_positive <= 0)))
10988 if (TREE_CODE (arg01) == INTEGER_CST
10989 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10990 fold_overflow_warning (("assuming signed overflow does not "
10991 "occur when assuming that "
10992 "(X - c) <= X is always true"),
10993 WARN_STRICT_OVERFLOW_ALL);
10994 return constant_boolean_node (1, type);
10997 /* Convert (X + c) >= X to true. */
10998 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10999 && code == GE_EXPR
11000 && ((code0 == PLUS_EXPR && is_positive >= 0)
11001 || (code0 == MINUS_EXPR && is_positive <= 0)))
11003 if (TREE_CODE (arg01) == INTEGER_CST
11004 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11005 fold_overflow_warning (("assuming signed overflow does not "
11006 "occur when assuming that "
11007 "(X + c) >= X is always true"),
11008 WARN_STRICT_OVERFLOW_ALL);
11009 return constant_boolean_node (1, type);
11012 if (TREE_CODE (arg01) == INTEGER_CST)
11014 /* Convert X + c > X and X - c < X to true for integers. */
11015 if (code == GT_EXPR
11016 && ((code0 == PLUS_EXPR && is_positive > 0)
11017 || (code0 == MINUS_EXPR && is_positive < 0)))
11019 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11020 fold_overflow_warning (("assuming signed overflow does "
11021 "not occur when assuming that "
11022 "(X + c) > X is always true"),
11023 WARN_STRICT_OVERFLOW_ALL);
11024 return constant_boolean_node (1, type);
11027 if (code == LT_EXPR
11028 && ((code0 == MINUS_EXPR && is_positive > 0)
11029 || (code0 == PLUS_EXPR && is_positive < 0)))
11031 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11032 fold_overflow_warning (("assuming signed overflow does "
11033 "not occur when assuming that "
11034 "(X - c) < X is always true"),
11035 WARN_STRICT_OVERFLOW_ALL);
11036 return constant_boolean_node (1, type);
11039 /* Convert X + c <= X and X - c >= X to false for integers. */
11040 if (code == LE_EXPR
11041 && ((code0 == PLUS_EXPR && is_positive > 0)
11042 || (code0 == MINUS_EXPR && is_positive < 0)))
11044 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11045 fold_overflow_warning (("assuming signed overflow does "
11046 "not occur when assuming that "
11047 "(X + c) <= X is always false"),
11048 WARN_STRICT_OVERFLOW_ALL);
11049 return constant_boolean_node (0, type);
11052 if (code == GE_EXPR
11053 && ((code0 == MINUS_EXPR && is_positive > 0)
11054 || (code0 == PLUS_EXPR && is_positive < 0)))
11056 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11057 fold_overflow_warning (("assuming signed overflow does "
11058 "not occur when assuming that "
11059 "(X - c) >= X is always true"),
11060 WARN_STRICT_OVERFLOW_ALL);
11061 return constant_boolean_node (0, type);
11066 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11067 This transformation affects the cases which are handled in later
11068 optimizations involving comparisons with non-negative constants. */
11069 if (TREE_CODE (arg1) == INTEGER_CST
11070 && TREE_CODE (arg0) != INTEGER_CST
11071 && tree_int_cst_sgn (arg1) > 0)
11073 if (code == GE_EXPR)
11075 arg1 = const_binop (MINUS_EXPR, arg1,
11076 build_int_cst (TREE_TYPE (arg1), 1), 0);
11077 return fold_build2 (GT_EXPR, type, arg0,
11078 fold_convert (TREE_TYPE (arg0), arg1));
11080 if (code == LT_EXPR)
11082 arg1 = const_binop (MINUS_EXPR, arg1,
11083 build_int_cst (TREE_TYPE (arg1), 1), 0);
11084 return fold_build2 (LE_EXPR, type, arg0,
11085 fold_convert (TREE_TYPE (arg0), arg1));
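/* For instance, with C == 1: x >= 1 folds to x > 0 and x < 1 folds
   to x <= 0.  Since C > 0 is required, C - 1 cannot wrap, and the
   rewritten constant stays non-negative as the later optimizations
   expect.  */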
11089 /* Comparisons with the highest or lowest possible integer of
11090 the specified size will have known values. */
11092 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
11094 if (TREE_CODE (arg1) == INTEGER_CST
11095 && ! TREE_CONSTANT_OVERFLOW (arg1)
11096 && width <= 2 * HOST_BITS_PER_WIDE_INT
11097 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11098 || POINTER_TYPE_P (TREE_TYPE (arg1))))
11100 HOST_WIDE_INT signed_max_hi;
11101 unsigned HOST_WIDE_INT signed_max_lo;
11102 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11104 if (width <= HOST_BITS_PER_WIDE_INT)
11106 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11107 - 1;
11108 signed_max_hi = 0;
11109 max_hi = 0;
11111 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11113 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11114 min_lo = 0;
11115 min_hi = 0;
11117 else
11119 max_lo = signed_max_lo;
11120 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11121 min_hi = -1;
11124 else
11126 width -= HOST_BITS_PER_WIDE_INT;
11127 signed_max_lo = -1;
11128 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11129 - 1;
11130 max_lo = -1;
11131 min_lo = 0;
11133 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11135 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11136 min_hi = 0;
11138 else
11140 max_hi = signed_max_hi;
11141 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
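/* A worked sketch of these limits, assuming a 32-bit type on a
   host with 64-bit HOST_WIDE_INT (the *_hi words are then just
   sign/zero extensions): signed_max_lo == 0x7fffffff; for an
   unsigned type max_lo == 0xffffffff and min_lo == 0; for a signed
   type max_lo == 0x7fffffff and min_lo == 0xffffffff80000000, the
   sign-extended 0x80000000, with min_hi == -1.  */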
11145 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11146 && TREE_INT_CST_LOW (arg1) == max_lo)
11147 switch (code)
11149 case GT_EXPR:
11150 return omit_one_operand (type, integer_zero_node, arg0);
11152 case GE_EXPR:
11153 return fold_build2 (EQ_EXPR, type, op0, op1);
11155 case LE_EXPR:
11156 return omit_one_operand (type, integer_one_node, arg0);
11158 case LT_EXPR:
11159 return fold_build2 (NE_EXPR, type, op0, op1);
11161 /* The GE_EXPR and LT_EXPR cases above are not normally
11162 reached because of previous transformations. */
11164 default:
11165 break;
11167 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11168 == max_hi
11169 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11170 switch (code)
11172 case GT_EXPR:
11173 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11174 return fold_build2 (EQ_EXPR, type,
11175 fold_convert (TREE_TYPE (arg1), arg0),
11176 arg1);
11177 case LE_EXPR:
11178 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11179 return fold_build2 (NE_EXPR, type,
11180 fold_convert (TREE_TYPE (arg1), arg0),
11181 arg1);
11182 default:
11183 break;
11185 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11186 == min_hi
11187 && TREE_INT_CST_LOW (arg1) == min_lo)
11188 switch (code)
11190 case LT_EXPR:
11191 return omit_one_operand (type, integer_zero_node, arg0);
11193 case LE_EXPR:
11194 return fold_build2 (EQ_EXPR, type, op0, op1);
11196 case GE_EXPR:
11197 return omit_one_operand (type, integer_one_node, arg0);
11199 case GT_EXPR:
11200 return fold_build2 (NE_EXPR, type, op0, op1);
11202 default:
11203 break;
11205 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11206 == min_hi
11207 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11208 switch (code)
11210 case GE_EXPR:
11211 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11212 return fold_build2 (NE_EXPR, type,
11213 fold_convert (TREE_TYPE (arg1), arg0),
11214 arg1);
11215 case LT_EXPR:
11216 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11217 return fold_build2 (EQ_EXPR, type,
11218 fold_convert (TREE_TYPE (arg1), arg0),
11219 arg1);
11220 default:
11221 break;
11224 else if (!in_gimple_form
11225 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
11226 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11227 && TYPE_UNSIGNED (TREE_TYPE (arg1))
11228 /* signed_type does not work on pointer types. */
11229 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
11231 /* The following case also applies to X < signed_max+1
11232 and X >= signed_max+1 because of previous transformations. */
11233 if (code == LE_EXPR || code == GT_EXPR)
11235 tree st;
11236 st = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11237 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
11238 type, fold_convert (st, arg0),
11239 build_int_cst (st, 0));
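/* For example, with 32-bit types and unsigned x: x <= 2147483647
   folds to (int) x >= 0, and x > 2147483647 folds to (int) x < 0,
   since the unsigned values above SIGNED_MAX are exactly those
   whose sign bit is set when reinterpreted as signed.  */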
11245 /* If we are comparing an ABS_EXPR with a constant, we can
11246 convert all the cases into explicit comparisons, but they may
11247 well not be faster than doing the ABS and one comparison.
11248 But ABS (X) <= C is a range comparison, which becomes a subtraction
11249 and a comparison, and is probably faster. */
11250 if (code == LE_EXPR
11251 && TREE_CODE (arg1) == INTEGER_CST
11252 && TREE_CODE (arg0) == ABS_EXPR
11253 && ! TREE_SIDE_EFFECTS (arg0)
11254 && (0 != (tem = negate_expr (arg1)))
11255 && TREE_CODE (tem) == INTEGER_CST
11256 && ! TREE_CONSTANT_OVERFLOW (tem))
11257 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11258 build2 (GE_EXPR, type,
11259 TREE_OPERAND (arg0, 0), tem),
11260 build2 (LE_EXPR, type,
11261 TREE_OPERAND (arg0, 0), arg1));
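/* For instance, ABS (x) <= 5 folds to x >= -5 && x <= 5.  The
   negated constant is checked first, since negating a value like
   INT_MIN would overflow, in which case the transformation is
   skipped.  */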
11263 /* Convert ABS_EXPR<x> >= 0 to true. */
11264 strict_overflow_p = false;
11265 if (code == GE_EXPR
11266 && (integer_zerop (arg1)
11267 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11268 && real_zerop (arg1)))
11269 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11271 if (strict_overflow_p)
11272 fold_overflow_warning (("assuming signed overflow does not occur "
11273 "when simplifying comparison of "
11274 "absolute value and zero"),
11275 WARN_STRICT_OVERFLOW_CONDITIONAL);
11276 return omit_one_operand (type, integer_one_node, arg0);
11279 /* Convert ABS_EXPR<x> < 0 to false. */
11280 strict_overflow_p = false;
11281 if (code == LT_EXPR
11282 && (integer_zerop (arg1) || real_zerop (arg1))
11283 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11285 if (strict_overflow_p)
11286 fold_overflow_warning (("assuming signed overflow does not occur "
11287 "when simplifying comparison of "
11288 "absolute value and zero"),
11289 WARN_STRICT_OVERFLOW_CONDITIONAL);
11290 return omit_one_operand (type, integer_zero_node, arg0);
11293 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11294 and similarly for >= into !=. */
11295 if ((code == LT_EXPR || code == GE_EXPR)
11296 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11297 && TREE_CODE (arg1) == LSHIFT_EXPR
11298 && integer_onep (TREE_OPERAND (arg1, 0)))
11299 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11300 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11301 TREE_OPERAND (arg1, 1)),
11302 build_int_cst (TREE_TYPE (arg0), 0));
11304 if ((code == LT_EXPR || code == GE_EXPR)
11305 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11306 && (TREE_CODE (arg1) == NOP_EXPR
11307 || TREE_CODE (arg1) == CONVERT_EXPR)
11308 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11309 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11310 return
11311 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11312 fold_convert (TREE_TYPE (arg0),
11313 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11314 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11315 1))),
11316 build_int_cst (TREE_TYPE (arg0), 0));
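/* A sketch of the two rewrites above, for unsigned x and a valid
   shift count y: x < (1U << y) folds to (x >> y) == 0 and
   x >= (1U << y) folds to (x >> y) != 0, because X is below the
   single bit 1 << y exactly when no bit at position y or higher
   is set.  */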
11318 return NULL_TREE;
11320 case UNORDERED_EXPR:
11321 case ORDERED_EXPR:
11322 case UNLT_EXPR:
11323 case UNLE_EXPR:
11324 case UNGT_EXPR:
11325 case UNGE_EXPR:
11326 case UNEQ_EXPR:
11327 case LTGT_EXPR:
11328 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11330 t1 = fold_relational_const (code, type, arg0, arg1);
11331 if (t1 != NULL_TREE)
11332 return t1;
11335 /* If the first operand is NaN, the result is constant. */
11336 if (TREE_CODE (arg0) == REAL_CST
11337 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11338 && (code != LTGT_EXPR || ! flag_trapping_math))
11340 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11341 ? integer_zero_node
11342 : integer_one_node;
11343 return omit_one_operand (type, t1, arg1);
11346 /* If the second operand is NaN, the result is constant. */
11347 if (TREE_CODE (arg1) == REAL_CST
11348 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11349 && (code != LTGT_EXPR || ! flag_trapping_math))
11351 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11352 ? integer_zero_node
11353 : integer_one_node;
11354 return omit_one_operand (type, t1, arg0);
11357 /* Simplify unordered comparison of something with itself. */
11358 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11359 && operand_equal_p (arg0, arg1, 0))
11360 return constant_boolean_node (1, type);
11362 if (code == LTGT_EXPR
11363 && !flag_trapping_math
11364 && operand_equal_p (arg0, arg1, 0))
11365 return constant_boolean_node (0, type);
11367 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11369 tree targ0 = strip_float_extensions (arg0);
11370 tree targ1 = strip_float_extensions (arg1);
11371 tree newtype = TREE_TYPE (targ0);
11373 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11374 newtype = TREE_TYPE (targ1);
11376 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11377 return fold_build2 (code, type, fold_convert (newtype, targ0),
11378 fold_convert (newtype, targ1));
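/* E.g. given float f, g: the comparison (double) f < (double) g
   folds back to f < g, because widening a float to double is exact
   and therefore order-preserving, so the extra precision can never
   change the outcome.  */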
11381 return NULL_TREE;
11383 case COMPOUND_EXPR:
11384 /* When pedantic, a compound expression can be neither an lvalue
11385 nor an integer constant expression. */
11386 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11387 return NULL_TREE;
11388 /* Don't let (0, 0) be a null pointer constant. */
11389 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11390 : fold_convert (type, arg1);
11391 return pedantic_non_lvalue (tem);
11393 case COMPLEX_EXPR:
11394 if ((TREE_CODE (arg0) == REAL_CST
11395 && TREE_CODE (arg1) == REAL_CST)
11396 || (TREE_CODE (arg0) == INTEGER_CST
11397 && TREE_CODE (arg1) == INTEGER_CST))
11398 return build_complex (type, arg0, arg1);
11399 return NULL_TREE;
11401 case ASSERT_EXPR:
11402 /* An ASSERT_EXPR should never be passed to fold_binary. */
11403 gcc_unreachable ();
11405 default:
11406 return NULL_TREE;
11407 } /* switch (code) */
11410 /* Callback for walk_tree, looking for a LABEL_EXPR.
11411 Returns *TP if it is a LABEL_EXPR, and NULL_TREE otherwise.
11412 Does not descend into the sub-tree of a GOTO_EXPR. */
11414 static tree
11415 contains_label_1 (tree *tp,
11416 int *walk_subtrees,
11417 void *data ATTRIBUTE_UNUSED)
11419 switch (TREE_CODE (*tp))
11421 case LABEL_EXPR:
11422 return *tp;
11423 case GOTO_EXPR:
11424 *walk_subtrees = 0;
11425 /* FALLTHRU */
11426 default:
11427 return NULL_TREE;
11431 /* Checks whether the sub-tree ST contains a LABEL_EXPR which is
11432 accessible from outside the sub-tree. Returns true if such a
11433 label is found, and false otherwise. */
11435 static bool
11436 contains_label_p (tree st)
11438 return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
11441 /* Fold a ternary expression of code CODE and type TYPE with operands
11442 OP0, OP1, and OP2. Return the folded expression if folding is
11443 successful. Otherwise, return NULL_TREE. */
11445 tree
11446 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11448 tree tem;
11449 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11450 enum tree_code_class kind = TREE_CODE_CLASS (code);
11452 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11453 && TREE_CODE_LENGTH (code) == 3);
11455 /* Strip any conversions that don't change the mode. This is safe
11456 for every expression, except for a comparison expression because
11457 its signedness is derived from its operands. So, in the latter
11458 case, only strip conversions that don't change the signedness.
11460 Note that this is done as an internal manipulation within the
11461 constant folder, in order to find the simplest representation of
11462 the arguments so that their form can be studied. In any cases,
11463 the appropriate type conversions should be put back in the tree
11464 that will get out of the constant folder. */
11465 if (op0)
11467 arg0 = op0;
11468 STRIP_NOPS (arg0);
11471 if (op1)
11473 arg1 = op1;
11474 STRIP_NOPS (arg1);
11477 switch (code)
11479 case COMPONENT_REF:
11480 if (TREE_CODE (arg0) == CONSTRUCTOR
11481 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11483 unsigned HOST_WIDE_INT idx;
11484 tree field, value;
11485 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11486 if (field == arg1)
11487 return value;
11489 return NULL_TREE;
11491 case COND_EXPR:
11492 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11493 so all simple results must be passed through pedantic_non_lvalue. */
11494 if (TREE_CODE (arg0) == INTEGER_CST)
11496 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11497 tem = integer_zerop (arg0) ? op2 : op1;
11498 /* Only optimize constant conditions when the selected branch
11499 has the same type as the COND_EXPR. This avoids optimizing
11500 away "c ? x : throw", where the throw has a void type.
11501 Avoid throwing away an operand that contains a label. */
11502 if ((!TREE_SIDE_EFFECTS (unused_op)
11503 || !contains_label_p (unused_op))
11504 && (! VOID_TYPE_P (TREE_TYPE (tem))
11505 || VOID_TYPE_P (type)))
11506 return pedantic_non_lvalue (tem);
11507 return NULL_TREE;
11509 if (operand_equal_p (arg1, op2, 0))
11510 return pedantic_omit_one_operand (type, arg1, arg0);
11512 /* If we have A op B ? A : C, we may be able to convert this to a
11513 simpler expression, depending on the operation and the values
11514 of B and C. Signed zeros prevent all of these transformations,
11515 for reasons given above each one.
11517 Also try swapping the arguments and inverting the conditional. */
11518 if (COMPARISON_CLASS_P (arg0)
11519 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11520 arg1, TREE_OPERAND (arg0, 1))
11521 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11523 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11524 if (tem)
11525 return tem;
11528 if (COMPARISON_CLASS_P (arg0)
11529 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11530 op2,
11531 TREE_OPERAND (arg0, 1))
11532 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11534 tem = fold_truth_not_expr (arg0);
11535 if (tem && COMPARISON_CLASS_P (tem))
11537 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11538 if (tem)
11539 return tem;
11543 /* If the second operand is simpler than the third, swap them
11544 since that produces better jump optimization results. */
11545 if (truth_value_p (TREE_CODE (arg0))
11546 && tree_swap_operands_p (op1, op2, false))
11548 /* See if this can be inverted. If it can't, possibly because
11549 it was a floating-point inequality comparison, don't do
11550 anything. */
11551 tem = fold_truth_not_expr (arg0);
11552 if (tem)
11553 return fold_build3 (code, type, tem, op2, op1);
11556 /* Convert A ? 1 : 0 to simply A. */
11557 if (integer_onep (op1)
11558 && integer_zerop (op2)
11559 /* If we try to convert OP0 to our type, the
11560 call to fold will try to move the conversion inside
11561 a COND, which will recurse. In that case, the COND_EXPR
11562 is probably the best choice, so leave it alone. */
11563 && type == TREE_TYPE (arg0))
11564 return pedantic_non_lvalue (arg0);
11566 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11567 over COND_EXPR in cases such as floating point comparisons. */
11568 if (integer_zerop (op1)
11569 && integer_onep (op2)
11570 && truth_value_p (TREE_CODE (arg0)))
11571 return pedantic_non_lvalue (fold_convert (type,
11572 invert_truthvalue (arg0)));
11574 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11575 if (TREE_CODE (arg0) == LT_EXPR
11576 && integer_zerop (TREE_OPERAND (arg0, 1))
11577 && integer_zerop (op2)
11578 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11580 /* sign_bit_p only checks ARG1 bits within A's precision.
11581 If <sign bit of A> has wider type than A, bits outside
11582 of A's precision in <sign bit of A> need to be checked.
11583 If they are all 0, this optimization needs to be done
11584 in unsigned A's type; if they are all 1, in signed A's type;
11585 otherwise this can't be done. */
11586 if (TYPE_PRECISION (TREE_TYPE (tem))
11587 < TYPE_PRECISION (TREE_TYPE (arg1))
11588 && TYPE_PRECISION (TREE_TYPE (tem))
11589 < TYPE_PRECISION (type))
11591 unsigned HOST_WIDE_INT mask_lo;
11592 HOST_WIDE_INT mask_hi;
11593 int inner_width, outer_width;
11594 tree tem_type;
11596 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11597 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11598 if (outer_width > TYPE_PRECISION (type))
11599 outer_width = TYPE_PRECISION (type);
11601 if (outer_width > HOST_BITS_PER_WIDE_INT)
11603 mask_hi = ((unsigned HOST_WIDE_INT) -1
11604 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11605 mask_lo = -1;
11607 else
11609 mask_hi = 0;
11610 mask_lo = ((unsigned HOST_WIDE_INT) -1
11611 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11613 if (inner_width > HOST_BITS_PER_WIDE_INT)
11615 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11616 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11617 mask_lo = 0;
11619 else
11620 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11621 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11623 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11624 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11626 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11627 tem = fold_convert (tem_type, tem);
11629 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11630 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11632 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11633 tem = fold_convert (tem_type, tem);
11635 else
11636 tem = NULL;
11639 if (tem)
11640 return fold_convert (type,
11641 fold_build2 (BIT_AND_EXPR,
11642 TREE_TYPE (tem), tem,
11643 fold_convert (TREE_TYPE (tem),
11644 arg1)));
11647 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11648 already handled above. */
11649 if (TREE_CODE (arg0) == BIT_AND_EXPR
11650 && integer_onep (TREE_OPERAND (arg0, 1))
11651 && integer_zerop (op2)
11652 && integer_pow2p (arg1))
11654 tree tem = TREE_OPERAND (arg0, 0);
11655 STRIP_NOPS (tem);
11656 if (TREE_CODE (tem) == RSHIFT_EXPR
11657 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11658 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11659 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11660 return fold_build2 (BIT_AND_EXPR, type,
11661 TREE_OPERAND (tem, 0), arg1);
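/* For instance, with N == 4: ((a >> 4) & 1) ? 16 : 0 folds to
   a & 16, since the condition tests exactly bit 4 of A and the
   selected value 1 << 4 is that same bit.  */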
11664 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11665 is probably obsolete because the first operand should be a
11666 truth value (that's why we have the two cases above), but let's
11667 leave it in until we can confirm this for all front-ends. */
11668 if (integer_zerop (op2)
11669 && TREE_CODE (arg0) == NE_EXPR
11670 && integer_zerop (TREE_OPERAND (arg0, 1))
11671 && integer_pow2p (arg1)
11672 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11673 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11674 arg1, OEP_ONLY_CONST))
11675 return pedantic_non_lvalue (fold_convert (type,
11676 TREE_OPERAND (arg0, 0)));
11678 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11679 if (integer_zerop (op2)
11680 && truth_value_p (TREE_CODE (arg0))
11681 && truth_value_p (TREE_CODE (arg1)))
11682 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11683 fold_convert (type, arg0),
11684 arg1);
11686 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11687 if (integer_onep (op2)
11688 && truth_value_p (TREE_CODE (arg0))
11689 && truth_value_p (TREE_CODE (arg1)))
11691 /* Only perform transformation if ARG0 is easily inverted. */
11692 tem = fold_truth_not_expr (arg0);
11693 if (tem)
11694 return fold_build2 (TRUTH_ORIF_EXPR, type,
11695 fold_convert (type, tem),
11696 arg1);
11699 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11700 if (integer_zerop (arg1)
11701 && truth_value_p (TREE_CODE (arg0))
11702 && truth_value_p (TREE_CODE (op2)))
11704 /* Only perform transformation if ARG0 is easily inverted. */
11705 tem = fold_truth_not_expr (arg0);
11706 if (tem)
11707 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11708 fold_convert (type, tem),
11709 op2);
11712 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11713 if (integer_onep (arg1)
11714 && truth_value_p (TREE_CODE (arg0))
11715 && truth_value_p (TREE_CODE (op2)))
11716 return fold_build2 (TRUTH_ORIF_EXPR, type,
11717 fold_convert (type, arg0),
11718 op2);
11720 return NULL_TREE;
11722 case CALL_EXPR:
11723 /* Check for a built-in function. */
11724 if (TREE_CODE (op0) == ADDR_EXPR
11725 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11726 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11727 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11728 return NULL_TREE;
11730 case BIT_FIELD_REF:
11731 if (TREE_CODE (arg0) == VECTOR_CST
11732 && type == TREE_TYPE (TREE_TYPE (arg0))
11733 && host_integerp (arg1, 1)
11734 && host_integerp (op2, 1))
11736 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11737 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11739 if (width != 0
11740 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11741 && (idx % width) == 0
11742 && (idx = idx / width)
11743 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11745 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11746 while (idx-- > 0 && elements)
11747 elements = TREE_CHAIN (elements);
11748 if (elements)
11749 return TREE_VALUE (elements);
11750 else
11751 return fold_convert (type, integer_zero_node);
11754 return NULL_TREE;
11756 default:
11757 return NULL_TREE;
11758 } /* switch (code) */
11761 /* Perform constant folding and related simplification of EXPR.
11762 The related simplifications include x*1 => x, x*0 => 0, etc.,
11763 and application of the associative law.
11764 NOP_EXPR conversions may be removed freely (as long as we
11765 are careful not to change the type of the overall expression).
11766 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11767 but we can constant-fold them if they have constant operands. */
11769 #ifdef ENABLE_FOLD_CHECKING
11770 # define fold(x) fold_1 (x)
11771 static tree fold_1 (tree);
11772 static
11773 #endif
11774 tree
11775 fold (tree expr)
11777 const tree t = expr;
11778 enum tree_code code = TREE_CODE (t);
11779 enum tree_code_class kind = TREE_CODE_CLASS (code);
11780 tree tem;
11782 /* Return right away if a constant. */
11783 if (kind == tcc_constant)
11784 return t;
11786 if (IS_EXPR_CODE_CLASS (kind))
11788 tree type = TREE_TYPE (t);
11789 tree op0, op1, op2;
11791 switch (TREE_CODE_LENGTH (code))
11793 case 1:
11794 op0 = TREE_OPERAND (t, 0);
11795 tem = fold_unary (code, type, op0);
11796 return tem ? tem : expr;
11797 case 2:
11798 op0 = TREE_OPERAND (t, 0);
11799 op1 = TREE_OPERAND (t, 1);
11800 tem = fold_binary (code, type, op0, op1);
11801 return tem ? tem : expr;
11802 case 3:
11803 op0 = TREE_OPERAND (t, 0);
11804 op1 = TREE_OPERAND (t, 1);
11805 op2 = TREE_OPERAND (t, 2);
11806 tem = fold_ternary (code, type, op0, op1, op2);
11807 return tem ? tem : expr;
11808 default:
11809 break;
11813 switch (code)
11815 case CONST_DECL:
11816 return fold (DECL_INITIAL (t));
11818 default:
11819 return t;
11820 } /* switch (code) */
11823 #ifdef ENABLE_FOLD_CHECKING
11824 #undef fold
11826 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11827 static void fold_check_failed (tree, tree);
11828 void print_fold_checksum (tree);
11830 /* When --enable-checking=fold, compute a digest of expr before
11831 and after the actual fold call to verify that fold did not
11832 accidentally change the original expr. */
11834 tree
11835 fold (tree expr)
11837 tree ret;
11838 struct md5_ctx ctx;
11839 unsigned char checksum_before[16], checksum_after[16];
11840 htab_t ht;
11842 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11843 md5_init_ctx (&ctx);
11844 fold_checksum_tree (expr, &ctx, ht);
11845 md5_finish_ctx (&ctx, checksum_before);
11846 htab_empty (ht);
11848 ret = fold_1 (expr);
11850 md5_init_ctx (&ctx);
11851 fold_checksum_tree (expr, &ctx, ht);
11852 md5_finish_ctx (&ctx, checksum_after);
11853 htab_delete (ht);
11855 if (memcmp (checksum_before, checksum_after, 16))
11856 fold_check_failed (expr, ret);
11858 return ret;
11861 void
11862 print_fold_checksum (tree expr)
11864 struct md5_ctx ctx;
11865 unsigned char checksum[16], cnt;
11866 htab_t ht;
11868 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11869 md5_init_ctx (&ctx);
11870 fold_checksum_tree (expr, &ctx, ht);
11871 md5_finish_ctx (&ctx, checksum);
11872 htab_delete (ht);
11873 for (cnt = 0; cnt < 16; ++cnt)
11874 fprintf (stderr, "%02x", checksum[cnt]);
11875 putc ('\n', stderr);
11878 static void
11879 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11881 internal_error ("fold check: original tree changed by fold");
11884 static void
11885 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11887 void **slot;
11888 enum tree_code code;
11889 struct tree_function_decl buf;
11890 int i, len;
11892 recursive_label:
11894 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11895 <= sizeof (struct tree_function_decl))
11896 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11897 if (expr == NULL)
11898 return;
11899 slot = htab_find_slot (ht, expr, INSERT);
11900 if (*slot != NULL)
11901 return;
11902 *slot = expr;
11903 code = TREE_CODE (expr);
11904 if (TREE_CODE_CLASS (code) == tcc_declaration
11905 && DECL_ASSEMBLER_NAME_SET_P (expr))
11907 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11908 memcpy ((char *) &buf, expr, tree_size (expr));
11909 expr = (tree) &buf;
11910 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11912 else if (TREE_CODE_CLASS (code) == tcc_type
11913 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11914 || TYPE_CACHED_VALUES_P (expr)
11915 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11917 /* Allow these fields to be modified. */
11918 memcpy ((char *) &buf, expr, tree_size (expr));
11919 expr = (tree) &buf;
11920 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11921 TYPE_POINTER_TO (expr) = NULL;
11922 TYPE_REFERENCE_TO (expr) = NULL;
11923 if (TYPE_CACHED_VALUES_P (expr))
11925 TYPE_CACHED_VALUES_P (expr) = 0;
11926 TYPE_CACHED_VALUES (expr) = NULL;
11929 md5_process_bytes (expr, tree_size (expr), ctx);
11930 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11931 if (TREE_CODE_CLASS (code) != tcc_type
11932 && TREE_CODE_CLASS (code) != tcc_declaration
11933 && code != TREE_LIST)
11934 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11935 switch (TREE_CODE_CLASS (code))
11937 case tcc_constant:
11938 switch (code)
11940 case STRING_CST:
11941 md5_process_bytes (TREE_STRING_POINTER (expr),
11942 TREE_STRING_LENGTH (expr), ctx);
11943 break;
11944 case COMPLEX_CST:
11945 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11946 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11947 break;
11948 case VECTOR_CST:
11949 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11950 break;
11951 default:
11952 break;
11954 break;
11955 case tcc_exceptional:
11956 switch (code)
11958 case TREE_LIST:
11959 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11960 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11961 expr = TREE_CHAIN (expr);
11962 goto recursive_label;
11963 break;
11964 case TREE_VEC:
11965 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11966 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11967 break;
11968 default:
11969 break;
11971 break;
11972 case tcc_expression:
11973 case tcc_reference:
11974 case tcc_comparison:
11975 case tcc_unary:
11976 case tcc_binary:
11977 case tcc_statement:
11978 len = TREE_CODE_LENGTH (code);
11979 for (i = 0; i < len; ++i)
11980 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11981 break;
11982 case tcc_declaration:
11983 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11984 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11985 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11987 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11988 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11989 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11990 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11991 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11993 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11994 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11996 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11998 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11999 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12000 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12002 break;
12003 case tcc_type:
12004 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12005 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12006 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12007 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12008 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12009 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12010 if (INTEGRAL_TYPE_P (expr)
12011 || SCALAR_FLOAT_TYPE_P (expr))
12013 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12014 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12016 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12017 if (TREE_CODE (expr) == RECORD_TYPE
12018 || TREE_CODE (expr) == UNION_TYPE
12019 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12020 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12021 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12022 break;
12023 default:
12024 break;
12028 #endif
12030 /* Fold a unary tree expression with code CODE of type TYPE with an
12031 operand OP0. Return a folded expression if successful. Otherwise,
12032 return a tree expression with code CODE of type TYPE with an
12033 operand OP0. */
12035 tree
12036 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12038 tree tem;
12039 #ifdef ENABLE_FOLD_CHECKING
12040 unsigned char checksum_before[16], checksum_after[16];
12041 struct md5_ctx ctx;
12042 htab_t ht;
12044 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12045 md5_init_ctx (&ctx);
12046 fold_checksum_tree (op0, &ctx, ht);
12047 md5_finish_ctx (&ctx, checksum_before);
12048 htab_empty (ht);
12049 #endif
12051 tem = fold_unary (code, type, op0);
12052 if (!tem)
12053 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12055 #ifdef ENABLE_FOLD_CHECKING
12056 md5_init_ctx (&ctx);
12057 fold_checksum_tree (op0, &ctx, ht);
12058 md5_finish_ctx (&ctx, checksum_after);
12059 htab_delete (ht);
12061 if (memcmp (checksum_before, checksum_after, 16))
12062 fold_check_failed (op0, tem);
12063 #endif
12064 return tem;
12067 /* Fold a binary tree expression with code CODE of type TYPE with
12068 operands OP0 and OP1. Return a folded expression if successful.
12069 Otherwise, return a tree expression with code CODE of type TYPE
12070 with operands OP0 and OP1. */
12072 tree
12073 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12074 MEM_STAT_DECL)
12076 tree tem;
12077 #ifdef ENABLE_FOLD_CHECKING
12078 unsigned char checksum_before_op0[16],
12079 checksum_before_op1[16],
12080 checksum_after_op0[16],
12081 checksum_after_op1[16];
12082 struct md5_ctx ctx;
12083 htab_t ht;
12085 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12086 md5_init_ctx (&ctx);
12087 fold_checksum_tree (op0, &ctx, ht);
12088 md5_finish_ctx (&ctx, checksum_before_op0);
12089 htab_empty (ht);
12091 md5_init_ctx (&ctx);
12092 fold_checksum_tree (op1, &ctx, ht);
12093 md5_finish_ctx (&ctx, checksum_before_op1);
12094 htab_empty (ht);
12095 #endif
12097 tem = fold_binary (code, type, op0, op1);
12098 if (!tem)
12099 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12101 #ifdef ENABLE_FOLD_CHECKING
12102 md5_init_ctx (&ctx);
12103 fold_checksum_tree (op0, &ctx, ht);
12104 md5_finish_ctx (&ctx, checksum_after_op0);
12105 htab_empty (ht);
12107 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12108 fold_check_failed (op0, tem);
12110 md5_init_ctx (&ctx);
12111 fold_checksum_tree (op1, &ctx, ht);
12112 md5_finish_ctx (&ctx, checksum_after_op1);
12113 htab_delete (ht);
12115 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12116 fold_check_failed (op1, tem);
12117 #endif
12118 return tem;
12121 /* Fold a ternary tree expression with code CODE of type TYPE with
12122 operands OP0, OP1, and OP2. Return a folded expression if
12123 successful. Otherwise, return a tree expression with code CODE of
12124 type TYPE with operands OP0, OP1, and OP2. */
12126 tree
12127 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12128 MEM_STAT_DECL)
12130 tree tem;
12131 #ifdef ENABLE_FOLD_CHECKING
12132 unsigned char checksum_before_op0[16],
12133 checksum_before_op1[16],
12134 checksum_before_op2[16],
12135 checksum_after_op0[16],
12136 checksum_after_op1[16],
12137 checksum_after_op2[16];
12138 struct md5_ctx ctx;
12139 htab_t ht;
12141 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12142 md5_init_ctx (&ctx);
12143 fold_checksum_tree (op0, &ctx, ht);
12144 md5_finish_ctx (&ctx, checksum_before_op0);
12145 htab_empty (ht);
12147 md5_init_ctx (&ctx);
12148 fold_checksum_tree (op1, &ctx, ht);
12149 md5_finish_ctx (&ctx, checksum_before_op1);
12150 htab_empty (ht);
12152 md5_init_ctx (&ctx);
12153 fold_checksum_tree (op2, &ctx, ht);
12154 md5_finish_ctx (&ctx, checksum_before_op2);
12155 htab_empty (ht);
12156 #endif
12158 tem = fold_ternary (code, type, op0, op1, op2);
12159 if (!tem)
12160 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12162 #ifdef ENABLE_FOLD_CHECKING
12163 md5_init_ctx (&ctx);
12164 fold_checksum_tree (op0, &ctx, ht);
12165 md5_finish_ctx (&ctx, checksum_after_op0);
12166 htab_empty (ht);
12168 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12169 fold_check_failed (op0, tem);
12171 md5_init_ctx (&ctx);
12172 fold_checksum_tree (op1, &ctx, ht);
12173 md5_finish_ctx (&ctx, checksum_after_op1);
12174 htab_empty (ht);
12176 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12177 fold_check_failed (op1, tem);
12179 md5_init_ctx (&ctx);
12180 fold_checksum_tree (op2, &ctx, ht);
12181 md5_finish_ctx (&ctx, checksum_after_op2);
12182 htab_delete (ht);
12184 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12185 fold_check_failed (op2, tem);
12186 #endif
12187 return tem;
12190 /* Perform constant folding and related simplification of initializer
12191 expression EXPR. These behave identically to "fold_buildN" but ignore
12192 potential run-time traps and exceptions that fold must preserve. */
12194 #define START_FOLD_INIT \
12195 int saved_signaling_nans = flag_signaling_nans;\
12196 int saved_trapping_math = flag_trapping_math;\
12197 int saved_rounding_math = flag_rounding_math;\
12198 int saved_trapv = flag_trapv;\
12199 int saved_folding_initializer = folding_initializer;\
12200 flag_signaling_nans = 0;\
12201 flag_trapping_math = 0;\
12202 flag_rounding_math = 0;\
12203 flag_trapv = 0;\
12204 folding_initializer = 1;
12206 #define END_FOLD_INIT \
12207 flag_signaling_nans = saved_signaling_nans;\
12208 flag_trapping_math = saved_trapping_math;\
12209 flag_rounding_math = saved_rounding_math;\
12210 flag_trapv = saved_trapv;\
12211 folding_initializer = saved_folding_initializer;
12213 tree
12214 fold_build1_initializer (enum tree_code code, tree type, tree op)
12216 tree result;
12217 START_FOLD_INIT;
12219 result = fold_build1 (code, type, op);
12221 END_FOLD_INIT;
12222 return result;
12225 tree
12226 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12228 tree result;
12229 START_FOLD_INIT;
12231 result = fold_build2 (code, type, op0, op1);
12233 END_FOLD_INIT;
12234 return result;
12237 tree
12238 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12239 tree op2)
12241 tree result;
12242 START_FOLD_INIT;
12244 result = fold_build3 (code, type, op0, op1, op2);
12246 END_FOLD_INIT;
12247 return result;
12250 #undef START_FOLD_INIT
12251 #undef END_FOLD_INIT
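/* A sketch of the intent: when folding a static initializer such as
   double d = 1.0 / 3.0;, the *_initializer variants clear
   flag_signaling_nans, flag_trapping_math, flag_rounding_math and
   flag_trapv around the fold, so simplifications that must normally
   preserve run-time traps or rounding modes become available.  */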
12253 /* Determine if the first argument is a multiple of the second argument.
12254 Return 0 if it is not, or if we cannot easily determine it to be.
12256 An example of the sort of thing we care about (at this point; this routine
12257 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12258 fold cases do now) is discovering that
12260 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12262 is a multiple of
12264 SAVE_EXPR (J * 8)
12266 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12268 This code also handles discovering that
12270 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12272 is a multiple of 8 so we don't have to worry about dealing with a
12273 possible remainder.
12275 Note that we *look* inside a SAVE_EXPR only to determine how it was
12276 calculated; it is not safe for fold to do much of anything else with the
12277 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12278 at run time. For example, the latter example above *cannot* be implemented
12279 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12280 evaluation time of the original SAVE_EXPR is not necessarily the same at
12281 the time the new expression is evaluated. The only optimization of this
12282 sort that would be valid is changing
12284 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12286 divided by 8 to
12288 SAVE_EXPR (I) * SAVE_EXPR (J)
12290 (where the same SAVE_EXPR (J) is used in the original and the
12291 transformed version). */
12293 static int
12294 multiple_of_p (tree type, tree top, tree bottom)
12296 if (operand_equal_p (top, bottom, 0))
12297 return 1;
12299 if (TREE_CODE (type) != INTEGER_TYPE)
12300 return 0;
12302 switch (TREE_CODE (top))
12304 case BIT_AND_EXPR:
12305 /* Bitwise AND provides a power-of-two multiple. If either operand
12306 is a multiple of BOTTOM (a power of two), then TOP is too. */
12307 if (!integer_pow2p (bottom))
12308 return 0;
12309 /* FALLTHRU */
12311 case MULT_EXPR:
12312 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12313 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12315 case PLUS_EXPR:
12316 case MINUS_EXPR:
12317 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12318 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12320 case LSHIFT_EXPR:
12321 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12323 tree op1, t1;
12325 op1 = TREE_OPERAND (top, 1);
12326 /* const_binop may not detect overflow correctly,
12327 so check for it explicitly here. */
12328 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12329 > TREE_INT_CST_LOW (op1)
12330 && TREE_INT_CST_HIGH (op1) == 0
12331 && 0 != (t1 = fold_convert (type,
12332 const_binop (LSHIFT_EXPR,
12333 size_one_node,
12334 op1, 0)))
12335 && ! TREE_OVERFLOW (t1))
12336 return multiple_of_p (type, t1, bottom);
12338 return 0;
12340 case NOP_EXPR:
12341 /* Can't handle conversions from non-integral or wider integral type. */
12342 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12343 || (TYPE_PRECISION (type)
12344 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12345 return 0;
12347 /* ... fall through ... */
12349 case SAVE_EXPR:
12350 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12352 case INTEGER_CST:
12353 if (TREE_CODE (bottom) != INTEGER_CST
12354 || (TYPE_UNSIGNED (type)
12355 && (tree_int_cst_sgn (top) < 0
12356 || tree_int_cst_sgn (bottom) < 0)))
12357 return 0;
12358 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
12359 top, bottom, 0));
12361 default:
12362 return 0;
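/* A usage sketch (illustrative only; EXPR and TYPE stand for trees
   built elsewhere):

     tree eight = build_int_cst (type, 8);
     if (multiple_of_p (type, expr, eight))
       ... EXPR / 8 is exact, so the division can be strength-reduced
           without worrying about a remainder ...

   round_up and round_down later in this file issue the same query to
   skip rounding a VALUE that is already a multiple of DIVISOR.  */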
12366 /* Return true if `t' is known to be non-negative. If the return
12367 value is based on the assumption that signed overflow is undefined,
12368 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12369 *STRICT_OVERFLOW_P. */
12371 int
12372 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
12374 if (t == error_mark_node)
12375 return 0;
12377 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12378 return 1;
12380 switch (TREE_CODE (t))
12382 case SSA_NAME:
12383 /* Query VRP to see if it has recorded any information about
12384 the range of this object. */
12385 return ssa_name_nonnegative_p (t);
12387 case ABS_EXPR:
12388 /* We can't return 1 if flag_wrapv is set because
12389 ABS_EXPR<INT_MIN> = INT_MIN. */
12390 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
12391 return 1;
12392 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
12394 *strict_overflow_p = true;
12395 return 1;
12397 break;
12399 case INTEGER_CST:
12400 return tree_int_cst_sgn (t) >= 0;
12402 case REAL_CST:
12403 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12405 case PLUS_EXPR:
12406 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12407 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12408 strict_overflow_p)
12409 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12410 strict_overflow_p));
12412 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12413 both unsigned and at least 2 bits shorter than the result. */
12414 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12415 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12416 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12418 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12419 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12420 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12421 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12423 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12424 TYPE_PRECISION (inner2)) + 1;
12425 return prec < TYPE_PRECISION (TREE_TYPE (t));
12428 break;
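/* A worked instance of the zero_extend rule above (illustrative):
   with 32-bit int, (int) (unsigned char) x + (int) (unsigned char) y
   has inner precisions 8 and 8, so prec = MAX (8, 8) + 1 = 9 < 32;
   the sum is at most 255 + 255 = 510 and hence provably
   non-negative.  */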
12430 case MULT_EXPR:
12431 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12433 /* x * x for floating point x is always non-negative. */
12434 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12435 return 1;
12436 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12437 strict_overflow_p)
12438 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12439 strict_overflow_p));
12442 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12443 both unsigned and the sum of their precisions is smaller than that
of the result. */
12444 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12445 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12446 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12448 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12449 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12450 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12451 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12452 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12453 < TYPE_PRECISION (TREE_TYPE (t));
12455 return 0;
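/* A worked instance of the rule above (illustrative): for
   (int) (unsigned char) x * (int) (unsigned char) y we have
   8 + 8 = 16 < 32, and indeed the product is at most 255 * 255 = 65025.
   With unsigned short operands, 16 + 16 is not less than 32, and
   65535 * 65535 really can wrap negative in a signed 32-bit int, so 0
   is returned.  */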
12457 case BIT_AND_EXPR:
12458 case MAX_EXPR:
12459 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12460 strict_overflow_p)
12461 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12462 strict_overflow_p));
12464 case BIT_IOR_EXPR:
12465 case BIT_XOR_EXPR:
12466 case MIN_EXPR:
12467 case RDIV_EXPR:
12468 case TRUNC_DIV_EXPR:
12469 case CEIL_DIV_EXPR:
12470 case FLOOR_DIV_EXPR:
12471 case ROUND_DIV_EXPR:
12472 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12473 strict_overflow_p)
12474 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12475 strict_overflow_p));
12477 case TRUNC_MOD_EXPR:
12478 case CEIL_MOD_EXPR:
12479 case FLOOR_MOD_EXPR:
12480 case ROUND_MOD_EXPR:
12481 case SAVE_EXPR:
12482 case NON_LVALUE_EXPR:
12483 case FLOAT_EXPR:
12484 case FIX_TRUNC_EXPR:
12485 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12486 strict_overflow_p);
12488 case COMPOUND_EXPR:
12489 case MODIFY_EXPR:
12490 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12491 strict_overflow_p);
12493 case BIND_EXPR:
12494 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
12495 strict_overflow_p);
12497 case COND_EXPR:
12498 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12499 strict_overflow_p)
12500 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
12501 strict_overflow_p));
12503 case NOP_EXPR:
12505 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12506 tree outer_type = TREE_TYPE (t);
12508 if (TREE_CODE (outer_type) == REAL_TYPE)
12510 if (TREE_CODE (inner_type) == REAL_TYPE)
12511 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12512 strict_overflow_p);
12513 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12515 if (TYPE_UNSIGNED (inner_type))
12516 return 1;
12517 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12518 strict_overflow_p);
12521 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12523 if (TREE_CODE (inner_type) == REAL_TYPE)
12524 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
12525 strict_overflow_p);
12526 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12527 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12528 && TYPE_UNSIGNED (inner_type);
12531 break;
12533 case TARGET_EXPR:
12535 tree temp = TARGET_EXPR_SLOT (t);
12536 t = TARGET_EXPR_INITIAL (t);
12538 /* If the initializer is non-void, then it's a normal expression
12539 that will be assigned to the slot. */
12540 if (!VOID_TYPE_P (t))
12541 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
12543 /* Otherwise, the initializer sets the slot in some way. One common
12544 way is an assignment statement at the end of the initializer. */
12545 while (1)
12547 if (TREE_CODE (t) == BIND_EXPR)
12548 t = expr_last (BIND_EXPR_BODY (t));
12549 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12550 || TREE_CODE (t) == TRY_CATCH_EXPR)
12551 t = expr_last (TREE_OPERAND (t, 0));
12552 else if (TREE_CODE (t) == STATEMENT_LIST)
12553 t = expr_last (t);
12554 else
12555 break;
12557 if (TREE_CODE (t) == MODIFY_EXPR
12558 && TREE_OPERAND (t, 0) == temp)
12559 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12560 strict_overflow_p);
12562 return 0;
12565 case CALL_EXPR:
12567 tree fndecl = get_callee_fndecl (t);
12568 tree arglist = TREE_OPERAND (t, 1);
12569 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12570 switch (DECL_FUNCTION_CODE (fndecl))
12572 CASE_FLT_FN (BUILT_IN_ACOS):
12573 CASE_FLT_FN (BUILT_IN_ACOSH):
12574 CASE_FLT_FN (BUILT_IN_CABS):
12575 CASE_FLT_FN (BUILT_IN_COSH):
12576 CASE_FLT_FN (BUILT_IN_ERFC):
12577 CASE_FLT_FN (BUILT_IN_EXP):
12578 CASE_FLT_FN (BUILT_IN_EXP10):
12579 CASE_FLT_FN (BUILT_IN_EXP2):
12580 CASE_FLT_FN (BUILT_IN_FABS):
12581 CASE_FLT_FN (BUILT_IN_FDIM):
12582 CASE_FLT_FN (BUILT_IN_HYPOT):
12583 CASE_FLT_FN (BUILT_IN_POW10):
12584 CASE_INT_FN (BUILT_IN_FFS):
12585 CASE_INT_FN (BUILT_IN_PARITY):
12586 CASE_INT_FN (BUILT_IN_POPCOUNT):
12587 /* Always true. */
12588 return 1;
12590 CASE_FLT_FN (BUILT_IN_SQRT):
12591 /* sqrt(-0.0) is -0.0. */
12592 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12593 return 1;
12594 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12595 strict_overflow_p);
12597 CASE_FLT_FN (BUILT_IN_ASINH):
12598 CASE_FLT_FN (BUILT_IN_ATAN):
12599 CASE_FLT_FN (BUILT_IN_ATANH):
12600 CASE_FLT_FN (BUILT_IN_CBRT):
12601 CASE_FLT_FN (BUILT_IN_CEIL):
12602 CASE_FLT_FN (BUILT_IN_ERF):
12603 CASE_FLT_FN (BUILT_IN_EXPM1):
12604 CASE_FLT_FN (BUILT_IN_FLOOR):
12605 CASE_FLT_FN (BUILT_IN_FMOD):
12606 CASE_FLT_FN (BUILT_IN_FREXP):
12607 CASE_FLT_FN (BUILT_IN_LCEIL):
12608 CASE_FLT_FN (BUILT_IN_LDEXP):
12609 CASE_FLT_FN (BUILT_IN_LFLOOR):
12610 CASE_FLT_FN (BUILT_IN_LLCEIL):
12611 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12612 CASE_FLT_FN (BUILT_IN_LLRINT):
12613 CASE_FLT_FN (BUILT_IN_LLROUND):
12614 CASE_FLT_FN (BUILT_IN_LRINT):
12615 CASE_FLT_FN (BUILT_IN_LROUND):
12616 CASE_FLT_FN (BUILT_IN_MODF):
12617 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12618 CASE_FLT_FN (BUILT_IN_POW):
12619 CASE_FLT_FN (BUILT_IN_RINT):
12620 CASE_FLT_FN (BUILT_IN_ROUND):
12621 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12622 CASE_FLT_FN (BUILT_IN_SINH):
12623 CASE_FLT_FN (BUILT_IN_TANH):
12624 CASE_FLT_FN (BUILT_IN_TRUNC):
12625 /* True if the 1st argument is nonnegative. */
12626 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12627 strict_overflow_p);
12629 CASE_FLT_FN (BUILT_IN_FMAX):
12630 /* True if the 1st OR 2nd arguments are nonnegative. */
12631 return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12632 strict_overflow_p)
12633 || (tree_expr_nonnegative_warnv_p
12634 (TREE_VALUE (TREE_CHAIN (arglist)),
12635 strict_overflow_p)));
12637 CASE_FLT_FN (BUILT_IN_FMIN):
12638 /* True if the 1st AND 2nd arguments are nonnegative. */
12639 return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12640 strict_overflow_p)
12641 && (tree_expr_nonnegative_warnv_p
12642 (TREE_VALUE (TREE_CHAIN (arglist)),
12643 strict_overflow_p)));
12645 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12646 /* True if the 2nd argument is nonnegative. */
12647 return (tree_expr_nonnegative_warnv_p
12648 (TREE_VALUE (TREE_CHAIN (arglist)),
12649 strict_overflow_p));
12651 default:
12652 break;
12656 /* ... fall through ... */
12658 default:
12660 tree type = TREE_TYPE (t);
12661 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12662 && truth_value_p (TREE_CODE (t)))
12663 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12664 have a signed:1 type (where the values are -1 and 0). */
12665 return true;
12669 /* We don't know the sign of `t', so be conservative and return false. */
12670 return 0;
12673 /* Return true if `t' is known to be non-negative. Handle warnings
12674 about undefined signed overflow. */
12676 int
12677 tree_expr_nonnegative_p (tree t)
12679 int ret;
12680 bool strict_overflow_p;
12682 strict_overflow_p = false;
12683 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
12684 if (strict_overflow_p)
12685 fold_overflow_warning (("assuming signed overflow does not occur when "
12686 "determining that expression is always "
12687 "non-negative"),
12688 WARN_STRICT_OVERFLOW_MISC);
12689 return ret;
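/* Illustrative note: ABS_EXPR is one case where the answer can rest on
   the undefined-overflow assumption, since ABS_EXPR <INT_MIN> is
   INT_MIN when signed overflow wraps.  A minimal sketch of the calling
   pattern used throughout (EXPR stands for a tree built elsewhere):

     bool sop = false;
     if (tree_expr_nonnegative_warnv_p (expr, &sop))
       ... simplify; if SOP was set, call fold_overflow_warning first,
           as the wrapper above does ...  */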
12692 /* Return true when T (an expression of integral or pointer type) is
12693 known to be nonzero.  Similar logic is present in nonzero_address_p
12694 in rtlanal.c.
12696 If the return value is based on the assumption that signed overflow
12697 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
12698 change *STRICT_OVERFLOW_P. */
12700 bool
12701 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
12703 tree type = TREE_TYPE (t);
12704 bool sub_strict_overflow_p;
12706 /* Doing something useful for floating point would need more work. */
12707 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12708 return false;
12710 switch (TREE_CODE (t))
12712 case SSA_NAME:
12713 /* Query VRP to see if it has recorded any information about
12714 the range of this object. */
12715 return ssa_name_nonzero_p (t);
12717 case ABS_EXPR:
12718 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12719 strict_overflow_p);
12721 case INTEGER_CST:
12722 /* We used to test for !integer_zerop here. This does not work correctly
12723 if TREE_CONSTANT_OVERFLOW (t). */
12724 return (TREE_INT_CST_LOW (t) != 0
12725 || TREE_INT_CST_HIGH (t) != 0);
12727 case PLUS_EXPR:
12728 if (TYPE_OVERFLOW_UNDEFINED (type))
12730 /* In the presence of negative values it is hard
12731 to say anything definite. */
12732 sub_strict_overflow_p = false;
12733 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12734 &sub_strict_overflow_p)
12735 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12736 &sub_strict_overflow_p))
12737 return false;
12738 /* One of the operands must be positive and the other non-negative. */
12739 /* We don't set *STRICT_OVERFLOW_P here: even if this value
12740 overflows, on a twos-complement machine the sum of two
12741 nonnegative numbers can never be zero. */
12742 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12743 strict_overflow_p)
12744 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12745 strict_overflow_p));
12747 break;
12749 case MULT_EXPR:
12750 if (TYPE_OVERFLOW_UNDEFINED (type))
12752 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12753 strict_overflow_p)
12754 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12755 strict_overflow_p))
12757 *strict_overflow_p = true;
12758 return true;
12761 break;
12763 case NOP_EXPR:
12765 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12766 tree outer_type = TREE_TYPE (t);
12768 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12769 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12770 strict_overflow_p));
12772 break;
12774 case ADDR_EXPR:
12776 tree base = get_base_address (TREE_OPERAND (t, 0));
12778 if (!base)
12779 return false;
12781 /* Weak declarations may link to NULL. */
12782 if (VAR_OR_FUNCTION_DECL_P (base))
12783 return !DECL_WEAK (base);
12785 /* Constants are never weak. */
12786 if (CONSTANT_CLASS_P (base))
12787 return true;
12789 return false;
12792 case COND_EXPR:
12793 sub_strict_overflow_p = false;
12794 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12795 &sub_strict_overflow_p)
12796 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
12797 &sub_strict_overflow_p))
12799 if (sub_strict_overflow_p)
12800 *strict_overflow_p = true;
12801 return true;
12803 break;
12805 case MIN_EXPR:
12806 sub_strict_overflow_p = false;
12807 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12808 &sub_strict_overflow_p)
12809 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12810 &sub_strict_overflow_p))
12812 if (sub_strict_overflow_p)
12813 *strict_overflow_p = true;
12815 break;
12817 case MAX_EXPR:
12818 sub_strict_overflow_p = false;
12819 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12820 &sub_strict_overflow_p))
12822 if (sub_strict_overflow_p)
12823 *strict_overflow_p = true;
12825 /* When both operands are nonzero, MAX must be too. */
12826 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12827 strict_overflow_p))
12828 return true;
12830 /* MAX where operand 0 is positive is positive. */
12831 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12832 strict_overflow_p);
12834 /* MAX where operand 1 is positive is positive. */
12835 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12836 &sub_strict_overflow_p)
12837 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12838 &sub_strict_overflow_p))
12840 if (sub_strict_overflow_p)
12841 *strict_overflow_p = true;
12842 return true;
12844 break;
12846 case COMPOUND_EXPR:
12847 case MODIFY_EXPR:
12848 case BIND_EXPR:
12849 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12850 strict_overflow_p);
12852 case SAVE_EXPR:
12853 case NON_LVALUE_EXPR:
12854 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12855 strict_overflow_p);
12857 case BIT_IOR_EXPR:
12858 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12859 strict_overflow_p)
12860 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12861 strict_overflow_p));
12863 case CALL_EXPR:
12864 return alloca_call_p (t);
12866 default:
12867 break;
12869 return false;
12872 /* Return true when T is known to be nonzero.  Handle warnings about
12873 undefined signed overflow. */
12875 bool
12876 tree_expr_nonzero_p (tree t)
12878 bool ret, strict_overflow_p;
12880 strict_overflow_p = false;
12881 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
12882 if (strict_overflow_p)
12883 fold_overflow_warning (("assuming signed overflow does not occur when "
12884 "determining that expression is always "
12885 "non-zero"),
12886 WARN_STRICT_OVERFLOW_MISC);
12887 return ret;
12890 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12891 attempt to fold the expression to a constant without modifying TYPE,
12892 OP0 or OP1.
12894 If the expression can be simplified to a constant, then return
12895 the constant.  If the expression cannot be simplified to a
12896 constant, then return NULL_TREE. */
12898 tree
12899 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12901 tree tem = fold_binary (code, type, op0, op1);
12902 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
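/* A usage sketch (illustrative): fold 2 + 3 without first building a
   PLUS_EXPR node.

     tree a = build_int_cst (integer_type_node, 2);
     tree b = build_int_cst (integer_type_node, 3);
     tree c = fold_binary_to_constant (PLUS_EXPR, integer_type_node, a, b);

   Here C is the INTEGER_CST 5; had the operands not folded to a
   constant, C would be NULL_TREE.  */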
12905 /* Given the components of a unary expression CODE, TYPE and OP0,
12906 attempt to fold the expression to a constant without modifying
12907 TYPE or OP0.
12909 If the expression can be simplified to a constant, then return
12910 the constant.  If the expression cannot be simplified to a
12911 constant, then return NULL_TREE. */
12913 tree
12914 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12916 tree tem = fold_unary (code, type, op0);
12917 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12920 /* If EXP represents referencing an element in a constant string
12921 (either via pointer arithmetic or array indexing), return the
12922 tree representing the value accessed, otherwise return NULL. */
12924 tree
12925 fold_read_from_constant_string (tree exp)
12927 if ((TREE_CODE (exp) == INDIRECT_REF
12928 || TREE_CODE (exp) == ARRAY_REF)
12929 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
12931 tree exp1 = TREE_OPERAND (exp, 0);
12932 tree index;
12933 tree string;
12935 if (TREE_CODE (exp) == INDIRECT_REF)
12936 string = string_constant (exp1, &index);
12937 else
12939 tree low_bound = array_ref_low_bound (exp);
12940 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12942 /* Optimize the special-case of a zero lower bound.
12944 We convert the low_bound to sizetype to avoid some problems
12945 with constant folding.  (E.g. suppose the lower bound is 1,
12946 and its mode is QI.  Without the conversion, (ARRAY
12947 + (INDEX - (unsigned char) 1)) becomes ((ARRAY + (-(unsigned char) 1))
12948 + INDEX), which becomes (ARRAY + 255 + INDEX).  Oops!)  */
12949 if (! integer_zerop (low_bound))
12950 index = size_diffop (index, fold_convert (sizetype, low_bound));
12952 string = exp1;
12955 if (string
12956 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12957 && TREE_CODE (string) == STRING_CST
12958 && TREE_CODE (index) == INTEGER_CST
12959 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12960 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12961 == MODE_INT)
12962 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12963 return fold_convert (TREE_TYPE (exp),
12964 build_int_cst (NULL_TREE,
12965 (TREE_STRING_POINTER (string)
12966 [TREE_INT_CST_LOW (index)])));
12968 return NULL;
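/* An illustrative example: an indexed string literal such as "abc"[1]
   passes the checks above (the target is a STRING_CST, the index is a
   constant within TREE_STRING_LENGTH, and the element type has a
   single-byte integer mode), so it folds to the character constant
   'b'.  */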
12971 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12972 an integer constant or real constant.
12974 TYPE is the type of the result. */
12976 static tree
12977 fold_negate_const (tree arg0, tree type)
12979 tree t = NULL_TREE;
12981 switch (TREE_CODE (arg0))
12983 case INTEGER_CST:
12985 unsigned HOST_WIDE_INT low;
12986 HOST_WIDE_INT high;
12987 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12988 TREE_INT_CST_HIGH (arg0),
12989 &low, &high);
12990 t = build_int_cst_wide (type, low, high);
12991 t = force_fit_type (t, 1,
12992 (overflow | TREE_OVERFLOW (arg0))
12993 && !TYPE_UNSIGNED (type),
12994 TREE_CONSTANT_OVERFLOW (arg0));
12995 break;
12998 case REAL_CST:
12999 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13000 break;
13002 default:
13003 gcc_unreachable ();
13006 return t;
13009 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13010 an integer constant or real constant.
13012 TYPE is the type of the result. */
13014 tree
13015 fold_abs_const (tree arg0, tree type)
13017 tree t = NULL_TREE;
13019 switch (TREE_CODE (arg0))
13021 case INTEGER_CST:
13022 /* If the value is unsigned, then the absolute value is
13023 the same as the ordinary value. */
13024 if (TYPE_UNSIGNED (type))
13025 t = arg0;
13026 /* Similarly, if the value is non-negative. */
13027 else if (INT_CST_LT (integer_minus_one_node, arg0))
13028 t = arg0;
13029 /* If the value is negative, then the absolute value is
13030 its negation. */
13031 else
13033 unsigned HOST_WIDE_INT low;
13034 HOST_WIDE_INT high;
13035 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13036 TREE_INT_CST_HIGH (arg0),
13037 &low, &high);
13038 t = build_int_cst_wide (type, low, high);
13039 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
13040 TREE_CONSTANT_OVERFLOW (arg0));
13042 break;
13044 case REAL_CST:
13045 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13046 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13047 else
13048 t = arg0;
13049 break;
13051 default:
13052 gcc_unreachable ();
13055 return t;
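/* An illustrative corner case: for a 32-bit signed type,
   fold_abs_const on INT_MIN negates it back to INT_MIN (neg_double
   reports the overflow), and force_fit_type then flags the result with
   TREE_OVERFLOW rather than pretending the absolute value is
   representable.  */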
13058 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13059 constant. TYPE is the type of the result. */
13061 static tree
13062 fold_not_const (tree arg0, tree type)
13064 tree t = NULL_TREE;
13066 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13068 t = build_int_cst_wide (type,
13069 ~ TREE_INT_CST_LOW (arg0),
13070 ~ TREE_INT_CST_HIGH (arg0));
13071 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
13072 TREE_CONSTANT_OVERFLOW (arg0));
13074 return t;
13077 /* Given CODE, a relational operator, the target type, TYPE and two
13078 constant operands OP0 and OP1, return the result of the
13079 relational operation. If the result is not a compile time
13080 constant, then return NULL_TREE. */
13082 static tree
13083 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13085 int result, invert;
13087 /* From here on, the only cases we handle are when the result is
13088 known to be a constant. */
13090 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13092 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13093 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13095 /* Handle the cases where either operand is a NaN. */
13096 if (real_isnan (c0) || real_isnan (c1))
13098 switch (code)
13100 case EQ_EXPR:
13101 case ORDERED_EXPR:
13102 result = 0;
13103 break;
13105 case NE_EXPR:
13106 case UNORDERED_EXPR:
13107 case UNLT_EXPR:
13108 case UNLE_EXPR:
13109 case UNGT_EXPR:
13110 case UNGE_EXPR:
13111 case UNEQ_EXPR:
13112 result = 1;
13113 break;
13115 case LT_EXPR:
13116 case LE_EXPR:
13117 case GT_EXPR:
13118 case GE_EXPR:
13119 case LTGT_EXPR:
13120 if (flag_trapping_math)
13121 return NULL_TREE;
13122 result = 0;
13123 break;
13125 default:
13126 gcc_unreachable ();
13129 return constant_boolean_node (result, type);
13132 return constant_boolean_node (real_compare (code, c0, c1), type);
13135 /* Handle equality/inequality of complex constants. */
13136 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13138 tree rcond = fold_relational_const (code, type,
13139 TREE_REALPART (op0),
13140 TREE_REALPART (op1));
13141 tree icond = fold_relational_const (code, type,
13142 TREE_IMAGPART (op0),
13143 TREE_IMAGPART (op1));
13144 if (code == EQ_EXPR)
13145 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13146 else if (code == NE_EXPR)
13147 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13148 else
13149 return NULL_TREE;
13152 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13154 To compute GT, swap the arguments and do LT.
13155 To compute GE, do LT and invert the result.
13156 To compute LE, swap the arguments, do LT and invert the result.
13157 To compute NE, do EQ and invert the result.
13159 Therefore, the code below must handle only EQ and LT. */
13161 if (code == LE_EXPR || code == GT_EXPR)
13163 tree tem = op0;
13164 op0 = op1;
13165 op1 = tem;
13166 code = swap_tree_comparison (code);
13169 /* Note that it is safe to invert for real values here because we
13170 have already handled the one case where it matters. */
13172 invert = 0;
13173 if (code == NE_EXPR || code == GE_EXPR)
13175 invert = 1;
13176 code = invert_tree_comparison (code, false);
13179 /* Compute a result for LT or EQ if args permit;
13180 otherwise return NULL_TREE. */
13181 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13183 if (code == EQ_EXPR)
13184 result = tree_int_cst_equal (op0, op1);
13185 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13186 result = INT_CST_LT_UNSIGNED (op0, op1);
13187 else
13188 result = INT_CST_LT (op0, op1);
13190 else
13191 return NULL_TREE;
13193 if (invert)
13194 result ^= 1;
13195 return constant_boolean_node (result, type);
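/* Illustrative examples of the NaN handling above: `1.0 == NaN' folds
   to 0 and `1.0 != NaN' folds to 1, matching IEEE unordered semantics,
   while `1.0 < NaN' folds to 0 only when flag_trapping_math is clear;
   otherwise NULL_TREE is returned and the comparison is kept.  */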
13198 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
13199 Don't build a cleanup point expression for an EXPR which doesn't have
13200 side effects. */
13202 tree
13203 fold_build_cleanup_point_expr (tree type, tree expr)
13205 /* If the expression does not have side effects then we don't have to wrap
13206 it with a cleanup point expression. */
13207 if (!TREE_SIDE_EFFECTS (expr))
13208 return expr;
13210 /* If the expression is a return, check whether the expression inside the
13211 return, or the right hand side of the modify expression inside the
13212 return, has no side effects.  If either has none, we don't need to wrap
13213 the expression in a cleanup point expression.  Note that we don't check
13214 the left hand side of the modify because it should always be the RESULT_DECL. */
13215 if (TREE_CODE (expr) == RETURN_EXPR)
13217 tree op = TREE_OPERAND (expr, 0);
13218 if (!op || !TREE_SIDE_EFFECTS (op))
13219 return expr;
13220 op = TREE_OPERAND (op, 1);
13221 if (!TREE_SIDE_EFFECTS (op))
13222 return expr;
13225 return build1 (CLEANUP_POINT_EXPR, type, expr);
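/* A usage sketch (illustrative; CALL stands for a CALL_EXPR with side
   effects):

     fold_build_cleanup_point_expr (void_type_node, call)

   yields CLEANUP_POINT_EXPR <call>, telling gimplification where
   pending cleanups may run, while a side-effect-free expression is
   returned unchanged.  */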
13228 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13229 avoid confusing the gimplify process. */
13231 tree
13232 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13234 /* The size of the object is not relevant when talking about its address. */
13235 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13236 t = TREE_OPERAND (t, 0);
13238 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13239 if (TREE_CODE (t) == INDIRECT_REF
13240 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13242 t = TREE_OPERAND (t, 0);
13243 if (TREE_TYPE (t) != ptrtype)
13244 t = build1 (NOP_EXPR, ptrtype, t);
13246 else
13248 tree base = t;
13250 while (handled_component_p (base))
13251 base = TREE_OPERAND (base, 0);
13252 if (DECL_P (base))
13253 TREE_ADDRESSABLE (base) = 1;
13255 t = build1 (ADDR_EXPR, ptrtype, t);
13258 return t;
13261 tree
13262 build_fold_addr_expr (tree t)
13264 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
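/* An illustrative consequence of the INDIRECT_REF folding above: for a
   pointer-valued tree P,

     build_fold_addr_expr (build_fold_indirect_ref (p))

   collapses back to P (modulo a NOP_EXPR if the pointer types differ)
   instead of producing the tree &*P.  */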
13267 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13268 of an indirection through OP0, or NULL_TREE if no simplification is
13269 possible. */
13271 tree
13272 fold_indirect_ref_1 (tree type, tree op0)
13274 tree sub = op0;
13275 tree subtype;
13277 STRIP_NOPS (sub);
13278 subtype = TREE_TYPE (sub);
13279 if (!POINTER_TYPE_P (subtype))
13280 return NULL_TREE;
13282 if (TREE_CODE (sub) == ADDR_EXPR)
13284 tree op = TREE_OPERAND (sub, 0);
13285 tree optype = TREE_TYPE (op);
13286 /* *&CONST_DECL -> the value of the const decl. */
13287 if (TREE_CODE (op) == CONST_DECL)
13288 return DECL_INITIAL (op);
13289 /* *&p => p; make sure to handle *&"str"[cst] here. */
13290 if (type == optype)
13292 tree fop = fold_read_from_constant_string (op);
13293 if (fop)
13294 return fop;
13295 else
13296 return op;
13298 /* *(foo *)&fooarray => fooarray[0] */
13299 else if (TREE_CODE (optype) == ARRAY_TYPE
13300 && type == TREE_TYPE (optype))
13302 tree type_domain = TYPE_DOMAIN (optype);
13303 tree min_val = size_zero_node;
13304 if (type_domain && TYPE_MIN_VALUE (type_domain))
13305 min_val = TYPE_MIN_VALUE (type_domain);
13306 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13308 /* *(foo *)&complexfoo => __real__ complexfoo */
13309 else if (TREE_CODE (optype) == COMPLEX_TYPE
13310 && type == TREE_TYPE (optype))
13311 return fold_build1 (REALPART_EXPR, type, op);
13314 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13315 if (TREE_CODE (sub) == PLUS_EXPR
13316 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13318 tree op00 = TREE_OPERAND (sub, 0);
13319 tree op01 = TREE_OPERAND (sub, 1);
13320 tree op00type;
13322 STRIP_NOPS (op00);
13323 op00type = TREE_TYPE (op00);
13324 if (TREE_CODE (op00) == ADDR_EXPR
13325 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13326 && type == TREE_TYPE (TREE_TYPE (op00type)))
13328 tree size = TYPE_SIZE_UNIT (type);
13329 if (tree_int_cst_equal (size, op01))
13330 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13334 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13335 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13336 && type == TREE_TYPE (TREE_TYPE (subtype)))
13338 tree type_domain;
13339 tree min_val = size_zero_node;
13340 sub = build_fold_indirect_ref (sub);
13341 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13342 if (type_domain && TYPE_MIN_VALUE (type_domain))
13343 min_val = TYPE_MIN_VALUE (type_domain);
13344 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13347 return NULL_TREE;
13350 /* Builds an expression for an indirection through T, simplifying some
13351 cases. */
13353 tree
13354 build_fold_indirect_ref (tree t)
13356 tree type = TREE_TYPE (TREE_TYPE (t));
13357 tree sub = fold_indirect_ref_1 (type, t);
13359 if (sub)
13360 return sub;
13361 else
13362 return build1 (INDIRECT_REF, type, t);
13365 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13367 tree
13368 fold_indirect_ref (tree t)
13370 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13372 if (sub)
13373 return sub;
13374 else
13375 return t;
13378 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13379 whose result is ignored. The type of the returned tree need not be
13380 the same as the original expression. */
13382 tree
13383 fold_ignored_result (tree t)
13385 if (!TREE_SIDE_EFFECTS (t))
13386 return integer_zero_node;
13388 for (;;)
13389 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13391 case tcc_unary:
13392 t = TREE_OPERAND (t, 0);
13393 break;
13395 case tcc_binary:
13396 case tcc_comparison:
13397 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13398 t = TREE_OPERAND (t, 0);
13399 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13400 t = TREE_OPERAND (t, 1);
13401 else
13402 return t;
13403 break;
13405 case tcc_expression:
13406 switch (TREE_CODE (t))
13408 case COMPOUND_EXPR:
13409 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13410 return t;
13411 t = TREE_OPERAND (t, 0);
13412 break;
13414 case COND_EXPR:
13415 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13416 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13417 return t;
13418 t = TREE_OPERAND (t, 0);
13419 break;
13421 default:
13422 return t;
13424 break;
13426 default:
13427 return t;
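/* An illustrative example: for an ignored `x + foo ()', the binary
   case above keeps only the operand with side effects, so just the
   CALL_EXPR foo () survives; an expression with no side effects at all
   is replaced by integer_zero_node up front.  */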
13431 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13432 This can only be applied to objects of a sizetype. */
13434 tree
13435 round_up (tree value, int divisor)
13437 tree div = NULL_TREE;
13439 gcc_assert (divisor > 0);
13440 if (divisor == 1)
13441 return value;
13443 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
13444 have to do anything.  Only do this when VALUE is not a constant,
13445 because for a constant this check is more expensive than just
13446 doing the rounding. */
13447 if (TREE_CODE (value) != INTEGER_CST)
13449 div = build_int_cst (TREE_TYPE (value), divisor);
13451 if (multiple_of_p (TREE_TYPE (value), value, div))
13452 return value;
13455 /* If divisor is a power of two, simplify this to bit manipulation. */
13456 if (divisor == (divisor & -divisor))
13458 tree t;
13460 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13461 value = size_binop (PLUS_EXPR, value, t);
13462 t = build_int_cst (TREE_TYPE (value), -divisor);
13463 value = size_binop (BIT_AND_EXPR, value, t);
13465 else
13467 if (!div)
13468 div = build_int_cst (TREE_TYPE (value), divisor);
13469 value = size_binop (CEIL_DIV_EXPR, value, div);
13470 value = size_binop (MULT_EXPR, value, div);
13473 return value;
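/* A worked example of the power-of-two path (illustrative):

     round_up (size, 8)  ==>  (size + 7) & -8

   so a SIZE of 13 becomes (13 + 7) & ~7 = 16, with no division
   emitted.  round_down below is the same with the addition omitted.  */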
13476 /* Likewise, but round down. */
13478 tree
13479 round_down (tree value, int divisor)
13481 tree div = NULL_TREE;
13483 gcc_assert (divisor > 0);
13484 if (divisor == 1)
13485 return value;
13487 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
13488 have to do anything.  Only do this when VALUE is not a constant,
13489 because for a constant this check is more expensive than just
13490 doing the rounding. */
13491 if (TREE_CODE (value) != INTEGER_CST)
13493 div = build_int_cst (TREE_TYPE (value), divisor);
13495 if (multiple_of_p (TREE_TYPE (value), value, div))
13496 return value;
13499 /* If divisor is a power of two, simplify this to bit manipulation. */
13500 if (divisor == (divisor & -divisor))
13502 tree t;
13504 t = build_int_cst (TREE_TYPE (value), -divisor);
13505 value = size_binop (BIT_AND_EXPR, value, t);
13507 else
13509 if (!div)
13510 div = build_int_cst (TREE_TYPE (value), divisor);
13511 value = size_binop (FLOOR_DIV_EXPR, value, div);
13512 value = size_binop (MULT_EXPR, value, div);
13515 return value;
13518 /* Returns the pointer to the base of the object addressed by EXP and
13519 extracts the information about the offset of the access, storing it
13520 in *PBITPOS and *POFFSET. */
13522 static tree
13523 split_address_to_core_and_offset (tree exp,
13524 HOST_WIDE_INT *pbitpos, tree *poffset)
13526 tree core;
13527 enum machine_mode mode;
13528 int unsignedp, volatilep;
13529 HOST_WIDE_INT bitsize;
13531 if (TREE_CODE (exp) == ADDR_EXPR)
13533 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13534 poffset, &mode, &unsignedp, &volatilep,
13535 false);
13536 core = build_fold_addr_expr (core);
13538 else
13540 core = exp;
13541 *pbitpos = 0;
13542 *poffset = NULL_TREE;
13545 return core;
13548 /* Returns true if addresses of E1 and E2 differ by a constant, false
13549 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13551 bool
13552 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13554 tree core1, core2;
13555 HOST_WIDE_INT bitpos1, bitpos2;
13556 tree toffset1, toffset2, tdiff, type;
13558 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13559 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13561 if (bitpos1 % BITS_PER_UNIT != 0
13562 || bitpos2 % BITS_PER_UNIT != 0
13563 || !operand_equal_p (core1, core2, 0))
13564 return false;
13566 if (toffset1 && toffset2)
13568 type = TREE_TYPE (toffset1);
13569 if (type != TREE_TYPE (toffset2))
13570 toffset2 = fold_convert (type, toffset2);
13572 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13573 if (!cst_and_fits_in_hwi (tdiff))
13574 return false;
13576 *diff = int_cst_value (tdiff);
13578 else if (toffset1 || toffset2)
13580 /* If only one of the offsets is non-constant, the difference cannot
13581 be a constant. */
13582 return false;
13584 else
13585 *diff = 0;
13587 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13588 return true;
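/* A usage sketch (illustrative; E1 and E2 stand for the trees &a[3]
   and &a[1] for an array of 4-byte ints):

     HOST_WIDE_INT diff;
     if (ptr_difference_const (e1, e2, &diff))
       ... DIFF is 8, the byte difference of the two addresses ...

   This succeeds because both addresses share the core &a and their
   offsets fold into constant bit positions.  */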
13591 /* Simplify the floating point expression EXP when the sign of the
13592 result is not significant. Return NULL_TREE if no simplification
13593 is possible. */
13595 tree
13596 fold_strip_sign_ops (tree exp)
13598 tree arg0, arg1;
13600 switch (TREE_CODE (exp))
13602 case ABS_EXPR:
13603 case NEGATE_EXPR:
13604 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13605 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13607 case MULT_EXPR:
13608 case RDIV_EXPR:
13609 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13610 return NULL_TREE;
13611 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13612 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13613 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13614 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13615 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13616 arg1 ? arg1 : TREE_OPERAND (exp, 1));
13617 break;
13619 default:
13620 break;
13622 return NULL_TREE;
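/* An illustrative example: a caller that only cares about magnitude,
   say one folding fabs (-x * y), can apply fold_strip_sign_ops to the
   argument: the NEGATE_EXPR around X is stripped and the MULT_EXPR is
   rebuilt as x * y, provided the mode does not honor sign-dependent
   rounding.  */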