/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
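/* For illustration: the four bits act as flags, LT = 1, EQ = 2, GT = 4,
   and UNORD = 8 for the unordered (NaN) outcome.  So, for example,
   COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ (1 | 2 == 3) and
   COMPCODE_NE == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD
   (1 | 4 | 8 == 13).  This is why ANDing or ORing two comparison codes
   yields the code of the conjunction or disjunction of the comparisons.  */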
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
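/* Worked example (illustrative, with 8-bit values for brevity): for
   a = 0x70, b = 0x70, sum = 0xE0, we have ~(a ^ b) == 0xFF and
   (a ^ sum) == 0x90, so the AND has the sign bit set and the macro
   yields nonzero: two positive operands produced a negative sum.
   For a = 0x70, b = 0x90 the operand signs differ, ~(a ^ b) has a
   clear sign bit, and no overflow is reported regardless of SUM.  */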
/* Constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of each original word is then
   LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
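/* For illustration, assuming HOST_BITS_PER_WIDE_INT == 32: BASE is
   1 << 16, LOWPART (0x12345678) == 0x5678, HIGHPART (0x12345678) == 0x1234,
   and indeed 0x5678 + 0x1234 * BASE == 0x12345678.  */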
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
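/* Example round trip (again assuming HOST_BITS_PER_WIDE_INT == 32):
   encode (words, 0xABCD1234, 0x00005678) gives
   words[0] = 0x1234, words[1] = 0xABCD, words[2] = 0x5678, words[3] = 0;
   decode (words, &low, &hi) restores low = 0xABCD1234, hi = 0x5678.  */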
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
   OVERFLOWED is nonzero,
   or OVERFLOWABLE is >0 and signed overflow occurs,
   or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
   CONST_OVERFLOWED is nonzero
   or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */
tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
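/* For illustration: forcing the INTEGER_CST 0x1FF into an 8-bit unsigned
   type clears every bit above bit 7, leaving low = 0xFF, high = 0; since
   the value changed, a new node is built, and if OVERFLOWABLE < 0 the
   returned copy additionally has TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW
   set.  */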
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
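/* Example (assuming HOST_BITS_PER_WIDE_INT == 32): adding the doubleword
   values {l1 = 0xFFFFFFFF, h1 = 0} and {l2 = 1, h2 = 0} gives l = 0,
   h = 1; the carry out of the low word is detected by the test l < l1.
   Signed overflow is reported only when the two high words have the same
   sign but the resulting high word's sign differs.  */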
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
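/* Example: the most negative doubleword value, with L1 == 0 and only the
   sign bit of H1 set, is its own negation; then *hv == h1, the AND
   (*hv & h1) is negative, and the function reports overflow.  Any value
   with a nonzero low word negates via *lv = -l1, *hv = ~h1 without
   overflow, because the borrow out of the low word is exactly the ~.  */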
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
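/* The loops above are schoolbook multiplication in base
   2**(HOST_BITS_PER_WIDE_INT / 2).  For illustration with 16-bit digits:
   each partial product arg1[i] * arg2[j] is at most
   0xFFFF * 0xFFFF == 0xFFFE0001, so adding the running carry (< BASE)
   gives at most 0xFFFF0000, and adding prod[k] (< BASE) still fits in an
   unsigned word at 0xFFFFFFFF; that is what the bounds quoted in the
   inner-loop comments guarantee.  */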
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
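/* Note the two-step shift (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1)
   above: for count == 0 a single shift by HOST_BITS_PER_WIDE_INT would be
   undefined behavior in C, so the shift is split.  With count == 0 it
   becomes (l1 >> (HOST_BITS_PER_WIDE_INT - 1)) >> 1, which is well defined
   and yields 0, exactly the bits the high word should receive.  */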
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
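/* Worked example of the rounding modes, for 7 / -2 (exact ratio -3.5):
   TRUNC_DIV_EXPR gives quo = -3, rem = 1;
   FLOOR_DIV_EXPR gives quo = -4, rem = -1;
   CEIL_DIV_EXPR gives quo = -3, rem = 1;
   ROUND_DIV_EXPR rounds the tie away from zero, giving quo = -4, rem = -1.
   In every mode the identity num == quo * den + rem holds for the values
   stored in *LQUO/*HQUO and *LREM/*HREM.  */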
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* This is non-zero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !EXPR_HAS_LOCATION (stmt))
    locus = input_location;
  else
    locus = EXPR_LOCATION (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
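/* A sketch of the typical caller pattern for the machinery above (the
   actual callers live elsewhere, e.g. in the loop iteration estimation
   code, and may differ in detail):

     fold_defer_overflow_warnings ();
     tem = fold_binary (code, type, op0, op1);
     ...decide whether TEM will actually be used...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   Any warning queued via fold_overflow_warning while folding is issued,
   at the deferred severity, only when the result is actually used.  */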
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
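/* Example (for a precision that fits in one HOST_WIDE_INT): for a 32-bit
   signed type the only rejected constant is 0x80000000 (INT_MIN), whose
   masked value equals 1 << 31; every other constant, including INT_MAX,
   may be negated without overflow.  */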
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
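/* The RSHIFT_EXPR case relies on this identity: for a 32-bit int x,
   (int) x >> 31 arithmetically shifts the sign bit through the word,
   producing 0 or -1, while (unsigned) x >> 31 produces 0 or 1.  The two
   results are exact negations of each other, so the outer negation can
   be folded away simply by switching the signedness of the shift.  */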
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (!TREE_OVERFLOW (tem)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except for a literal,
   for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except for a
   literal, for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
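/* Example: splitting IN = (x + 3) with CODE == PLUS_EXPR stores 3 in
   *LITP and returns x; splitting IN = (x - 3) stores 3 in *MINUS_LITP
   instead, since the literal was subtracted.  For IN = (s + v) where S
   has TREE_CONSTANT set but is not a literal (say, the address of a
   static object), S goes to *CONP and V is returned as the variable
   part.  */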
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
      /* Fall through.  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* Fall through.  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
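/* The RDIV_EXPR case above is the textbook formula for complex division:

     (r1 + i1*i) / (r2 + i2*i)
       = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2)

   with T1 and T2 the real and imaginary numerator parts and MAGSQUARED
   the common real denominator.  */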
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
1947 /* A subroutine of fold_convert_const handling conversions of an
1948 INTEGER_CST to another integer type. */
1950 static tree
1951 fold_convert_const_int_from_int (tree type, tree arg1)
1953 tree t;
1955 /* Given an integer constant, make a new constant with the new type,
1956 appropriately sign-extended or truncated. */
1957 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1958 TREE_INT_CST_HIGH (arg1));
1960 t = force_fit_type (t,
1961 /* Don't set the overflow when
1962 converting a pointer. */
1963 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1964 (TREE_INT_CST_HIGH (arg1) < 0
1965 && (TYPE_UNSIGNED (type)
1966 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1967 | TREE_OVERFLOW (arg1),
1968 TREE_CONSTANT_OVERFLOW (arg1));
1970 return t;
1973 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1974 to an integer type. */
1976 static tree
1977 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1979 int overflow = 0;
1980 tree t;
1982 /* The following code implements the floating point to integer
1983 conversion rules required by the Java Language Specification:
1984 IEEE NaNs are mapped to zero and values that overflow
1985 the target precision saturate, i.e. values greater than
1986 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1987 are mapped to INT_MIN. These semantics are allowed by the
1988 C and C++ standards that simply state that the behavior of
1989 FP-to-integer conversion is unspecified upon overflow. */
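/* For example, converting the REAL_CST 1.0e30 to a 32-bit signed
   integer type saturates to 2147483647, and converting a NaN yields
   zero; TREE_OVERFLOW is set on the result in both cases.  */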
1991 HOST_WIDE_INT high, low;
1992 REAL_VALUE_TYPE r;
1993 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1995 switch (code)
1997 case FIX_TRUNC_EXPR:
1998 real_trunc (&r, VOIDmode, &x);
1999 break;
2001 case FIX_CEIL_EXPR:
2002 real_ceil (&r, VOIDmode, &x);
2003 break;
2005 case FIX_FLOOR_EXPR:
2006 real_floor (&r, VOIDmode, &x);
2007 break;
2009 case FIX_ROUND_EXPR:
2010 real_round (&r, VOIDmode, &x);
2011 break;
2013 default:
2014 gcc_unreachable ();
2017 /* If R is NaN, return zero and show we have an overflow. */
2018 if (REAL_VALUE_ISNAN (r))
2020 overflow = 1;
2021 high = 0;
2022 low = 0;
2025 /* See if R is less than the lower bound or greater than the
2026 upper bound. */
2028 if (! overflow)
2030 tree lt = TYPE_MIN_VALUE (type);
2031 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2032 if (REAL_VALUES_LESS (r, l))
2034 overflow = 1;
2035 high = TREE_INT_CST_HIGH (lt);
2036 low = TREE_INT_CST_LOW (lt);
2040 if (! overflow)
2042 tree ut = TYPE_MAX_VALUE (type);
2043 if (ut)
2045 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2046 if (REAL_VALUES_LESS (u, r))
2048 overflow = 1;
2049 high = TREE_INT_CST_HIGH (ut);
2050 low = TREE_INT_CST_LOW (ut);
2055 if (! overflow)
2056 REAL_VALUE_TO_INT (&low, &high, r);
2058 t = build_int_cst_wide (type, low, high);
2060 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
2061 TREE_CONSTANT_OVERFLOW (arg1));
2062 return t;
2065 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2066 to another floating point type. */
2068 static tree
2069 fold_convert_const_real_from_real (tree type, tree arg1)
2071 REAL_VALUE_TYPE value;
2072 tree t;
2074 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2075 t = build_real (type, value);
2077 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2078 TREE_CONSTANT_OVERFLOW (t)
2079 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2080 return t;
2083 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2084 type TYPE. If no simplification can be done return NULL_TREE. */
2086 static tree
2087 fold_convert_const (enum tree_code code, tree type, tree arg1)
2089 if (TREE_TYPE (arg1) == type)
2090 return arg1;
2092 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2094 if (TREE_CODE (arg1) == INTEGER_CST)
2095 return fold_convert_const_int_from_int (type, arg1);
2096 else if (TREE_CODE (arg1) == REAL_CST)
2097 return fold_convert_const_int_from_real (code, type, arg1);
2099 else if (TREE_CODE (type) == REAL_TYPE)
2101 if (TREE_CODE (arg1) == INTEGER_CST)
2102 return build_real_from_int_cst (type, arg1);
2103 if (TREE_CODE (arg1) == REAL_CST)
2104 return fold_convert_const_real_from_real (type, arg1);
2106 return NULL_TREE;
2109 /* Construct a vector of zero elements of vector type TYPE. */
2111 static tree
2112 build_zero_vector (tree type)
2114 tree elem, list;
2115 int i, units;
2117 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2118 units = TYPE_VECTOR_SUBPARTS (type);
2120 list = NULL_TREE;
2121 for (i = 0; i < units; i++)
2122 list = tree_cons (NULL_TREE, elem, list);
2123 return build_vector (type, list);
2126 /* Convert expression ARG to type TYPE. Used by the middle-end for
2127 simple conversions in preference to calling the front-end's convert. */
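/* For example, converting an INTEGER_CST to a COMPLEX_TYPE builds
   COMPLEX_EXPR <(T) arg, (T) 0>, where T is the component type of
   the complex type; see the COMPLEX_TYPE case below.  */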
2129 tree
2130 fold_convert (tree type, tree arg)
2132 tree orig = TREE_TYPE (arg);
2133 tree tem;
2135 if (type == orig)
2136 return arg;
2138 if (TREE_CODE (arg) == ERROR_MARK
2139 || TREE_CODE (type) == ERROR_MARK
2140 || TREE_CODE (orig) == ERROR_MARK)
2141 return error_mark_node;
2143 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2144 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2145 TYPE_MAIN_VARIANT (orig)))
2146 return fold_build1 (NOP_EXPR, type, arg);
2148 switch (TREE_CODE (type))
2150 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2151 case POINTER_TYPE: case REFERENCE_TYPE:
2152 case OFFSET_TYPE:
2153 if (TREE_CODE (arg) == INTEGER_CST)
2155 tem = fold_convert_const (NOP_EXPR, type, arg);
2156 if (tem != NULL_TREE)
2157 return tem;
2159 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2160 || TREE_CODE (orig) == OFFSET_TYPE)
2161 return fold_build1 (NOP_EXPR, type, arg);
2162 if (TREE_CODE (orig) == COMPLEX_TYPE)
2164 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2165 return fold_convert (type, tem);
2167 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2168 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2169 return fold_build1 (NOP_EXPR, type, arg);
2171 case REAL_TYPE:
2172 if (TREE_CODE (arg) == INTEGER_CST)
2174 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2175 if (tem != NULL_TREE)
2176 return tem;
2178 else if (TREE_CODE (arg) == REAL_CST)
2180 tem = fold_convert_const (NOP_EXPR, type, arg);
2181 if (tem != NULL_TREE)
2182 return tem;
2185 switch (TREE_CODE (orig))
2187 case INTEGER_TYPE:
2188 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2189 case POINTER_TYPE: case REFERENCE_TYPE:
2190 return fold_build1 (FLOAT_EXPR, type, arg);
2192 case REAL_TYPE:
2193 return fold_build1 (NOP_EXPR, type, arg);
2195 case COMPLEX_TYPE:
2196 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2197 return fold_convert (type, tem);
2199 default:
2200 gcc_unreachable ();
2203 case COMPLEX_TYPE:
2204 switch (TREE_CODE (orig))
2206 case INTEGER_TYPE:
2207 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2208 case POINTER_TYPE: case REFERENCE_TYPE:
2209 case REAL_TYPE:
2210 return build2 (COMPLEX_EXPR, type,
2211 fold_convert (TREE_TYPE (type), arg),
2212 fold_convert (TREE_TYPE (type), integer_zero_node));
2213 case COMPLEX_TYPE:
2215 tree rpart, ipart;
2217 if (TREE_CODE (arg) == COMPLEX_EXPR)
2219 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2220 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2221 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2224 arg = save_expr (arg);
2225 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2226 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2227 rpart = fold_convert (TREE_TYPE (type), rpart);
2228 ipart = fold_convert (TREE_TYPE (type), ipart);
2229 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2232 default:
2233 gcc_unreachable ();
2236 case VECTOR_TYPE:
2237 if (integer_zerop (arg))
2238 return build_zero_vector (type);
2239 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2240 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2241 || TREE_CODE (orig) == VECTOR_TYPE);
2242 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2244 case VOID_TYPE:
2245 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2247 default:
2248 gcc_unreachable ();
2252 /* Return false if X can be assumed not to be an lvalue, true
2253 otherwise. */
2255 static bool
2256 maybe_lvalue_p (tree x)
2258 /* We only need to wrap lvalue tree codes. */
2259 switch (TREE_CODE (x))
2261 case VAR_DECL:
2262 case PARM_DECL:
2263 case RESULT_DECL:
2264 case LABEL_DECL:
2265 case FUNCTION_DECL:
2266 case SSA_NAME:
2268 case COMPONENT_REF:
2269 case INDIRECT_REF:
2270 case ALIGN_INDIRECT_REF:
2271 case MISALIGNED_INDIRECT_REF:
2272 case ARRAY_REF:
2273 case ARRAY_RANGE_REF:
2274 case BIT_FIELD_REF:
2275 case OBJ_TYPE_REF:
2277 case REALPART_EXPR:
2278 case IMAGPART_EXPR:
2279 case PREINCREMENT_EXPR:
2280 case PREDECREMENT_EXPR:
2281 case SAVE_EXPR:
2282 case TRY_CATCH_EXPR:
2283 case WITH_CLEANUP_EXPR:
2284 case COMPOUND_EXPR:
2285 case MODIFY_EXPR:
2286 case TARGET_EXPR:
2287 case COND_EXPR:
2288 case BIND_EXPR:
2289 case MIN_EXPR:
2290 case MAX_EXPR:
2291 break;
2293 default:
2294 /* Assume the worst for front-end tree codes. */
2295 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2296 break;
2297 return false;
2300 return true;
2303 /* Return an expr equal to X but certainly not valid as an lvalue. */
2305 tree
2306 non_lvalue (tree x)
2308 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2309 us. */
2310 if (in_gimple_form)
2311 return x;
2313 if (! maybe_lvalue_p (x))
2314 return x;
2315 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2318 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2319 Zero means allow extended lvalues. */
2321 int pedantic_lvalues;
2323 /* When pedantic, return an expr equal to X but certainly not valid as a
2324 pedantic lvalue. Otherwise, return X. */
2326 static tree
2327 pedantic_non_lvalue (tree x)
2329 if (pedantic_lvalues)
2330 return non_lvalue (x);
2331 else
2332 return x;
2335 /* Given a tree comparison code, return the code that is the logical inverse
2336 of the given code. It is not safe to do this for floating-point
2337 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2338 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
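/* For example, with HONOR_NANS set the inverse of LT_EXPR is UNGE_EXPR,
   which is also true when either operand is a NaN; without NaNs the
   inverse is plain GE_EXPR.  */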
2340 enum tree_code
2341 invert_tree_comparison (enum tree_code code, bool honor_nans)
2343 if (honor_nans && flag_trapping_math)
2344 return ERROR_MARK;
2346 switch (code)
2348 case EQ_EXPR:
2349 return NE_EXPR;
2350 case NE_EXPR:
2351 return EQ_EXPR;
2352 case GT_EXPR:
2353 return honor_nans ? UNLE_EXPR : LE_EXPR;
2354 case GE_EXPR:
2355 return honor_nans ? UNLT_EXPR : LT_EXPR;
2356 case LT_EXPR:
2357 return honor_nans ? UNGE_EXPR : GE_EXPR;
2358 case LE_EXPR:
2359 return honor_nans ? UNGT_EXPR : GT_EXPR;
2360 case LTGT_EXPR:
2361 return UNEQ_EXPR;
2362 case UNEQ_EXPR:
2363 return LTGT_EXPR;
2364 case UNGT_EXPR:
2365 return LE_EXPR;
2366 case UNGE_EXPR:
2367 return LT_EXPR;
2368 case UNLT_EXPR:
2369 return GE_EXPR;
2370 case UNLE_EXPR:
2371 return GT_EXPR;
2372 case ORDERED_EXPR:
2373 return UNORDERED_EXPR;
2374 case UNORDERED_EXPR:
2375 return ORDERED_EXPR;
2376 default:
2377 gcc_unreachable ();
2381 /* Similar, but return the comparison that results if the operands are
2382 swapped. This is safe for floating-point. */
2384 enum tree_code
2385 swap_tree_comparison (enum tree_code code)
2387 switch (code)
2389 case EQ_EXPR:
2390 case NE_EXPR:
2391 case ORDERED_EXPR:
2392 case UNORDERED_EXPR:
2393 case LTGT_EXPR:
2394 case UNEQ_EXPR:
2395 return code;
2396 case GT_EXPR:
2397 return LT_EXPR;
2398 case GE_EXPR:
2399 return LE_EXPR;
2400 case LT_EXPR:
2401 return GT_EXPR;
2402 case LE_EXPR:
2403 return GE_EXPR;
2404 case UNGT_EXPR:
2405 return UNLT_EXPR;
2406 case UNGE_EXPR:
2407 return UNLE_EXPR;
2408 case UNLT_EXPR:
2409 return UNGT_EXPR;
2410 case UNLE_EXPR:
2411 return UNGE_EXPR;
2412 default:
2413 gcc_unreachable ();
2418 /* Convert a comparison tree code from an enum tree_code representation
2419 into a compcode bit-based encoding. This function is the inverse of
2420 compcode_to_comparison. */
2422 static enum comparison_code
2423 comparison_to_compcode (enum tree_code code)
2425 switch (code)
2427 case LT_EXPR:
2428 return COMPCODE_LT;
2429 case EQ_EXPR:
2430 return COMPCODE_EQ;
2431 case LE_EXPR:
2432 return COMPCODE_LE;
2433 case GT_EXPR:
2434 return COMPCODE_GT;
2435 case NE_EXPR:
2436 return COMPCODE_NE;
2437 case GE_EXPR:
2438 return COMPCODE_GE;
2439 case ORDERED_EXPR:
2440 return COMPCODE_ORD;
2441 case UNORDERED_EXPR:
2442 return COMPCODE_UNORD;
2443 case UNLT_EXPR:
2444 return COMPCODE_UNLT;
2445 case UNEQ_EXPR:
2446 return COMPCODE_UNEQ;
2447 case UNLE_EXPR:
2448 return COMPCODE_UNLE;
2449 case UNGT_EXPR:
2450 return COMPCODE_UNGT;
2451 case LTGT_EXPR:
2452 return COMPCODE_LTGT;
2453 case UNGE_EXPR:
2454 return COMPCODE_UNGE;
2455 default:
2456 gcc_unreachable ();
2460 /* Convert a compcode bit-based encoding of a comparison operator back
2461 to GCC's enum tree_code representation. This function is the
2462 inverse of comparison_to_compcode. */
2464 static enum tree_code
2465 compcode_to_comparison (enum comparison_code code)
2467 switch (code)
2469 case COMPCODE_LT:
2470 return LT_EXPR;
2471 case COMPCODE_EQ:
2472 return EQ_EXPR;
2473 case COMPCODE_LE:
2474 return LE_EXPR;
2475 case COMPCODE_GT:
2476 return GT_EXPR;
2477 case COMPCODE_NE:
2478 return NE_EXPR;
2479 case COMPCODE_GE:
2480 return GE_EXPR;
2481 case COMPCODE_ORD:
2482 return ORDERED_EXPR;
2483 case COMPCODE_UNORD:
2484 return UNORDERED_EXPR;
2485 case COMPCODE_UNLT:
2486 return UNLT_EXPR;
2487 case COMPCODE_UNEQ:
2488 return UNEQ_EXPR;
2489 case COMPCODE_UNLE:
2490 return UNLE_EXPR;
2491 case COMPCODE_UNGT:
2492 return UNGT_EXPR;
2493 case COMPCODE_LTGT:
2494 return LTGT_EXPR;
2495 case COMPCODE_UNGE:
2496 return UNGE_EXPR;
2497 default:
2498 gcc_unreachable ();
2502 /* Return a tree for the comparison which is the combination of
2503 doing the AND or OR (depending on CODE) of the two operations LCODE
2504 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2505 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2506 if this makes the transformation invalid. */
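/* For example, for integer operands, combining (a < b) || (a == b) ORs
   COMPCODE_LT (1) with COMPCODE_EQ (2) to give COMPCODE_LE (3), so the
   pair folds to the single comparison a <= b.  */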
2508 tree
2509 combine_comparisons (enum tree_code code, enum tree_code lcode,
2510 enum tree_code rcode, tree truth_type,
2511 tree ll_arg, tree lr_arg)
2513 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2514 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2515 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2516 enum comparison_code compcode;
2518 switch (code)
2520 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2521 compcode = lcompcode & rcompcode;
2522 break;
2524 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2525 compcode = lcompcode | rcompcode;
2526 break;
2528 default:
2529 return NULL_TREE;
2532 if (!honor_nans)
2534 /* Eliminate unordered comparisons, as well as LTGT and ORD
2535 which are not used unless the mode has NaNs. */
2536 compcode &= ~COMPCODE_UNORD;
2537 if (compcode == COMPCODE_LTGT)
2538 compcode = COMPCODE_NE;
2539 else if (compcode == COMPCODE_ORD)
2540 compcode = COMPCODE_TRUE;
2542 else if (flag_trapping_math)
2544 /* Check that the original operation and the optimized ones will trap
2545 under the same condition. */
2546 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2547 && (lcompcode != COMPCODE_EQ)
2548 && (lcompcode != COMPCODE_ORD);
2549 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2550 && (rcompcode != COMPCODE_EQ)
2551 && (rcompcode != COMPCODE_ORD);
2552 bool trap = (compcode & COMPCODE_UNORD) == 0
2553 && (compcode != COMPCODE_EQ)
2554 && (compcode != COMPCODE_ORD);
2556 /* In a short-circuited boolean expression the LHS might be
2557 such that the RHS, if evaluated, will never trap. For
2558 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2559 if neither x nor y is NaN. (This is a mixed blessing: for
2560 example, the expression above will never trap, hence
2561 optimizing it to x < y would be invalid). */
2562 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2563 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2564 rtrap = false;
2566 /* If the comparison was short-circuited, and only the RHS
2567 trapped, we may now generate a spurious trap. */
2568 if (rtrap && !ltrap
2569 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2570 return NULL_TREE;
2572 /* If we changed the conditions that cause a trap, we lose. */
2573 if ((ltrap || rtrap) != trap)
2574 return NULL_TREE;
2577 if (compcode == COMPCODE_TRUE)
2578 return constant_boolean_node (true, truth_type);
2579 else if (compcode == COMPCODE_FALSE)
2580 return constant_boolean_node (false, truth_type);
2581 else
2582 return fold_build2 (compcode_to_comparison (compcode),
2583 truth_type, ll_arg, lr_arg);
2586 /* Return nonzero if CODE is a tree code that represents a truth value. */
2588 static int
2589 truth_value_p (enum tree_code code)
2591 return (TREE_CODE_CLASS (code) == tcc_comparison
2592 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2593 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2594 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2597 /* Return nonzero if two operands (typically of the same tree node)
2598 are necessarily equal. If either argument has side-effects this
2599 function returns zero. FLAGS modifies behavior as follows:
2601 If OEP_ONLY_CONST is set, only return nonzero for constants.
2602 This function tests whether the operands are indistinguishable;
2603 it does not test whether they are equal using C's == operation.
2604 The distinction is important for IEEE floating point, because
2605 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2606 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2608 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2609 even though it may hold multiple values during a function.
2610 This is because a GCC tree node guarantees that nothing else is
2611 executed between the evaluation of its "operands" (which may often
2612 be evaluated in arbitrary order). Hence if the operands themselves
2613 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2614 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2615 unset means assuming isochronic (or instantaneous) tree equivalence.
2616 Unless comparing arbitrary expression trees, such as from different
2617 statements, this flag can usually be left unset.
2619 If OEP_PURE_SAME is set, then pure functions with identical arguments
2620 are considered the same. It is used when the caller has other ways
2621 to ensure that global memory is unchanged in between. */
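/* For example, given VAR_DECLs a and b without side effects, a + b and
   b + a compare equal through the commutativity check in the tcc_binary
   case below.  */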
2623 int
2624 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2626 /* If either is ERROR_MARK, they aren't equal. */
2627 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2628 return 0;
2630 /* If the two types don't have the same signedness, then we can't consider
2631 them equal. We must check this before the STRIP_NOPS calls
2632 because they may change the signedness of the arguments. */
2633 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2634 return 0;
2636 /* If the two types don't have the same precision, then it is not safe
2637 to strip NOPs. */
2638 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2639 return 0;
2641 STRIP_NOPS (arg0);
2642 STRIP_NOPS (arg1);
2644 /* In case both args are comparisons but with different comparison
2645 code, try to swap the comparison operands of one arg to produce
2646 a match and compare that variant. */
2647 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2648 && COMPARISON_CLASS_P (arg0)
2649 && COMPARISON_CLASS_P (arg1))
2651 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2653 if (TREE_CODE (arg0) == swap_code)
2654 return operand_equal_p (TREE_OPERAND (arg0, 0),
2655 TREE_OPERAND (arg1, 1), flags)
2656 && operand_equal_p (TREE_OPERAND (arg0, 1),
2657 TREE_OPERAND (arg1, 0), flags);
2660 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2661 /* This is needed for conversions and for COMPONENT_REF.
2662 Might as well play it safe and always test this. */
2663 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2664 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2665 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2666 return 0;
2668 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2669 We don't care about side effects in that case because the SAVE_EXPR
2670 takes care of that for us. In all other cases, two expressions are
2671 equal if they have no side effects. If we have two identical
2672 expressions with side effects that should be treated the same due
2673 to the only side effects being identical SAVE_EXPR's, that will
2674 be detected in the recursive calls below. */
2675 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2676 && (TREE_CODE (arg0) == SAVE_EXPR
2677 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2678 return 1;
2680 /* Next handle constant cases, those for which we can return 1 even
2681 if ONLY_CONST is set. */
2682 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2683 switch (TREE_CODE (arg0))
2685 case INTEGER_CST:
2686 return (! TREE_CONSTANT_OVERFLOW (arg0)
2687 && ! TREE_CONSTANT_OVERFLOW (arg1)
2688 && tree_int_cst_equal (arg0, arg1));
2690 case REAL_CST:
2691 return (! TREE_CONSTANT_OVERFLOW (arg0)
2692 && ! TREE_CONSTANT_OVERFLOW (arg1)
2693 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2694 TREE_REAL_CST (arg1)));
2696 case VECTOR_CST:
2698 tree v1, v2;
2700 if (TREE_CONSTANT_OVERFLOW (arg0)
2701 || TREE_CONSTANT_OVERFLOW (arg1))
2702 return 0;
2704 v1 = TREE_VECTOR_CST_ELTS (arg0);
2705 v2 = TREE_VECTOR_CST_ELTS (arg1);
2706 while (v1 && v2)
2708 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2709 flags))
2710 return 0;
2711 v1 = TREE_CHAIN (v1);
2712 v2 = TREE_CHAIN (v2);
2715 return v1 == v2;
2718 case COMPLEX_CST:
2719 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2720 flags)
2721 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2722 flags));
2724 case STRING_CST:
2725 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2726 && ! memcmp (TREE_STRING_POINTER (arg0),
2727 TREE_STRING_POINTER (arg1),
2728 TREE_STRING_LENGTH (arg0)));
2730 case ADDR_EXPR:
2731 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2732 0);
2733 default:
2734 break;
2737 if (flags & OEP_ONLY_CONST)
2738 return 0;
2740 /* Define macros to test an operand from arg0 and arg1 for equality and a
2741 variant that allows null and views null as being different from any
2742 non-null value. In the latter case, if either is null, then both
2743 must be; otherwise, do the normal comparison. */
2744 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2745 TREE_OPERAND (arg1, N), flags)
2747 #define OP_SAME_WITH_NULL(N) \
2748 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2749 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2751 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2753 case tcc_unary:
2754 /* Two conversions are equal only if signedness and modes match. */
2755 switch (TREE_CODE (arg0))
2757 case NOP_EXPR:
2758 case CONVERT_EXPR:
2759 case FIX_CEIL_EXPR:
2760 case FIX_TRUNC_EXPR:
2761 case FIX_FLOOR_EXPR:
2762 case FIX_ROUND_EXPR:
2763 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2764 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2765 return 0;
2766 break;
2767 default:
2768 break;
2771 return OP_SAME (0);
2774 case tcc_comparison:
2775 case tcc_binary:
2776 if (OP_SAME (0) && OP_SAME (1))
2777 return 1;
2779 /* For commutative ops, allow the other order. */
2780 return (commutative_tree_code (TREE_CODE (arg0))
2781 && operand_equal_p (TREE_OPERAND (arg0, 0),
2782 TREE_OPERAND (arg1, 1), flags)
2783 && operand_equal_p (TREE_OPERAND (arg0, 1),
2784 TREE_OPERAND (arg1, 0), flags));
2786 case tcc_reference:
2787 /* If either of the pointer (or reference) expressions we are
2788 dereferencing contain a side effect, these cannot be equal. */
2789 if (TREE_SIDE_EFFECTS (arg0)
2790 || TREE_SIDE_EFFECTS (arg1))
2791 return 0;
2793 switch (TREE_CODE (arg0))
2795 case INDIRECT_REF:
2796 case ALIGN_INDIRECT_REF:
2797 case MISALIGNED_INDIRECT_REF:
2798 case REALPART_EXPR:
2799 case IMAGPART_EXPR:
2800 return OP_SAME (0);
2802 case ARRAY_REF:
2803 case ARRAY_RANGE_REF:
2804 /* Operands 2 and 3 may be null. */
2805 return (OP_SAME (0)
2806 && OP_SAME (1)
2807 && OP_SAME_WITH_NULL (2)
2808 && OP_SAME_WITH_NULL (3));
2810 case COMPONENT_REF:
2811 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2812 may be NULL when we're called to compare MEM_EXPRs. */
2813 return OP_SAME_WITH_NULL (0)
2814 && OP_SAME (1)
2815 && OP_SAME_WITH_NULL (2);
2817 case BIT_FIELD_REF:
2818 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2820 default:
2821 return 0;
2824 case tcc_expression:
2825 switch (TREE_CODE (arg0))
2827 case ADDR_EXPR:
2828 case TRUTH_NOT_EXPR:
2829 return OP_SAME (0);
2831 case TRUTH_ANDIF_EXPR:
2832 case TRUTH_ORIF_EXPR:
2833 return OP_SAME (0) && OP_SAME (1);
2835 case TRUTH_AND_EXPR:
2836 case TRUTH_OR_EXPR:
2837 case TRUTH_XOR_EXPR:
2838 if (OP_SAME (0) && OP_SAME (1))
2839 return 1;
2841 /* Otherwise take into account this is a commutative operation. */
2842 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2843 TREE_OPERAND (arg1, 1), flags)
2844 && operand_equal_p (TREE_OPERAND (arg0, 1),
2845 TREE_OPERAND (arg1, 0), flags));
2847 case CALL_EXPR:
2848 /* If the CALL_EXPRs call different functions, then they
2849 clearly cannot be equal. */
2850 if (!OP_SAME (0))
2851 return 0;
2854 unsigned int cef = call_expr_flags (arg0);
2855 if (flags & OEP_PURE_SAME)
2856 cef &= ECF_CONST | ECF_PURE;
2857 else
2858 cef &= ECF_CONST;
2859 if (!cef)
2860 return 0;
2863 /* Now see if all the arguments are the same. operand_equal_p
2864 does not handle TREE_LIST, so we walk the operands here
2865 feeding them to operand_equal_p. */
2866 arg0 = TREE_OPERAND (arg0, 1);
2867 arg1 = TREE_OPERAND (arg1, 1);
2868 while (arg0 && arg1)
2870 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2871 flags))
2872 return 0;
2874 arg0 = TREE_CHAIN (arg0);
2875 arg1 = TREE_CHAIN (arg1);
2878 /* If we get here and both argument lists are exhausted
2879 then the CALL_EXPRs are equal. */
2880 return ! (arg0 || arg1);
2882 default:
2883 return 0;
2886 case tcc_declaration:
2887 /* Consider __builtin_sqrt equal to sqrt. */
2888 return (TREE_CODE (arg0) == FUNCTION_DECL
2889 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2890 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2891 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2893 default:
2894 return 0;
2897 #undef OP_SAME
2898 #undef OP_SAME_WITH_NULL
2901 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2902 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2904 When in doubt, return 0. */
2906 static int
2907 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2909 int unsignedp1, unsignedpo;
2910 tree primarg0, primarg1, primother;
2911 unsigned int correct_width;
2913 if (operand_equal_p (arg0, arg1, 0))
2914 return 1;
2916 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2917 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2918 return 0;
2920 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2921 and see if the inner values are the same. This removes any
2922 signedness comparison, which doesn't matter here. */
2923 primarg0 = arg0, primarg1 = arg1;
2924 STRIP_NOPS (primarg0);
2925 STRIP_NOPS (primarg1);
2926 if (operand_equal_p (primarg0, primarg1, 0))
2927 return 1;
2929 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2930 actual comparison operand, ARG0.
2932 First throw away any conversions to wider types
2933 already present in the operands. */
2935 primarg1 = get_narrower (arg1, &unsignedp1);
2936 primother = get_narrower (other, &unsignedpo);
2938 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2939 if (unsignedp1 == unsignedpo
2940 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2941 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2943 tree type = TREE_TYPE (arg0);
2945 /* Make sure shorter operand is extended the right way
2946 to match the longer operand. */
2947 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2948 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2950 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2951 return 1;
2954 return 0;
2957 /* See if ARG is an expression that is either a comparison or is performing
2958 arithmetic on comparisons. The comparisons must only be comparing
2959 two different values, which will be stored in *CVAL1 and *CVAL2; if
2960 they are nonzero it means that some operands have already been found.
2961 No variables may be used anywhere else in the expression except in the
2962 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2963 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2965 If this is true, return 1. Otherwise, return zero. */
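/* For example, (x < y) || (y == x) qualifies: every comparison uses only
   the values x and y, so *CVAL1 and *CVAL2 are set to x and y and the
   function returns 1.  */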
2967 static int
2968 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2970 enum tree_code code = TREE_CODE (arg);
2971 enum tree_code_class class = TREE_CODE_CLASS (code);
2973 /* We can handle some of the tcc_expression cases here. */
2974 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2975 class = tcc_unary;
2976 else if (class == tcc_expression
2977 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2978 || code == COMPOUND_EXPR))
2979 class = tcc_binary;
2981 else if (class == tcc_expression && code == SAVE_EXPR
2982 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2984 /* If we've already found a CVAL1 or CVAL2, this expression is
2985 too complex to handle. */
2986 if (*cval1 || *cval2)
2987 return 0;
2989 class = tcc_unary;
2990 *save_p = 1;
2993 switch (class)
2995 case tcc_unary:
2996 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2998 case tcc_binary:
2999 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3000 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3001 cval1, cval2, save_p));
3003 case tcc_constant:
3004 return 1;
3006 case tcc_expression:
3007 if (code == COND_EXPR)
3008 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3009 cval1, cval2, save_p)
3010 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3011 cval1, cval2, save_p)
3012 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3013 cval1, cval2, save_p));
3014 return 0;
3016 case tcc_comparison:
3017 /* First see if we can handle the first operand, then the second. For
3018 the second operand, we know *CVAL1 can't be zero. It must be that
3019 one side of the comparison is each of the values; test for the
3020 case where this isn't true by failing if the two operands
3021 are the same. */
3023 if (operand_equal_p (TREE_OPERAND (arg, 0),
3024 TREE_OPERAND (arg, 1), 0))
3025 return 0;
3027 if (*cval1 == 0)
3028 *cval1 = TREE_OPERAND (arg, 0);
3029 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3030 ;
3031 else if (*cval2 == 0)
3032 *cval2 = TREE_OPERAND (arg, 0);
3033 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3034 ;
3035 else
3036 return 0;
3038 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3039 ;
3040 else if (*cval2 == 0)
3041 *cval2 = TREE_OPERAND (arg, 1);
3042 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3043 ;
3044 else
3045 return 0;
3047 return 1;
3049 default:
3050 return 0;
3054 /* ARG is a tree that is known to contain just arithmetic operations and
3055 comparisons. Evaluate the operations in the tree substituting NEW0 for
3056 any occurrence of OLD0 as an operand of a comparison and likewise for
3057 NEW1 and OLD1. */
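/* For example, applied to the comparison (x < y) with OLD0 = x, NEW0 = a,
   OLD1 = y and NEW1 = b, this rebuilds the expression as (a < b).  */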
3059 static tree
3060 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3062 tree type = TREE_TYPE (arg);
3063 enum tree_code code = TREE_CODE (arg);
3064 enum tree_code_class class = TREE_CODE_CLASS (code);
3066 /* We can handle some of the tcc_expression cases here. */
3067 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3068 class = tcc_unary;
3069 else if (class == tcc_expression
3070 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3071 class = tcc_binary;
3073 switch (class)
3075 case tcc_unary:
3076 return fold_build1 (code, type,
3077 eval_subst (TREE_OPERAND (arg, 0),
3078 old0, new0, old1, new1));
3080 case tcc_binary:
3081 return fold_build2 (code, type,
3082 eval_subst (TREE_OPERAND (arg, 0),
3083 old0, new0, old1, new1),
3084 eval_subst (TREE_OPERAND (arg, 1),
3085 old0, new0, old1, new1));
3087 case tcc_expression:
3088 switch (code)
3090 case SAVE_EXPR:
3091 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3093 case COMPOUND_EXPR:
3094 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3096 case COND_EXPR:
3097 return fold_build3 (code, type,
3098 eval_subst (TREE_OPERAND (arg, 0),
3099 old0, new0, old1, new1),
3100 eval_subst (TREE_OPERAND (arg, 1),
3101 old0, new0, old1, new1),
3102 eval_subst (TREE_OPERAND (arg, 2),
3103 old0, new0, old1, new1));
3104 default:
3105 break;
3107 /* Fall through - ??? */
3109 case tcc_comparison:
3111 tree arg0 = TREE_OPERAND (arg, 0);
3112 tree arg1 = TREE_OPERAND (arg, 1);
3114 /* We need to check both for exact equality and tree equality. The
3115 former will be true if the operand has a side-effect. In that
3116 case, we know the operand occurred exactly once. */
3118 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3119 arg0 = new0;
3120 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3121 arg0 = new1;
3123 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3124 arg1 = new0;
3125 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3126 arg1 = new1;
3128 return fold_build2 (code, type, arg0, arg1);
3131 default:
3132 return arg;
3136 /* Return a tree for the case when the result of an expression is RESULT
3137 converted to TYPE and OMITTED was previously an operand of the expression
3138 but is now not needed (e.g., we folded OMITTED * 0).
3140 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3141 the conversion of RESULT to TYPE. */
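/* For example, when folding f () * 0, a caller can use
   omit_one_operand (type, integer_zero_node, call) to obtain the
   COMPOUND_EXPR (f (), 0), so the call is still evaluated.  */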
3143 tree
3144 omit_one_operand (tree type, tree result, tree omitted)
3146 tree t = fold_convert (type, result);
3148 if (TREE_SIDE_EFFECTS (omitted))
3149 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3151 return non_lvalue (t);
3154 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3156 static tree
3157 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3159 tree t = fold_convert (type, result);
3161 if (TREE_SIDE_EFFECTS (omitted))
3162 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3164 return pedantic_non_lvalue (t);
3167 /* Return a tree for the case when the result of an expression is RESULT
3168 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3169 of the expression but are now not needed.
3171 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3172 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3173 evaluated before OMITTED2. Otherwise, if neither has side effects,
3174 just do the conversion of RESULT to TYPE. */
3176 tree
3177 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3179 tree t = fold_convert (type, result);
3181 if (TREE_SIDE_EFFECTS (omitted2))
3182 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3183 if (TREE_SIDE_EFFECTS (omitted1))
3184 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3186 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3190 /* Return a simplified tree node for the truth-negation of ARG. This
3191 never alters ARG itself. We assume that ARG is an operation that
3192 returns a truth value (0 or 1).
3194 FIXME: one would think we would fold the result, but it causes
3195 problems with the dominator optimizer. */
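/* For example, !(a && b) becomes !a || !b by De Morgan's law, and
   !(x < y) becomes x >= y (or x UNGE y when NaNs must be honored).  */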
3197 tree
3198 fold_truth_not_expr (tree arg)
3200 tree type = TREE_TYPE (arg);
3201 enum tree_code code = TREE_CODE (arg);
3203 /* If this is a comparison, we can simply invert it, except for
3204 floating-point non-equality comparisons, in which case we just
3205 enclose a TRUTH_NOT_EXPR around what we have. */
3207 if (TREE_CODE_CLASS (code) == tcc_comparison)
3209 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3210 if (FLOAT_TYPE_P (op_type)
3211 && flag_trapping_math
3212 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3213 && code != NE_EXPR && code != EQ_EXPR)
3214 return NULL_TREE;
3215 else
3217 code = invert_tree_comparison (code,
3218 HONOR_NANS (TYPE_MODE (op_type)));
3219 if (code == ERROR_MARK)
3220 return NULL_TREE;
3221 else
3222 return build2 (code, type,
3223 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3227 switch (code)
3229 case INTEGER_CST:
3230 return constant_boolean_node (integer_zerop (arg), type);
3232 case TRUTH_AND_EXPR:
3233 return build2 (TRUTH_OR_EXPR, type,
3234 invert_truthvalue (TREE_OPERAND (arg, 0)),
3235 invert_truthvalue (TREE_OPERAND (arg, 1)));
3237 case TRUTH_OR_EXPR:
3238 return build2 (TRUTH_AND_EXPR, type,
3239 invert_truthvalue (TREE_OPERAND (arg, 0)),
3240 invert_truthvalue (TREE_OPERAND (arg, 1)));
3242 case TRUTH_XOR_EXPR:
3243 /* Here we can invert either operand. We invert the first operand
3244 unless the second operand is a TRUTH_NOT_EXPR in which case our
3245 result is the XOR of the first operand with the inside of the
3246 negation of the second operand. */
3248 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3249 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3250 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3251 else
3252 return build2 (TRUTH_XOR_EXPR, type,
3253 invert_truthvalue (TREE_OPERAND (arg, 0)),
3254 TREE_OPERAND (arg, 1));
3256 case TRUTH_ANDIF_EXPR:
3257 return build2 (TRUTH_ORIF_EXPR, type,
3258 invert_truthvalue (TREE_OPERAND (arg, 0)),
3259 invert_truthvalue (TREE_OPERAND (arg, 1)));
3261 case TRUTH_ORIF_EXPR:
3262 return build2 (TRUTH_ANDIF_EXPR, type,
3263 invert_truthvalue (TREE_OPERAND (arg, 0)),
3264 invert_truthvalue (TREE_OPERAND (arg, 1)));
3266 case TRUTH_NOT_EXPR:
3267 return TREE_OPERAND (arg, 0);
3269 case COND_EXPR:
3271 tree arg1 = TREE_OPERAND (arg, 1);
3272 tree arg2 = TREE_OPERAND (arg, 2);
3273 /* A COND_EXPR may have a throw as one operand, which
3274 then has void type. Just leave void operands
3275 as they are. */
3276 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3277 VOID_TYPE_P (TREE_TYPE (arg1))
3278 ? arg1 : invert_truthvalue (arg1),
3279 VOID_TYPE_P (TREE_TYPE (arg2))
3280 ? arg2 : invert_truthvalue (arg2));
3283 case COMPOUND_EXPR:
3284 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3285 invert_truthvalue (TREE_OPERAND (arg, 1)));
3287 case NON_LVALUE_EXPR:
3288 return invert_truthvalue (TREE_OPERAND (arg, 0));
3290 case NOP_EXPR:
3291 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3292 return build1 (TRUTH_NOT_EXPR, type, arg);
3294 case CONVERT_EXPR:
3295 case FLOAT_EXPR:
3296 return build1 (TREE_CODE (arg), type,
3297 invert_truthvalue (TREE_OPERAND (arg, 0)));
3299 case BIT_AND_EXPR:
3300 if (!integer_onep (TREE_OPERAND (arg, 1)))
3301 break;
3302 return build2 (EQ_EXPR, type, arg,
3303 build_int_cst (type, 0));
3305 case SAVE_EXPR:
3306 return build1 (TRUTH_NOT_EXPR, type, arg);
3308 case CLEANUP_POINT_EXPR:
3309 return build1 (CLEANUP_POINT_EXPR, type,
3310 invert_truthvalue (TREE_OPERAND (arg, 0)));
3312 default:
3313 break;
3316 return NULL_TREE;
3319 /* Return a simplified tree node for the truth-negation of ARG. This
3320 never alters ARG itself. We assume that ARG is an operation that
3321 returns a truth value (0 or 1).
3323 FIXME: one would think we would fold the result, but it causes
3324 problems with the dominator optimizer. */
3326 tree
3327 invert_truthvalue (tree arg)
3329 tree tem;
3331 if (TREE_CODE (arg) == ERROR_MARK)
3332 return arg;
3334 tem = fold_truth_not_expr (arg);
3335 if (!tem)
3336 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3338 return tem;
3341 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3342 operands are another bit-wise operation with a common input. If so,
3343 distribute the bit operations to save an operation and possibly two if
3344 constants are involved. For example, convert
3345 (A | B) & (A | C) into A | (B & C)
3346 Further simplification will occur if B and C are constants.
3348 If this optimization cannot be done, 0 will be returned. */
3350 static tree
3351 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3353 tree common;
3354 tree left, right;
3356 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3357 || TREE_CODE (arg0) == code
3358 || (TREE_CODE (arg0) != BIT_AND_EXPR
3359 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3360 return 0;
3362 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3364 common = TREE_OPERAND (arg0, 0);
3365 left = TREE_OPERAND (arg0, 1);
3366 right = TREE_OPERAND (arg1, 1);
3368 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3370 common = TREE_OPERAND (arg0, 0);
3371 left = TREE_OPERAND (arg0, 1);
3372 right = TREE_OPERAND (arg1, 0);
3374 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3376 common = TREE_OPERAND (arg0, 1);
3377 left = TREE_OPERAND (arg0, 0);
3378 right = TREE_OPERAND (arg1, 1);
3380 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3382 common = TREE_OPERAND (arg0, 1);
3383 left = TREE_OPERAND (arg0, 0);
3384 right = TREE_OPERAND (arg1, 0);
3386 else
3387 return 0;
3389 return fold_build2 (TREE_CODE (arg0), type, common,
3390 fold_build2 (code, type, left, right));
3393 /* Knowing that ARG0 and ARG1 are each either a RDIV_EXPR or a MULT_EXPR,
3394 simplify a binary operation with code CODE. This optimization is unsafe. */
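/* It is unsafe because floating-point multiplication and division are
   not exactly distributive: regrouping the operations can change how
   the intermediate results round.  */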
3395 static tree
3396 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3398 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3399 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3401 /* (A / C) +- (B / C) -> (A +- B) / C. */
3402 if (mul0 == mul1
3403 && operand_equal_p (TREE_OPERAND (arg0, 1),
3404 TREE_OPERAND (arg1, 1), 0))
3405 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3406 fold_build2 (code, type,
3407 TREE_OPERAND (arg0, 0),
3408 TREE_OPERAND (arg1, 0)),
3409 TREE_OPERAND (arg0, 1));
3411 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3412 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3413 TREE_OPERAND (arg1, 0), 0)
3414 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3415 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3417 REAL_VALUE_TYPE r0, r1;
3418 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3419 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3420 if (!mul0)
3421 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3422 if (!mul1)
3423 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3424 real_arithmetic (&r0, code, &r0, &r1);
3425 return fold_build2 (MULT_EXPR, type,
3426 TREE_OPERAND (arg0, 0),
3427 build_real (type, r0));
3430 return NULL_TREE;
3433 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3434 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3436 static tree
3437 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3438 int unsignedp)
3440 tree result;
3442 if (bitpos == 0)
3444 tree size = TYPE_SIZE (TREE_TYPE (inner));
3445 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3446 || POINTER_TYPE_P (TREE_TYPE (inner)))
3447 && host_integerp (size, 0)
3448 && tree_low_cst (size, 0) == bitsize)
3449 return fold_convert (type, inner);
3452 result = build3 (BIT_FIELD_REF, type, inner,
3453 size_int (bitsize), bitsize_int (bitpos));
3455 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3457 return result;
3460 /* Optimize a bit-field compare.
3462 There are two cases: First is a compare against a constant and the
3463 second is a comparison of two items where the fields are at the same
3464 bit position relative to the start of a chunk (byte, halfword, word)
3465 large enough to contain it. In these cases we can avoid the shift
3466 implicit in bitfield extractions.
3468 For constants, we emit a compare of the shifted constant with the
3469 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3470 compared. For two fields at the same position, we do the ANDs with the
3471 similar mask and compare the result of the ANDs.
3473 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3474 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3475 are the left and right operands of the comparison, respectively.
3477 If the optimization described above can be done, we return the resulting
3478 tree. Otherwise we return zero. */
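/* For example, on a little-endian target a comparison of a 3-bit field
   at bit position 2 of a word against the constant 3 can become
   (WORD & 0x1c) == (3 << 2), with no shift of the extracted field.  */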
3480 static tree
3481 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3482 tree lhs, tree rhs)
3484 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3485 tree type = TREE_TYPE (lhs);
3486 tree signed_type, unsigned_type;
3487 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3488 enum machine_mode lmode, rmode, nmode;
3489 int lunsignedp, runsignedp;
3490 int lvolatilep = 0, rvolatilep = 0;
3491 tree linner, rinner = NULL_TREE;
3492 tree mask;
3493 tree offset;
3495 /* Get all the information about the extractions being done. If the bit size
3496 is the same as the size of the underlying object, we aren't doing an
3497 extraction at all and so can do nothing. We also don't want to
3498 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3499 then will no longer be able to replace it. */
3500 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3501 &lunsignedp, &lvolatilep, false);
3502 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3503 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3504 return 0;
3506 if (!const_p)
3508 /* If this is not a constant, we can only do something if bit positions,
3509 sizes, and signedness are the same. */
3510 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3511 &runsignedp, &rvolatilep, false);
3513 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3514 || lunsignedp != runsignedp || offset != 0
3515 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3516 return 0;
3519 /* See if we can find a mode to refer to this field. We should be able to,
3520 but fail if we can't. */
3521 nmode = get_best_mode (lbitsize, lbitpos,
3522 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3523 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3524 TYPE_ALIGN (TREE_TYPE (rinner))),
3525 word_mode, lvolatilep || rvolatilep);
3526 if (nmode == VOIDmode)
3527 return 0;
3529 /* Set signed and unsigned types of the precision of this mode for the
3530 shifts below. */
3531 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3532 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3534 /* Compute the bit position and size for the new reference and our offset
3535 within it. If the new reference is the same size as the original, we
3536 won't optimize anything, so return zero. */
3537 nbitsize = GET_MODE_BITSIZE (nmode);
3538 nbitpos = lbitpos & ~ (nbitsize - 1);
3539 lbitpos -= nbitpos;
3540 if (nbitsize == lbitsize)
3541 return 0;
3543 if (BYTES_BIG_ENDIAN)
3544 lbitpos = nbitsize - lbitsize - lbitpos;
3546 /* Make the mask to be used against the extracted field. */
3547 mask = build_int_cst (unsigned_type, -1);
3548 mask = force_fit_type (mask, 0, false, false);
3549 mask = fold_convert (unsigned_type, mask);
3550 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3551 mask = const_binop (RSHIFT_EXPR, mask,
3552 size_int (nbitsize - lbitsize - lbitpos), 0);
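/* For instance, with NBITSIZE 32, LBITSIZE 3 and LBITPOS 2 the all-ones
   mask is shifted left by 29 and back right by 27, leaving ones in bits
   2..4 only (0x1c).  */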
3554 if (! const_p)
3555 /* If not comparing with constant, just rework the comparison
3556 and return. */
3557 return build2 (code, compare_type,
3558 build2 (BIT_AND_EXPR, unsigned_type,
3559 make_bit_field_ref (linner, unsigned_type,
3560 nbitsize, nbitpos, 1),
3561 mask),
3562 build2 (BIT_AND_EXPR, unsigned_type,
3563 make_bit_field_ref (rinner, unsigned_type,
3564 nbitsize, nbitpos, 1),
3565 mask));
3567 /* Otherwise, we are handling the constant case. See if the constant is too
3568 big for the field. Warn and return a tree for 0 (false) if so. We do
3569 this not only for its own sake, but to avoid having to test for this
3570 error case below. If we didn't, we might generate wrong code.
3572 For unsigned fields, the constant shifted right by the field length should
3573 be all zero. For signed fields, the high-order bits should agree with
3574 the sign bit. */
3576 if (lunsignedp)
3578 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3579 fold_convert (unsigned_type, rhs),
3580 size_int (lbitsize), 0)))
3582 warning (0, "comparison is always %d due to width of bit-field",
3583 code == NE_EXPR);
3584 return constant_boolean_node (code == NE_EXPR, compare_type);
3587 else
3589 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3590 size_int (lbitsize - 1), 0);
3591 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3593 warning (0, "comparison is always %d due to width of bit-field",
3594 code == NE_EXPR);
3595 return constant_boolean_node (code == NE_EXPR, compare_type);
3599 /* Single-bit compares should always be against zero. */
3600 if (lbitsize == 1 && ! integer_zerop (rhs))
3602 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3603 rhs = build_int_cst (type, 0);
3606 /* Make a new bitfield reference, shift the constant over the
3607 appropriate number of bits and mask it with the computed mask
3608 (in case this was a signed field). If we changed it, make a new one. */
3609 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3610 if (lvolatilep)
3612 TREE_SIDE_EFFECTS (lhs) = 1;
3613 TREE_THIS_VOLATILE (lhs) = 1;
3616 rhs = const_binop (BIT_AND_EXPR,
3617 const_binop (LSHIFT_EXPR,
3618 fold_convert (unsigned_type, rhs),
3619 size_int (lbitpos), 0),
3620 mask, 0);
3622 return build2 (code, compare_type,
3623 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3624 rhs);
3627 /* Subroutine for fold_truthop: decode a field reference.
3629 If EXP is a comparison reference, we return the innermost reference.
3631 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3632 set to the starting bit number.
3634 If the innermost field can be completely contained in a mode-sized
3635 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3637 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3638 otherwise it is not changed.
3640 *PUNSIGNEDP is set to the signedness of the field.
3642 *PMASK is set to the mask used. This is either contained in a
3643 BIT_AND_EXPR or derived from the width of the field.
3645 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3647 Return 0 if this is not a component reference or is one that we can't
3648 do anything with. */
3650 static tree
3651 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3652 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3653 int *punsignedp, int *pvolatilep,
3654 tree *pmask, tree *pand_mask)
3656 tree outer_type = 0;
3657 tree and_mask = 0;
3658 tree mask, inner, offset;
3659 tree unsigned_type;
3660 unsigned int precision;
3662 /* All the optimizations using this function assume integer fields.
3663 There are problems with FP fields since the type_for_size call
3664 below can fail for, e.g., XFmode. */
3665 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3666 return 0;
3668 /* We are interested in the bare arrangement of bits, so strip everything
3669 that doesn't affect the machine mode. However, record the type of the
3670 outermost expression if it may matter below. */
3671 if (TREE_CODE (exp) == NOP_EXPR
3672 || TREE_CODE (exp) == CONVERT_EXPR
3673 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3674 outer_type = TREE_TYPE (exp);
3675 STRIP_NOPS (exp);
3677 if (TREE_CODE (exp) == BIT_AND_EXPR)
3679 and_mask = TREE_OPERAND (exp, 1);
3680 exp = TREE_OPERAND (exp, 0);
3681 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3682 if (TREE_CODE (and_mask) != INTEGER_CST)
3683 return 0;
3686 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3687 punsignedp, pvolatilep, false);
3688 if ((inner == exp && and_mask == 0)
3689 || *pbitsize < 0 || offset != 0
3690 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3691 return 0;
3693 /* If the number of bits in the reference is the same as the bitsize of
3694 the outer type, then the outer type gives the signedness. Otherwise
3695 (in case of a small bitfield) the signedness is unchanged. */
3696 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3697 *punsignedp = TYPE_UNSIGNED (outer_type);
3699 /* Compute the mask to access the bitfield. */
3700 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3701 precision = TYPE_PRECISION (unsigned_type);
3703 mask = build_int_cst (unsigned_type, -1);
3704 mask = force_fit_type (mask, 0, false, false);
3706 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3707 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3709 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3710 if (and_mask != 0)
3711 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3712 fold_convert (unsigned_type, and_mask), mask);
3714 *pmask = mask;
3715 *pand_mask = and_mask;
3716 return inner;
3719 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3720 bit positions. */
3722 static int
3723 all_ones_mask_p (tree mask, int size)
3725 tree type = TREE_TYPE (mask);
3726 unsigned int precision = TYPE_PRECISION (type);
3727 tree tmask;
3729 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3730 tmask = force_fit_type (tmask, 0, false, false);
3732 return
3733 tree_int_cst_equal (mask,
3734 const_binop (RSHIFT_EXPR,
3735 const_binop (LSHIFT_EXPR, tmask,
3736 size_int (precision - size),
3737 0),
3738 size_int (precision - size), 0));
3741 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3742 represents the sign bit of EXP's type. If EXP represents a sign
3743 or zero extension, also test VAL against the unextended type.
3744 The return value is the (sub)expression whose sign bit is VAL,
3745 or NULL_TREE otherwise. */
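/* For example, if EXP has 32-bit int type, VAL must be 0x80000000.
   For (int) c with c a signed char, VAL == 0x80 also matches, and the
   narrower subexpression c is returned.  */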
3747 static tree
3748 sign_bit_p (tree exp, tree val)
3750 unsigned HOST_WIDE_INT mask_lo, lo;
3751 HOST_WIDE_INT mask_hi, hi;
3752 int width;
3753 tree t;
3755 /* Tree EXP must have an integral type. */
3756 t = TREE_TYPE (exp);
3757 if (! INTEGRAL_TYPE_P (t))
3758 return NULL_TREE;
3760 /* Tree VAL must be an integer constant. */
3761 if (TREE_CODE (val) != INTEGER_CST
3762 || TREE_CONSTANT_OVERFLOW (val))
3763 return NULL_TREE;
3765 width = TYPE_PRECISION (t);
3766 if (width > HOST_BITS_PER_WIDE_INT)
3768 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3769 lo = 0;
3771 mask_hi = ((unsigned HOST_WIDE_INT) -1
3772 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3773 mask_lo = -1;
3775 else
3777 hi = 0;
3778 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3780 mask_hi = 0;
3781 mask_lo = ((unsigned HOST_WIDE_INT) -1
3782 >> (HOST_BITS_PER_WIDE_INT - width));
3785 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3786 treat VAL as if it were unsigned. */
3787 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3788 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3789 return exp;
3791 /* Handle extension from a narrower type. */
3792 if (TREE_CODE (exp) == NOP_EXPR
3793 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3794 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3796 return NULL_TREE;
3799 /* Subroutine for fold_truthop: determine if an operand is simple enough
3800 to be evaluated unconditionally. */
3802 static int
3803 simple_operand_p (tree exp)
3805 /* Strip any conversions that don't change the machine mode. */
3806 STRIP_NOPS (exp);
3808 return (CONSTANT_CLASS_P (exp)
3809 || TREE_CODE (exp) == SSA_NAME
3810 || (DECL_P (exp)
3811 && ! TREE_ADDRESSABLE (exp)
3812 && ! TREE_THIS_VOLATILE (exp)
3813 && ! DECL_NONLOCAL (exp)
3814 /* Don't regard global variables as simple. They may be
3815 allocated in ways unknown to the compiler (shared memory,
3816 #pragma weak, etc.). */
3817 && ! TREE_PUBLIC (exp)
3818 && ! DECL_EXTERNAL (exp)
3819 /* Loading a static variable is unduly expensive, but global
3820 registers aren't expensive. */
3821 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3824 /* The following functions are subroutines to fold_range_test and allow it to
3825 try to change a logical combination of comparisons into a range test.
3827 For example, both
3828 X == 2 || X == 3 || X == 4 || X == 5
3829 and
3830 X >= 2 && X <= 5
3831 are converted to
3832 (unsigned) (X - 2) <= 3
3834 We describe each set of comparisons as being either inside or outside
3835 a range, using a variable named like IN_P, and then describe the
3836 range with a lower and upper bound. If one of the bounds is omitted,
3837 it represents either the highest or lowest value of the type.
3839 In the comments below, we represent a range by two numbers in brackets
3840 preceded by a "+" to designate being inside that range, or a "-" to
3841 designate being outside that range, so the condition can be inverted by
3842 flipping the prefix. An omitted bound is represented by a "-". For
3843 example, "- [-, 10]" means being outside the range starting at the lowest
3844 possible value and ending at 10, in other words, being greater than 10.
3845 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3846 always false.
3848 We set up things so that the missing bounds are handled in a consistent
3849 manner so neither a missing bound nor "true" and "false" need to be
3850 handled using a special case. */
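/* A concrete instance of the conversion described above, written as
   plain C for illustration (hypothetical helpers, not part of GCC).
   The two functions agree for every int X: */
#if 0
static int range_as_tests (int x) { return x >= 2 && x <= 5; }
static int range_folded (int x) { return (unsigned) x - 2u <= 3u; }
#endif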
3852 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3853 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3854 and UPPER1_P are nonzero if the respective argument is an upper bound
3855 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3856 must be specified for a comparison. ARG1 will be converted to ARG0's
3857 type if both are specified. */
3859 static tree
3860 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3861 tree arg1, int upper1_p)
3863 tree tem;
3864 int result;
3865 int sgn0, sgn1;
3867 /* If neither arg represents infinity, do the normal operation.
3868 Else, if not a comparison, return infinity. Else handle the special
3869 comparison rules. Note that most of the cases below won't occur, but
3870 are handled for consistency. */
3872 if (arg0 != 0 && arg1 != 0)
3874 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3875 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3876 STRIP_NOPS (tem);
3877 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3880 if (TREE_CODE_CLASS (code) != tcc_comparison)
3881 return 0;
3883 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3884 for neither. In real mathematics, we cannot assume open-ended ranges
3885 are the same. But this is computer arithmetic, where numbers are
3886 finite. We can therefore represent any missing bound by a value Z
3887 greater than any representable number, which permits us to treat all
3888 unbounded ranges as equal. */
3889 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3890 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3891 switch (code)
3893 case EQ_EXPR:
3894 result = sgn0 == sgn1;
3895 break;
3896 case NE_EXPR:
3897 result = sgn0 != sgn1;
3898 break;
3899 case LT_EXPR:
3900 result = sgn0 < sgn1;
3901 break;
3902 case LE_EXPR:
3903 result = sgn0 <= sgn1;
3904 break;
3905 case GT_EXPR:
3906 result = sgn0 > sgn1;
3907 break;
3908 case GE_EXPR:
3909 result = sgn0 >= sgn1;
3910 break;
3911 default:
3912 gcc_unreachable ();
3915 return constant_boolean_node (result, type);
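/* Illustrative sketch, not part of GCC: the SGN encoding above reduces
   bound comparisons against infinities to plain integer comparisons.
   -1 stands for a missing lower bound (-infinity), +1 for a missing
   upper bound (+infinity), and 0 for any finite value. */
#if 0
static int
bound_sgn (int present, int upper_p)	/* mirrors the sgn0/sgn1 setup */
{
  return present ? 0 : (upper_p ? 1 : -1);
}
#endif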
3918 /* Given EXP, a logical expression, set the range it is testing into
3919 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3920 actually being tested. *PLOW and *PHIGH will be made of the same
3921 type as the returned expression. If EXP is not a comparison, we
3922 will most likely not be returning a useful value and range. Set
3923 *STRICT_OVERFLOW_P to true if the return value is only valid
3924 because signed overflow is undefined; otherwise, do not change
3925 *STRICT_OVERFLOW_P. */
3927 static tree
3928 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3929 bool *strict_overflow_p)
3931 enum tree_code code;
3932 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3933 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3934 int in_p, n_in_p;
3935 tree low, high, n_low, n_high;
3937 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3938 and see if we can refine the range. Some of the cases below may not
3939 happen, but it doesn't seem worth worrying about this. We "continue"
3940 the outer loop when we've changed something; otherwise we "break"
3941 the switch, which will "break" the while. */
3943 in_p = 0;
3944 low = high = build_int_cst (TREE_TYPE (exp), 0);
3946 while (1)
3948 code = TREE_CODE (exp);
3949 exp_type = TREE_TYPE (exp);
3951 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3953 if (TREE_CODE_LENGTH (code) > 0)
3954 arg0 = TREE_OPERAND (exp, 0);
3955 if (TREE_CODE_CLASS (code) == tcc_comparison
3956 || TREE_CODE_CLASS (code) == tcc_unary
3957 || TREE_CODE_CLASS (code) == tcc_binary)
3958 arg0_type = TREE_TYPE (arg0);
3959 if (TREE_CODE_CLASS (code) == tcc_binary
3960 || TREE_CODE_CLASS (code) == tcc_comparison
3961 || (TREE_CODE_CLASS (code) == tcc_expression
3962 && TREE_CODE_LENGTH (code) > 1))
3963 arg1 = TREE_OPERAND (exp, 1);
3966 switch (code)
3968 case TRUTH_NOT_EXPR:
3969 in_p = ! in_p, exp = arg0;
3970 continue;
3972 case EQ_EXPR: case NE_EXPR:
3973 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3974 /* We can only do something if the range is testing for zero
3975 and if the second operand is an integer constant. Note that
3976 saying something is "in" the range we make is done by
3977 complementing IN_P, since it will be set in the initial case of
3978 being not equal to zero; "out" is leaving it alone. */
3979 if (low == 0 || high == 0
3980 || ! integer_zerop (low) || ! integer_zerop (high)
3981 || TREE_CODE (arg1) != INTEGER_CST)
3982 break;
3984 switch (code)
3986 case NE_EXPR: /* - [c, c] */
3987 low = high = arg1;
3988 break;
3989 case EQ_EXPR: /* + [c, c] */
3990 in_p = ! in_p, low = high = arg1;
3991 break;
3992 case GT_EXPR: /* - [-, c] */
3993 low = 0, high = arg1;
3994 break;
3995 case GE_EXPR: /* + [c, -] */
3996 in_p = ! in_p, low = arg1, high = 0;
3997 break;
3998 case LT_EXPR: /* - [c, -] */
3999 low = arg1, high = 0;
4000 break;
4001 case LE_EXPR: /* + [-, c] */
4002 in_p = ! in_p, low = 0, high = arg1;
4003 break;
4004 default:
4005 gcc_unreachable ();
4008 /* If this is an unsigned comparison, we also know that EXP is
4009 greater than or equal to zero. We base the range tests we make
4010 on that fact, so we record it here so we can parse existing
4011 range tests. We test arg0_type since often the return type
4012 of, e.g. EQ_EXPR, is boolean. */
4013 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4015 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4016 in_p, low, high, 1,
4017 build_int_cst (arg0_type, 0),
4018 NULL_TREE))
4019 break;
4021 in_p = n_in_p, low = n_low, high = n_high;
4023 /* If the high bound is missing, but we have a nonzero low
4024 bound, reverse the range so it goes from zero to the low bound
4025 minus 1. */
4026 if (high == 0 && low && ! integer_zerop (low))
4028 in_p = ! in_p;
4029 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4030 integer_one_node, 0);
4031 low = build_int_cst (arg0_type, 0);
4035 exp = arg0;
4036 continue;
4038 case NEGATE_EXPR:
4039 /* (-x) IN [a,b] -> x in [-b, -a] */
4040 n_low = range_binop (MINUS_EXPR, exp_type,
4041 build_int_cst (exp_type, 0),
4042 0, high, 1);
4043 n_high = range_binop (MINUS_EXPR, exp_type,
4044 build_int_cst (exp_type, 0),
4045 0, low, 0);
4046 low = n_low, high = n_high;
4047 exp = arg0;
4048 continue;
4050 case BIT_NOT_EXPR:
4051 /* ~ X -> -X - 1 */
4052 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4053 build_int_cst (exp_type, 1));
4054 continue;
4056 case PLUS_EXPR: case MINUS_EXPR:
4057 if (TREE_CODE (arg1) != INTEGER_CST)
4058 break;
4060 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4061 move a constant to the other side. */
4062 if (!TYPE_UNSIGNED (arg0_type)
4063 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4064 break;
4066 /* If EXP is signed, any overflow in the computation is undefined,
4067 so we don't worry about it so long as our computations on
4068 the bounds don't overflow. For unsigned, overflow is defined
4069 and this is exactly the right thing. */
4070 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4071 arg0_type, low, 0, arg1, 0);
4072 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4073 arg0_type, high, 1, arg1, 0);
4074 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4075 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4076 break;
4078 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4079 *strict_overflow_p = true;
4081 /* Check for an unsigned range which has wrapped around the maximum
4082 value thus making n_high < n_low, and normalize it. */
4083 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4085 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4086 integer_one_node, 0);
4087 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4088 integer_one_node, 0);
4090 /* If the range is of the form +/- [ x+1, x ], we won't
4091 be able to normalize it. But then, it represents the
4092 whole range or the empty set, so make it
4093 +/- [ -, - ]. */
4094 if (tree_int_cst_equal (n_low, low)
4095 && tree_int_cst_equal (n_high, high))
4096 low = high = 0;
4097 else
4098 in_p = ! in_p;
4100 else
4101 low = n_low, high = n_high;
4103 exp = arg0;
4104 continue;
4106 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4107 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4108 break;
4110 if (! INTEGRAL_TYPE_P (arg0_type)
4111 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4112 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4113 break;
4115 n_low = low, n_high = high;
4117 if (n_low != 0)
4118 n_low = fold_convert (arg0_type, n_low);
4120 if (n_high != 0)
4121 n_high = fold_convert (arg0_type, n_high);
4124 /* If we're converting arg0 from an unsigned type to exp's
4125 signed type, we will be doing the comparison as unsigned.
4126 The tests above have already verified that LOW and HIGH
4127 are both positive.
4129 So we have to ensure that we will handle large unsigned
4130 values the same way that the current signed bounds treat
4131 negative values. */
4133 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4135 tree high_positive;
4136 tree equiv_type = lang_hooks.types.type_for_mode
4137 (TYPE_MODE (arg0_type), 1);
4139 /* A range without an upper bound is, naturally, unbounded.
4140 Since convert would have cropped a very large value, use
4141 the max value for the destination type. */
4142 high_positive
4143 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4144 : TYPE_MAX_VALUE (arg0_type);
4146 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4147 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4148 fold_convert (arg0_type,
4149 high_positive),
4150 fold_convert (arg0_type,
4151 integer_one_node));
4153 /* If the low bound is specified, "and" the range with the
4154 range for which the original unsigned value will be
4155 positive. */
4156 if (low != 0)
4158 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4159 1, n_low, n_high, 1,
4160 fold_convert (arg0_type,
4161 integer_zero_node),
4162 high_positive))
4163 break;
4165 in_p = (n_in_p == in_p);
4167 else
4169 /* Otherwise, "or" the range with the range of the input
4170 that will be interpreted as negative. */
4171 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4172 0, n_low, n_high, 1,
4173 fold_convert (arg0_type,
4174 integer_zero_node),
4175 high_positive))
4176 break;
4178 in_p = (in_p != n_in_p);
4182 exp = arg0;
4183 low = n_low, high = n_high;
4184 continue;
4186 default:
4187 break;
4190 break;
4193 /* If EXP is a constant, we can evaluate whether this is true or false. */
4194 if (TREE_CODE (exp) == INTEGER_CST)
4196 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4197 exp, 0, low, 0))
4198 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4199 exp, 1, high, 1)));
4200 low = high = 0;
4201 exp = 0;
4204 *pin_p = in_p, *plow = low, *phigh = high;
4205 return exp;
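/* Illustrative instance of the PLUS_EXPR case above, not part of GCC:
   for unsigned char X, "(unsigned char) (X + 10) <= 5" starts as the
   range + [0, 5] on X + 10; subtracting the constant from both bounds
   (with wrap-around) yields + [246, 251] on X itself.  The two
   functions below agree for every input: */
#if 0
static int with_offset (unsigned char x)
{ return (unsigned char) (x + 10) <= 5; }
static int as_range (unsigned char x)
{ return x >= 246 && x <= 251; }
#endif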
4208 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4209 type, TYPE, return an expression to test if EXP is in (or out of, depending
4210 on IN_P) the range. Return 0 if the test couldn't be created. */
4212 static tree
4213 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4215 tree etype = TREE_TYPE (exp);
4216 tree value;
4218 #ifdef HAVE_canonicalize_funcptr_for_compare
4219 /* Disable this optimization for function pointer expressions
4220 on targets that require function pointer canonicalization. */
4221 if (HAVE_canonicalize_funcptr_for_compare
4222 && TREE_CODE (etype) == POINTER_TYPE
4223 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4224 return NULL_TREE;
4225 #endif
4227 if (! in_p)
4229 value = build_range_check (type, exp, 1, low, high);
4230 if (value != 0)
4231 return invert_truthvalue (value);
4233 return 0;
4236 if (low == 0 && high == 0)
4237 return build_int_cst (type, 1);
4239 if (low == 0)
4240 return fold_build2 (LE_EXPR, type, exp,
4241 fold_convert (etype, high));
4243 if (high == 0)
4244 return fold_build2 (GE_EXPR, type, exp,
4245 fold_convert (etype, low));
4247 if (operand_equal_p (low, high, 0))
4248 return fold_build2 (EQ_EXPR, type, exp,
4249 fold_convert (etype, low));
4251 if (integer_zerop (low))
4253 if (! TYPE_UNSIGNED (etype))
4255 etype = lang_hooks.types.unsigned_type (etype);
4256 high = fold_convert (etype, high);
4257 exp = fold_convert (etype, exp);
4259 return build_range_check (type, exp, 1, 0, high);
4262 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4263 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4265 unsigned HOST_WIDE_INT lo;
4266 HOST_WIDE_INT hi;
4267 int prec;
4269 prec = TYPE_PRECISION (etype);
4270 if (prec <= HOST_BITS_PER_WIDE_INT)
4272 hi = 0;
4273 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4275 else
4277 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4278 lo = (unsigned HOST_WIDE_INT) -1;
4281 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4283 if (TYPE_UNSIGNED (etype))
4285 etype = lang_hooks.types.signed_type (etype);
4286 exp = fold_convert (etype, exp);
4288 return fold_build2 (GT_EXPR, type, exp,
4289 build_int_cst (etype, 0));
4293 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4294 This requires wrap-around arithmetic for the type of the expression. */
4295 switch (TREE_CODE (etype))
4297 case INTEGER_TYPE:
4298 /* There is no requirement that LOW be within the range of ETYPE
4299 if the latter is a subtype. It must, however, be within the base
4300 type of ETYPE. So be sure we do the subtraction in that type. */
4301 if (TREE_TYPE (etype))
4302 etype = TREE_TYPE (etype);
4303 break;
4305 case ENUMERAL_TYPE:
4306 case BOOLEAN_TYPE:
4307 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4308 TYPE_UNSIGNED (etype));
4309 break;
4311 default:
4312 break;
4315 /* If we don't have wrap-around arithmetic up front, try to force it. */
4316 if (TREE_CODE (etype) == INTEGER_TYPE
4317 && !TYPE_OVERFLOW_WRAPS (etype))
4319 tree utype, minv, maxv;
4321 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4322 for the type in question, as we rely on this here. */
4323 utype = lang_hooks.types.unsigned_type (etype);
4324 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4325 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4326 integer_one_node, 1);
4327 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4329 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4330 minv, 1, maxv, 1)))
4331 etype = utype;
4332 else
4333 return 0;
4336 high = fold_convert (etype, high);
4337 low = fold_convert (etype, low);
4338 exp = fold_convert (etype, exp);
4340 value = const_binop (MINUS_EXPR, high, low, 0);
4342 if (value != 0 && !TREE_OVERFLOW (value))
4343 return build_range_check (type,
4344 fold_build2 (MINUS_EXPR, etype, exp, low),
4345 1, build_int_cst (etype, 0), value);
4347 return 0;
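/* The closing rewrite above, spelled out in plain C for illustration
   (hypothetical helper, not part of GCC): with wrap-around arithmetic,
   "low <= x && x <= high" becomes a single unsigned comparison. */
#if 0
static int
in_range (unsigned int x, unsigned int low, unsigned int high)
{
  return x - low <= high - low;		/* one compare; wraps safely */
}
#endif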
4350 /* Return the predecessor of VAL in its type, handling the infinite case. */
4352 static tree
4353 range_predecessor (tree val)
4355 tree type = TREE_TYPE (val);
4357 if (INTEGRAL_TYPE_P (type)
4358 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4359 return 0;
4360 else
4361 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4364 /* Return the successor of VAL in its type, handling the infinite case. */
4366 static tree
4367 range_successor (tree val)
4369 tree type = TREE_TYPE (val);
4371 if (INTEGRAL_TYPE_P (type)
4372 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4373 return 0;
4374 else
4375 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4378 /* Given two ranges, see if we can merge them into one. Return 1 if we
4379 can, 0 if we can't. Set the output range into the specified parameters. */
4381 static int
4382 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4383 tree high0, int in1_p, tree low1, tree high1)
4385 int no_overlap;
4386 int subset;
4387 int temp;
4388 tree tem;
4389 int in_p;
4390 tree low, high;
4391 int lowequal = ((low0 == 0 && low1 == 0)
4392 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4393 low0, 0, low1, 0)));
4394 int highequal = ((high0 == 0 && high1 == 0)
4395 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4396 high0, 1, high1, 1)));
4398 /* Make range 0 be the range that starts first, or ends last if they
4399 start at the same value. Swap them if it isn't. */
4400 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4401 low0, 0, low1, 0))
4402 || (lowequal
4403 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4404 high1, 1, high0, 1))))
4406 temp = in0_p, in0_p = in1_p, in1_p = temp;
4407 tem = low0, low0 = low1, low1 = tem;
4408 tem = high0, high0 = high1, high1 = tem;
4411 /* Now flag two cases, whether the ranges are disjoint or whether the
4412 second range is totally subsumed in the first. Note that the tests
4413 below are simplified by the ones above. */
4414 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4415 high0, 1, low1, 0));
4416 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4417 high1, 1, high0, 1));
4419 /* We now have four cases, depending on whether we are including or
4420 excluding the two ranges. */
4421 if (in0_p && in1_p)
4423 /* If they don't overlap, the result is false. If the second range
4424 is a subset it is the result. Otherwise, the range is from the start
4425 of the second to the end of the first. */
4426 if (no_overlap)
4427 in_p = 0, low = high = 0;
4428 else if (subset)
4429 in_p = 1, low = low1, high = high1;
4430 else
4431 in_p = 1, low = low1, high = high0;
4434 else if (in0_p && ! in1_p)
4436 /* If they don't overlap, the result is the first range. If they are
4437 equal, the result is false. If the second range is a subset of the
4438 first, and the ranges begin at the same place, we go from just after
4439 the end of the second range to the end of the first. If the second
4440 range is not a subset of the first, or if it is a subset and both
4441 ranges end at the same place, the range starts at the start of the
4442 first range and ends just before the second range.
4443 Otherwise, we can't describe this as a single range. */
4444 if (no_overlap)
4445 in_p = 1, low = low0, high = high0;
4446 else if (lowequal && highequal)
4447 in_p = 0, low = high = 0;
4448 else if (subset && lowequal)
4450 low = range_successor (high1);
4451 high = high0;
4452 in_p = 1;
4453 if (low == 0)
4455 /* We are in the weird situation where high0 > high1 but
4456 high1 has no successor. Punt. */
4457 return 0;
4460 else if (! subset || highequal)
4462 low = low0;
4463 high = range_predecessor (low1);
4464 in_p = 1;
4465 if (high == 0)
4467 /* low0 < low1 but low1 has no predecessor. Punt. */
4468 return 0;
4471 else
4472 return 0;
4475 else if (! in0_p && in1_p)
4477 /* If they don't overlap, the result is the second range. If the second
4478 is a subset of the first, the result is false. Otherwise,
4479 the range starts just after the first range and ends at the
4480 end of the second. */
4481 if (no_overlap)
4482 in_p = 1, low = low1, high = high1;
4483 else if (subset || highequal)
4484 in_p = 0, low = high = 0;
4485 else
4487 low = range_successor (high0);
4488 high = high1;
4489 in_p = 1;
4490 if (low == 0)
4492 /* high1 > high0 but high0 has no successor. Punt. */
4493 return 0;
4498 else
4500 /* The case where we are excluding both ranges. Here the complex case
4501 is if they don't overlap. In that case, the only time we have a
4502 range is if they are adjacent. If the second is a subset of the
4503 first, the result is the first. Otherwise, the range to exclude
4504 starts at the beginning of the first range and ends at the end of the
4505 second. */
4506 if (no_overlap)
4508 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4509 range_successor (high0),
4510 1, low1, 0)))
4511 in_p = 0, low = low0, high = high1;
4512 else
4514 /* Canonicalize - [min, x] into - [-, x]. */
4515 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4516 switch (TREE_CODE (TREE_TYPE (low0)))
4518 case ENUMERAL_TYPE:
4519 if (TYPE_PRECISION (TREE_TYPE (low0))
4520 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4521 break;
4522 /* FALLTHROUGH */
4523 case INTEGER_TYPE:
4524 if (tree_int_cst_equal (low0,
4525 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4526 low0 = 0;
4527 break;
4528 case POINTER_TYPE:
4529 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4530 && integer_zerop (low0))
4531 low0 = 0;
4532 break;
4533 default:
4534 break;
4537 /* Canonicalize - [x, max] into - [x, -]. */
4538 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4539 switch (TREE_CODE (TREE_TYPE (high1)))
4541 case ENUMERAL_TYPE:
4542 if (TYPE_PRECISION (TREE_TYPE (high1))
4543 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4544 break;
4545 /* FALLTHROUGH */
4546 case INTEGER_TYPE:
4547 if (tree_int_cst_equal (high1,
4548 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4549 high1 = 0;
4550 break;
4551 case POINTER_TYPE:
4552 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4553 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4554 high1, 1,
4555 integer_one_node, 1)))
4556 high1 = 0;
4557 break;
4558 default:
4559 break;
4562 /* The ranges might also be adjacent between the maximum and
4563 minimum values of the given type. For
4564 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4565 return + [x + 1, y - 1]. */
4566 if (low0 == 0 && high1 == 0)
4568 low = range_successor (high0);
4569 high = range_predecessor (low1);
4570 if (low == 0 || high == 0)
4571 return 0;
4573 in_p = 1;
4575 else
4576 return 0;
4579 else if (subset)
4580 in_p = 0, low = low0, high = high0;
4581 else
4582 in_p = 0, low = low0, high = high1;
4585 *pin_p = in_p, *plow = low, *phigh = high;
4586 return 1;
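/* Simplified model of the in0_p && in1_p case above, for finite integer
   bounds only; an illustration, not part of GCC.  It assumes the swap
   above has already made range 0 the one that starts first, and it
   returns 0 for an empty intersection, which merge_ranges instead
   encodes as an always-false range. */
#if 0
static int
merge_in_in (int low0, int high0, int low1, int high1,
	     int *plow, int *phigh)
{
  if (high0 < low1)			/* disjoint: empty intersection */
    return 0;
  *plow = low1;				/* start of the second range */
  *phigh = high1 < high0 ? high1 : high0;  /* subset, or plain overlap */
  return 1;
}
#endif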
4590 /* Subroutine of fold, looking inside expressions of the form
4591 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4592 of the COND_EXPR. This function is also used to optimize
4593 A op B ? C : A, by reversing the comparison first.
4595 Return a folded expression whose code is not a COND_EXPR
4596 anymore, or NULL_TREE if no folding opportunity is found. */
4598 static tree
4599 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4601 enum tree_code comp_code = TREE_CODE (arg0);
4602 tree arg00 = TREE_OPERAND (arg0, 0);
4603 tree arg01 = TREE_OPERAND (arg0, 1);
4604 tree arg1_type = TREE_TYPE (arg1);
4605 tree tem;
4607 STRIP_NOPS (arg1);
4608 STRIP_NOPS (arg2);
4610 /* If we have A op 0 ? A : -A, consider applying the following
4611 transformations:
4613 A == 0? A : -A same as -A
4614 A != 0? A : -A same as A
4615 A >= 0? A : -A same as abs (A)
4616 A > 0? A : -A same as abs (A)
4617 A <= 0? A : -A same as -abs (A)
4618 A < 0? A : -A same as -abs (A)
4620 None of these transformations work for modes with signed
4621 zeros. If A is +/-0, the first two transformations will
4622 change the sign of the result (from +0 to -0, or vice
4623 versa). The last four will fix the sign of the result,
4624 even though the original expressions could be positive or
4625 negative, depending on the sign of A.
4627 Note that all these transformations are correct if A is
4628 NaN, since the two alternatives (A and -A) are also NaNs. */
4629 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4630 ? real_zerop (arg01)
4631 : integer_zerop (arg01))
4632 && ((TREE_CODE (arg2) == NEGATE_EXPR
4633 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4634 /* In the case that A is of the form X-Y, '-A' (arg2) may
4635 have already been folded to Y-X, check for that. */
4636 || (TREE_CODE (arg1) == MINUS_EXPR
4637 && TREE_CODE (arg2) == MINUS_EXPR
4638 && operand_equal_p (TREE_OPERAND (arg1, 0),
4639 TREE_OPERAND (arg2, 1), 0)
4640 && operand_equal_p (TREE_OPERAND (arg1, 1),
4641 TREE_OPERAND (arg2, 0), 0))))
4642 switch (comp_code)
4644 case EQ_EXPR:
4645 case UNEQ_EXPR:
4646 tem = fold_convert (arg1_type, arg1);
4647 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4648 case NE_EXPR:
4649 case LTGT_EXPR:
4650 return pedantic_non_lvalue (fold_convert (type, arg1));
4651 case UNGE_EXPR:
4652 case UNGT_EXPR:
4653 if (flag_trapping_math)
4654 break;
4655 /* Fall through. */
4656 case GE_EXPR:
4657 case GT_EXPR:
4658 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4659 arg1 = fold_convert (lang_hooks.types.signed_type
4660 (TREE_TYPE (arg1)), arg1);
4661 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4662 return pedantic_non_lvalue (fold_convert (type, tem));
4663 case UNLE_EXPR:
4664 case UNLT_EXPR:
4665 if (flag_trapping_math)
4666 break;
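/* Fall through. */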
4667 case LE_EXPR:
4668 case LT_EXPR:
4669 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4670 arg1 = fold_convert (lang_hooks.types.signed_type
4671 (TREE_TYPE (arg1)), arg1);
4672 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4673 return negate_expr (fold_convert (type, tem));
4674 default:
4675 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4676 break;
4679 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4680 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4681 both transformations are correct when A is NaN: A != 0
4682 is then true, and A == 0 is false. */
4684 if (integer_zerop (arg01) && integer_zerop (arg2))
4686 if (comp_code == NE_EXPR)
4687 return pedantic_non_lvalue (fold_convert (type, arg1));
4688 else if (comp_code == EQ_EXPR)
4689 return build_int_cst (type, 0);
4692 /* Try some transformations of A op B ? A : B.
4694 A == B? A : B same as B
4695 A != B? A : B same as A
4696 A >= B? A : B same as max (A, B)
4697 A > B? A : B same as max (B, A)
4698 A <= B? A : B same as min (A, B)
4699 A < B? A : B same as min (B, A)
4701 As above, these transformations don't work in the presence
4702 of signed zeros. For example, if A and B are zeros of
4703 opposite sign, the first two transformations will change
4704 the sign of the result. In the last four, the original
4705 expressions give different results for (A=+0, B=-0) and
4706 (A=-0, B=+0), but the transformed expressions do not.
4708 The first two transformations are correct if either A or B
4709 is a NaN. In the first transformation, the condition will
4710 be false, and B will indeed be chosen. In the case of the
4711 second transformation, the condition A != B will be true,
4712 and A will be chosen.
4714 The conversions to max() and min() are not correct if B is
4715 a number and A is not. The conditions in the original
4716 expressions will be false, so all four give B. The min()
4717 and max() versions would give a NaN instead. */
4718 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4719 /* Avoid these transformations if the COND_EXPR may be used
4720 as an lvalue in the C++ front-end. PR c++/19199. */
4721 && (in_gimple_form
4722 || (strcmp (lang_hooks.name, "GNU C++") != 0
4723 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4724 || ! maybe_lvalue_p (arg1)
4725 || ! maybe_lvalue_p (arg2)))
4727 tree comp_op0 = arg00;
4728 tree comp_op1 = arg01;
4729 tree comp_type = TREE_TYPE (comp_op0);
4731 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4732 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4734 comp_type = type;
4735 comp_op0 = arg1;
4736 comp_op1 = arg2;
4739 switch (comp_code)
4741 case EQ_EXPR:
4742 return pedantic_non_lvalue (fold_convert (type, arg2));
4743 case NE_EXPR:
4744 return pedantic_non_lvalue (fold_convert (type, arg1));
4745 case LE_EXPR:
4746 case LT_EXPR:
4747 case UNLE_EXPR:
4748 case UNLT_EXPR:
4749 /* In C++ a ?: expression can be an lvalue, so put the
4750 operand which will be used if they are equal first
4751 so that we can convert this back to the
4752 corresponding COND_EXPR. */
4753 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4755 comp_op0 = fold_convert (comp_type, comp_op0);
4756 comp_op1 = fold_convert (comp_type, comp_op1);
4757 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4758 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4759 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4760 return pedantic_non_lvalue (fold_convert (type, tem));
4762 break;
4763 case GE_EXPR:
4764 case GT_EXPR:
4765 case UNGE_EXPR:
4766 case UNGT_EXPR:
4767 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4769 comp_op0 = fold_convert (comp_type, comp_op0);
4770 comp_op1 = fold_convert (comp_type, comp_op1);
4771 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4772 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4773 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4774 return pedantic_non_lvalue (fold_convert (type, tem));
4776 break;
4777 case UNEQ_EXPR:
4778 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4779 return pedantic_non_lvalue (fold_convert (type, arg2));
4780 break;
4781 case LTGT_EXPR:
4782 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4783 return pedantic_non_lvalue (fold_convert (type, arg1));
4784 break;
4785 default:
4786 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4787 break;
4791 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4792 we might still be able to simplify this. For example,
4793 if C1 is one less or one more than C2, this might have started
4794 out as a MIN or MAX and been transformed by this function.
4795 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4797 if (INTEGRAL_TYPE_P (type)
4798 && TREE_CODE (arg01) == INTEGER_CST
4799 && TREE_CODE (arg2) == INTEGER_CST)
4800 switch (comp_code)
4802 case EQ_EXPR:
4803 /* We can replace A with C1 in this case. */
4804 arg1 = fold_convert (type, arg01);
4805 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4807 case LT_EXPR:
4808 /* If C1 is C2 + 1, this is min(A, C2). */
4809 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4810 OEP_ONLY_CONST)
4811 && operand_equal_p (arg01,
4812 const_binop (PLUS_EXPR, arg2,
4813 integer_one_node, 0),
4814 OEP_ONLY_CONST))
4815 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4816 type, arg1, arg2));
4817 break;
4819 case LE_EXPR:
4820 /* If C1 is C2 - 1, this is min(A, C2). */
4821 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4822 OEP_ONLY_CONST)
4823 && operand_equal_p (arg01,
4824 const_binop (MINUS_EXPR, arg2,
4825 integer_one_node, 0),
4826 OEP_ONLY_CONST))
4827 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4828 type, arg1, arg2));
4829 break;
4831 case GT_EXPR:
4832 /* If C1 is C2 - 1, this is max(A, C2). */
4833 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4834 OEP_ONLY_CONST)
4835 && operand_equal_p (arg01,
4836 const_binop (MINUS_EXPR, arg2,
4837 integer_one_node, 0),
4838 OEP_ONLY_CONST))
4839 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4840 type, arg1, arg2));
4841 break;
4843 case GE_EXPR:
4844 /* If C1 is C2 + 1, this is max(A, C2). */
4845 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4846 OEP_ONLY_CONST)
4847 && operand_equal_p (arg01,
4848 const_binop (PLUS_EXPR, arg2,
4849 integer_one_node, 0),
4850 OEP_ONLY_CONST))
4851 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4852 type, arg1, arg2));
4853 break;
4854 case NE_EXPR:
4855 break;
4856 default:
4857 gcc_unreachable ();
4860 return NULL_TREE;
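/* Two rows of the first table above, written as C for illustration;
   hypothetical helpers, not part of GCC, and valid only where signed
   zeros, traps and INT_MIN negation are not a concern: */
#if 0
static int abs_via_cond (int a) { return a >= 0 ? a : -a; }  /* abs (a) */
static int neg_via_cond (int a) { return a == 0 ? a : -a; }  /* -a */
#endif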
4865 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4866 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4867 #endif
4869 /* EXP is some logical combination of boolean tests. See if we can
4870 merge it into some range test. Return the new tree if so. */
4872 static tree
4873 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4875 int or_op = (code == TRUTH_ORIF_EXPR
4876 || code == TRUTH_OR_EXPR);
4877 int in0_p, in1_p, in_p;
4878 tree low0, low1, low, high0, high1, high;
4879 bool strict_overflow_p = false;
4880 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4881 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4882 tree tem;
4883 const char * const warnmsg = G_("assuming signed overflow does not occur "
4884 "when simplifying range test");
4886 /* If this is an OR operation, invert both sides; we will invert
4887 again at the end. */
4888 if (or_op)
4889 in0_p = ! in0_p, in1_p = ! in1_p;
4891 /* If both expressions are the same, if we can merge the ranges, and we
4892 can build the range test, return it or it inverted. If one of the
4893 ranges is always true or always false, consider it to be the same
4894 expression as the other. */
4895 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4896 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4897 in1_p, low1, high1)
4898 && 0 != (tem = (build_range_check (type,
4899 lhs != 0 ? lhs
4900 : rhs != 0 ? rhs : integer_zero_node,
4901 in_p, low, high))))
4903 if (strict_overflow_p)
4904 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4905 return or_op ? invert_truthvalue (tem) : tem;
4908 /* On machines where the branch cost is expensive, if this is a
4909 short-circuited branch and the underlying object on both sides
4910 is the same, make a non-short-circuit operation. */
4911 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4912 && lhs != 0 && rhs != 0
4913 && (code == TRUTH_ANDIF_EXPR
4914 || code == TRUTH_ORIF_EXPR)
4915 && operand_equal_p (lhs, rhs, 0))
4917 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4918 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4919 which cases we can't do this. */
4920 if (simple_operand_p (lhs))
4921 return build2 (code == TRUTH_ANDIF_EXPR
4922 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4923 type, op0, op1);
4925 else if (lang_hooks.decls.global_bindings_p () == 0
4926 && ! CONTAINS_PLACEHOLDER_P (lhs))
4928 tree common = save_expr (lhs);
4930 if (0 != (lhs = build_range_check (type, common,
4931 or_op ? ! in0_p : in0_p,
4932 low0, high0))
4933 && (0 != (rhs = build_range_check (type, common,
4934 or_op ? ! in1_p : in1_p,
4935 low1, high1))))
4937 if (strict_overflow_p)
4938 fold_overflow_warning (warnmsg,
4939 WARN_STRICT_OVERFLOW_COMPARISON);
4940 return build2 (code == TRUTH_ANDIF_EXPR
4941 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4942 type, lhs, rhs);
4947 return 0;
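/* The shape of the non-short-circuit rewrite above, for illustration
   (not part of GCC): when both operands are simple and free of side
   effects, the second branch can be traded for a plain bitwise AND of
   the 0/1 comparison results. */
#if 0
static int branchy (int x, int y) { return (x >= 2) && (y <= 5); }
static int branchless (int x, int y) { return (x >= 2) & (y <= 5); }
#endif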
4950 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4951 bit value. Arrange things so the extra bits will be set to zero if and
4952 only if C is sign-extended to its full width. If MASK is nonzero,
4953 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4955 static tree
4956 unextend (tree c, int p, int unsignedp, tree mask)
4958 tree type = TREE_TYPE (c);
4959 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4960 tree temp;
4962 if (p == modesize || unsignedp)
4963 return c;
4965 /* We work by getting just the sign bit into the low-order bit, then
4966 into the high-order bit, then sign-extend. We then XOR that value
4967 with C. */
4968 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4969 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4971 /* We must use a signed type in order to get an arithmetic right shift.
4972 However, we must also avoid introducing accidental overflows, so that
4973 a subsequent call to integer_zerop will work. Hence we must
4974 do the type conversion here. At this point, the constant is either
4975 zero or one, and the conversion to a signed type can never overflow.
4976 We could get an overflow if this conversion is done anywhere else. */
4977 if (TYPE_UNSIGNED (type))
4978 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4980 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4981 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4982 if (mask != 0)
4983 temp = const_binop (BIT_AND_EXPR, temp,
4984 fold_convert (TREE_TYPE (c), mask), 0);
4985 /* If necessary, convert the type back to match the type of C. */
4986 if (TYPE_UNSIGNED (type))
4987 temp = fold_convert (type, temp);
4989 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
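/* A related classic idiom, shown for illustration only (the function
   above builds a related but not identical tree sequence, and also
   folds in MASK): sign-extending a P-bit value with an XOR and a
   subtract, assuming X already fits in P bits and two's-complement
   arithmetic. */
#if 0
static int
sign_extend (unsigned int x, int p)	/* assumes 0 < p <= 32 */
{
  unsigned int m = 1u << (p - 1);	/* sign bit of the P-bit field */
  return (int) ((x ^ m) - m);		/* high bits become sign copies */
}
#endif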
4992 /* Find ways of folding logical expressions of LHS and RHS:
4993 Try to merge two comparisons to the same innermost item.
4994 Look for range tests like "ch >= '0' && ch <= '9'".
4995 Look for combinations of simple terms on machines with expensive branches
4996 and evaluate the RHS unconditionally.
4998 For example, if we have p->a == 2 && p->b == 4 and we can make an
4999 object large enough to span both A and B, we can do this with a comparison
5000 against the object ANDed with the a mask.
5002 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5003 operations to do this with one comparison.
5005 We check for both normal comparisons and the BIT_AND_EXPRs made by
5006 this function and the one above.
5008 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5009 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5011 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5012 two operands.
5014 We return the simplified tree or 0 if no optimization is possible. */
5016 static tree
5017 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5019 /* If this is the "or" of two comparisons, we can do something if
5020 the comparisons are NE_EXPR. If this is the "and", we can do something
5021 if the comparisons are EQ_EXPR. I.e.,
5022 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5024 WANTED_CODE is this operation code. For single bit fields, we can
5025 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5026 comparison for one-bit fields. */
5028 enum tree_code wanted_code;
5029 enum tree_code lcode, rcode;
5030 tree ll_arg, lr_arg, rl_arg, rr_arg;
5031 tree ll_inner, lr_inner, rl_inner, rr_inner;
5032 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5033 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5034 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5035 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5036 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5037 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5038 enum machine_mode lnmode, rnmode;
5039 tree ll_mask, lr_mask, rl_mask, rr_mask;
5040 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5041 tree l_const, r_const;
5042 tree lntype, rntype, result;
5043 int first_bit, end_bit;
5044 int volatilep;
5045 tree orig_lhs = lhs, orig_rhs = rhs;
5046 enum tree_code orig_code = code;
5048 /* Start by getting the comparison codes. Fail if anything is volatile.
5049 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5050 it were surrounded with a NE_EXPR. */
5052 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5053 return 0;
5055 lcode = TREE_CODE (lhs);
5056 rcode = TREE_CODE (rhs);
5058 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5060 lhs = build2 (NE_EXPR, truth_type, lhs,
5061 build_int_cst (TREE_TYPE (lhs), 0));
5062 lcode = NE_EXPR;
5065 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5067 rhs = build2 (NE_EXPR, truth_type, rhs,
5068 build_int_cst (TREE_TYPE (rhs), 0));
5069 rcode = NE_EXPR;
5072 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5073 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5074 return 0;
5076 ll_arg = TREE_OPERAND (lhs, 0);
5077 lr_arg = TREE_OPERAND (lhs, 1);
5078 rl_arg = TREE_OPERAND (rhs, 0);
5079 rr_arg = TREE_OPERAND (rhs, 1);
5081 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5082 if (simple_operand_p (ll_arg)
5083 && simple_operand_p (lr_arg))
5085 tree result;
5086 if (operand_equal_p (ll_arg, rl_arg, 0)
5087 && operand_equal_p (lr_arg, rr_arg, 0))
5089 result = combine_comparisons (code, lcode, rcode,
5090 truth_type, ll_arg, lr_arg);
5091 if (result)
5092 return result;
5094 else if (operand_equal_p (ll_arg, rr_arg, 0)
5095 && operand_equal_p (lr_arg, rl_arg, 0))
5097 result = combine_comparisons (code, lcode,
5098 swap_tree_comparison (rcode),
5099 truth_type, ll_arg, lr_arg);
5100 if (result)
5101 return result;
5105 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5106 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5108 /* If the RHS can be evaluated unconditionally and its operands are
5109 simple, it wins to evaluate the RHS unconditionally on machines
5110 with expensive branches. In this case, this isn't a comparison
5111 that can be merged. Avoid doing this if the RHS is a floating-point
5112 comparison since those can trap. */
5114 if (BRANCH_COST >= 2
5115 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5116 && simple_operand_p (rl_arg)
5117 && simple_operand_p (rr_arg))
5119 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5120 if (code == TRUTH_OR_EXPR
5121 && lcode == NE_EXPR && integer_zerop (lr_arg)
5122 && rcode == NE_EXPR && integer_zerop (rr_arg)
5123 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5124 return build2 (NE_EXPR, truth_type,
5125 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5126 ll_arg, rl_arg),
5127 build_int_cst (TREE_TYPE (ll_arg), 0));
5129 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5130 if (code == TRUTH_AND_EXPR
5131 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5132 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5133 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5134 return build2 (EQ_EXPR, truth_type,
5135 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5136 ll_arg, rl_arg),
5137 build_int_cst (TREE_TYPE (ll_arg), 0));
5139 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5141 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5142 return build2 (code, truth_type, lhs, rhs);
5143 return NULL_TREE;
5147 /* See if the comparisons can be merged. Then get all the parameters for
5148 each side. */
5150 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5151 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5152 return 0;
5154 volatilep = 0;
5155 ll_inner = decode_field_reference (ll_arg,
5156 &ll_bitsize, &ll_bitpos, &ll_mode,
5157 &ll_unsignedp, &volatilep, &ll_mask,
5158 &ll_and_mask);
5159 lr_inner = decode_field_reference (lr_arg,
5160 &lr_bitsize, &lr_bitpos, &lr_mode,
5161 &lr_unsignedp, &volatilep, &lr_mask,
5162 &lr_and_mask);
5163 rl_inner = decode_field_reference (rl_arg,
5164 &rl_bitsize, &rl_bitpos, &rl_mode,
5165 &rl_unsignedp, &volatilep, &rl_mask,
5166 &rl_and_mask);
5167 rr_inner = decode_field_reference (rr_arg,
5168 &rr_bitsize, &rr_bitpos, &rr_mode,
5169 &rr_unsignedp, &volatilep, &rr_mask,
5170 &rr_and_mask);
5172 /* The inner operation on the lhs of each comparison must be the same
5173 if we are to be able to do anything.
5174 Then see if we have constants. If not, the same must be true for
5175 the rhs's. */
5176 if (volatilep || ll_inner == 0 || rl_inner == 0
5177 || ! operand_equal_p (ll_inner, rl_inner, 0))
5178 return 0;
5180 if (TREE_CODE (lr_arg) == INTEGER_CST
5181 && TREE_CODE (rr_arg) == INTEGER_CST)
5182 l_const = lr_arg, r_const = rr_arg;
5183 else if (lr_inner == 0 || rr_inner == 0
5184 || ! operand_equal_p (lr_inner, rr_inner, 0))
5185 return 0;
5186 else
5187 l_const = r_const = 0;
5189 /* If either comparison code is not correct for our logical operation,
5190 fail. However, we can convert a one-bit comparison against zero into
5191 the opposite comparison against that bit being set in the field. */
5193 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5194 if (lcode != wanted_code)
5196 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5198 /* Make the left operand unsigned, since we are only interested
5199 in the value of one bit. Otherwise we are doing the wrong
5200 thing below. */
5201 ll_unsignedp = 1;
5202 l_const = ll_mask;
5204 else
5205 return 0;
5208 /* This is analogous to the code for l_const above. */
5209 if (rcode != wanted_code)
5211 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5213 rl_unsignedp = 1;
5214 r_const = rl_mask;
5216 else
5217 return 0;
5220 /* After this point all optimizations will generate bit-field
5221 references, which we might not want. */
5222 if (! lang_hooks.can_use_bit_fields_p ())
5223 return 0;
5225 /* See if we can find a mode that contains both fields being compared on
5226 the left. If we can't, fail. Otherwise, update all constants and masks
5227 to be relative to a field of that size. */
5228 first_bit = MIN (ll_bitpos, rl_bitpos);
5229 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5230 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5231 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5232 volatilep);
5233 if (lnmode == VOIDmode)
5234 return 0;
5236 lnbitsize = GET_MODE_BITSIZE (lnmode);
5237 lnbitpos = first_bit & ~ (lnbitsize - 1);
5238 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5239 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5241 if (BYTES_BIG_ENDIAN)
5243 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5244 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5247 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5248 size_int (xll_bitpos), 0);
5249 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5250 size_int (xrl_bitpos), 0);
5252 if (l_const)
5254 l_const = fold_convert (lntype, l_const);
5255 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5256 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5257 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5258 fold_build1 (BIT_NOT_EXPR,
5259 lntype, ll_mask),
5260 0)))
5262 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5264 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5267 if (r_const)
5269 r_const = fold_convert (lntype, r_const);
5270 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5271 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5272 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5273 fold_build1 (BIT_NOT_EXPR,
5274 lntype, rl_mask),
5275 0)))
5277 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5279 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5283 /* If the right sides are not constant, do the same for them. Also,
5284 disallow this optimization if a size or signedness mismatch occurs
5285 between the left and right sides. */
5286 if (l_const == 0)
5288 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5289 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5290 /* Make sure the two fields on the right
5291 correspond to the left without being swapped. */
5292 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5293 return 0;
5295 first_bit = MIN (lr_bitpos, rr_bitpos);
5296 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5297 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5298 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5299 volatilep);
5300 if (rnmode == VOIDmode)
5301 return 0;
5303 rnbitsize = GET_MODE_BITSIZE (rnmode);
5304 rnbitpos = first_bit & ~ (rnbitsize - 1);
5305 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5306 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5308 if (BYTES_BIG_ENDIAN)
5310 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5311 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5314 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5315 size_int (xlr_bitpos), 0);
5316 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5317 size_int (xrr_bitpos), 0);
5319 /* Make a mask that corresponds to both fields being compared.
5320 Do this for both items being compared. If the operands are the
5321 same size and the bits being compared are in the same position
5322 then we can do this by masking both and comparing the masked
5323 results. */
5324 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5325 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5326 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5328 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5329 ll_unsignedp || rl_unsignedp);
5330 if (! all_ones_mask_p (ll_mask, lnbitsize))
5331 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5333 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5334 lr_unsignedp || rr_unsignedp);
5335 if (! all_ones_mask_p (lr_mask, rnbitsize))
5336 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5338 return build2 (wanted_code, truth_type, lhs, rhs);
5341 /* There is still another way we can do something: If both pairs of
5342 fields being compared are adjacent, we may be able to make a wider
5343 field containing them both.
5345 Note that we still must mask the lhs/rhs expressions. Furthermore,
5346 the mask must be shifted to account for the shift done by
5347 make_bit_field_ref. */
5348 if ((ll_bitsize + ll_bitpos == rl_bitpos
5349 && lr_bitsize + lr_bitpos == rr_bitpos)
5350 || (ll_bitpos == rl_bitpos + rl_bitsize
5351 && lr_bitpos == rr_bitpos + rr_bitsize))
5353 tree type;
5355 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5356 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5357 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5358 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5360 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5361 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5362 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5363 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5365 /* Convert to the smaller type before masking out unwanted bits. */
5366 type = lntype;
5367 if (lntype != rntype)
5369 if (lnbitsize > rnbitsize)
5371 lhs = fold_convert (rntype, lhs);
5372 ll_mask = fold_convert (rntype, ll_mask);
5373 type = rntype;
5375 else if (lnbitsize < rnbitsize)
5377 rhs = fold_convert (lntype, rhs);
5378 lr_mask = fold_convert (lntype, lr_mask);
5379 type = lntype;
5383 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5384 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5386 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5387 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5389 return build2 (wanted_code, truth_type, lhs, rhs);
5392 return 0;
5395 /* Handle the case of comparisons with constants. If there is something in
5396 common between the masks, those bits of the constants must be the same.
5397 If not, the condition is always false. Test for this to avoid generating
5398 incorrect code below. */
5399 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5400 if (! integer_zerop (result)
5401 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5402 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5404 if (wanted_code == NE_EXPR)
5406 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5407 return constant_boolean_node (true, truth_type);
5409 else
5411 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5412 return constant_boolean_node (false, truth_type);
5416 /* Construct the expression we will return. First get the component
5417 reference we will make. Unless the mask is all ones the width of
5418 that field, perform the mask operation. Then compare with the
5419 merged constant. */
5420 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5421 ll_unsignedp || rl_unsignedp);
5423 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5424 if (! all_ones_mask_p (ll_mask, lnbitsize))
5425 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5427 return build2 (wanted_code, truth_type, result,
5428 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
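/* The net effect fold_truthop is after, sketched for illustration; the
   struct, the field layout and the constants below are assumptions, not
   anything GCC promises: */
#if 0
struct s { unsigned a : 4; unsigned b : 4; };
/* With A in the low nibble, "x.a == 2 && x.b == 3" can be done as one
   masked compare of the byte holding both fields: */
static int merged (unsigned char word) { return (word & 0xffu) == 0x32u; }
#endif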
5431 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5432 constant. */
5434 static tree
5435 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5437 tree arg0 = op0;
5438 enum tree_code op_code;
5439 tree comp_const = op1;
5440 tree minmax_const;
5441 int consts_equal, consts_lt;
5442 tree inner;
5444 STRIP_SIGN_NOPS (arg0);
5446 op_code = TREE_CODE (arg0);
5447 minmax_const = TREE_OPERAND (arg0, 1);
5448 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5449 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5450 inner = TREE_OPERAND (arg0, 0);
5452 /* If something does not permit us to optimize, return NULL_TREE. */
5453 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5454 || TREE_CODE (comp_const) != INTEGER_CST
5455 || TREE_CONSTANT_OVERFLOW (comp_const)
5456 || TREE_CODE (minmax_const) != INTEGER_CST
5457 || TREE_CONSTANT_OVERFLOW (minmax_const))
5458 return NULL_TREE;
5460 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5461 and GT_EXPR, doing the rest with recursive calls using logical
5462 simplifications. */
5463 switch (code)
5465 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5467 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5468 type, op0, op1);
5469 if (tem)
5470 return invert_truthvalue (tem);
5471 return NULL_TREE;
5474 case GE_EXPR:
5475 return
5476 fold_build2 (TRUTH_ORIF_EXPR, type,
5477 optimize_minmax_comparison
5478 (EQ_EXPR, type, arg0, comp_const),
5479 optimize_minmax_comparison
5480 (GT_EXPR, type, arg0, comp_const));
5482 case EQ_EXPR:
5483 if (op_code == MAX_EXPR && consts_equal)
5484 /* MAX (X, 0) == 0 -> X <= 0 */
5485 return fold_build2 (LE_EXPR, type, inner, comp_const);
5487 else if (op_code == MAX_EXPR && consts_lt)
5488 /* MAX (X, 0) == 5 -> X == 5 */
5489 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5491 else if (op_code == MAX_EXPR)
5492 /* MAX (X, 0) == -1 -> false */
5493 return omit_one_operand (type, integer_zero_node, inner);
5495 else if (consts_equal)
5496 /* MIN (X, 0) == 0 -> X >= 0 */
5497 return fold_build2 (GE_EXPR, type, inner, comp_const);
5499 else if (consts_lt)
5500 /* MIN (X, 0) == 5 -> false */
5501 return omit_one_operand (type, integer_zero_node, inner);
5503 else
5504 /* MIN (X, 0) == -1 -> X == -1 */
5505 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5507 case GT_EXPR:
5508 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5509 /* MAX (X, 0) > 0 -> X > 0
5510 MAX (X, 0) > 5 -> X > 5 */
5511 return fold_build2 (GT_EXPR, type, inner, comp_const);
5513 else if (op_code == MAX_EXPR)
5514 /* MAX (X, 0) > -1 -> true */
5515 return omit_one_operand (type, integer_one_node, inner);
5517 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5518 /* MIN (X, 0) > 0 -> false
5519 MIN (X, 0) > 5 -> false */
5520 return omit_one_operand (type, integer_zero_node, inner);
5522 else
5523 /* MIN (X, 0) > -1 -> X > -1 */
5524 return fold_build2 (GT_EXPR, type, inner, comp_const);
5526 default:
5527 return NULL_TREE;
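/* Two of the rewrites above, written out for illustration (signed int,
   hypothetical helpers, not part of GCC): */
#if 0
/* MAX (x, 0) == 0 -> x <= 0 */
static int max_eq_0 (int x) { return (x > 0 ? x : 0) == 0; }
/* MIN (x, 0) > 0 -> false */
static int min_gt_0 (int x) { return (x < 0 ? x : 0) > 0; }
#endif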
5531 /* T is an integer expression that is being multiplied by, divided by,
5532 or reduced modulo a constant C (CODE says which operation and what
5533 kind of divide or modulus). See if we can eliminate that operation by folding it with
5534 other operations already in T. WIDE_TYPE, if non-null, is a type that
5535 should be used for the computation if wider than our type.
5537 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5538 (X * 2) + (Y * 4). We must, however, be assured that either the original
5539 expression would not overflow or that overflow is undefined for the type
5540 in the language in question.
5542 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5543 the machine has a multiply-accumulate insn or that this is part of an
5544 addressing calculation.
5546 If we return a non-null expression, it is an equivalent form of the
5547 original computation, but need not be in the original type.
5549 We set *STRICT_OVERFLOW_P to true if the return value depends on
5550 signed overflow being undefined. Otherwise we do not change
5551 *STRICT_OVERFLOW_P. */
5553 static tree
5554 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5555 bool *strict_overflow_p)
5557 /* To avoid exponential search depth, refuse to allow recursion past
5558 three levels. Beyond that (1) it's highly unlikely that we'll find
5559 something interesting and (2) we've probably processed it before
5560 when we built the inner expression. */
5562 static int depth;
5563 tree ret;
5565 if (depth > 3)
5566 return NULL;
5568 depth++;
5569 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5570 depth--;
5572 return ret;
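
/* The same depth-limiting idiom in isolation, as a hedged sketch: a
   function-local static counter bounds recursion without threading a
   depth parameter through every caller.  demo_depth_limited is a
   made-up name.  */
static int
demo_depth_limited (int n)
{
  static int depth;
  int ret;

  if (depth > 3)
    return 0;			/* Give up rather than recurse deeper.  */

  depth++;
  ret = n <= 0 ? 0 : 1 + demo_depth_limited (n - 1);
  depth--;

  return ret;
}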
5575 static tree
5576 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5577 bool *strict_overflow_p)
5579 tree type = TREE_TYPE (t);
5580 enum tree_code tcode = TREE_CODE (t);
5581 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5582 > GET_MODE_SIZE (TYPE_MODE (type)))
5583 ? wide_type : type);
5584 tree t1, t2;
5585 int same_p = tcode == code;
5586 tree op0 = NULL_TREE, op1 = NULL_TREE;
5587 bool sub_strict_overflow_p;
5589 /* Don't deal with constants of zero here; they confuse the code below. */
5590 if (integer_zerop (c))
5591 return NULL_TREE;
5593 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5594 op0 = TREE_OPERAND (t, 0);
5596 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5597 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5599 /* Note that we need not handle conditional operations here since fold
5600 already handles those cases. So just do arithmetic here. */
5601 switch (tcode)
5603 case INTEGER_CST:
5604 /* For a constant, we can always simplify if we are a multiply
5605 or (for divide and modulus) if it is a multiple of our constant. */
5606 if (code == MULT_EXPR
5607 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5608 return const_binop (code, fold_convert (ctype, t),
5609 fold_convert (ctype, c), 0);
5610 break;
5612 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5613 /* If op0 is an expression ... */
5614 if ((COMPARISON_CLASS_P (op0)
5615 || UNARY_CLASS_P (op0)
5616 || BINARY_CLASS_P (op0)
5617 || EXPRESSION_CLASS_P (op0))
5618 /* ... and is unsigned, and its type is smaller than ctype,
5619 then we cannot pass through as widening. */
5620 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5621 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5622 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5623 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5624 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5625 /* ... or this is a truncation (t is narrower than op0),
5626 then we cannot pass through this narrowing. */
5627 || (GET_MODE_SIZE (TYPE_MODE (type))
5628 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5629 /* ... or signedness changes for division or modulus,
5630 then we cannot pass through this conversion. */
5631 || (code != MULT_EXPR
5632 && (TYPE_UNSIGNED (ctype)
5633 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5634 break;
5636 /* Pass the constant down and see if we can make a simplification. If
5637 we can, replace this expression with the inner simplification for
5638 possible later conversion to our or some other type. */
5639 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5640 && TREE_CODE (t2) == INTEGER_CST
5641 && ! TREE_CONSTANT_OVERFLOW (t2)
5642 && (0 != (t1 = extract_muldiv (op0, t2, code,
5643 code == MULT_EXPR
5644 ? ctype : NULL_TREE,
5645 strict_overflow_p))))
5646 return t1;
5647 break;
5649 case ABS_EXPR:
5650 /* If widening the type changes it from signed to unsigned, then we
5651 must avoid building ABS_EXPR itself as unsigned. */
5652 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5654 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5655 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5656 != 0)
5658 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5659 return fold_convert (ctype, t1);
5661 break;
5663 /* FALLTHROUGH */
5664 case NEGATE_EXPR:
5665 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5666 != 0)
5667 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5668 break;
5670 case MIN_EXPR: case MAX_EXPR:
5671 /* If widening the type changes the signedness, then we can't perform
5672 this optimization as that changes the result. */
5673 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5674 break;
5676 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5677 sub_strict_overflow_p = false;
5678 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5679 &sub_strict_overflow_p)) != 0
5680 && (t2 = extract_muldiv (op1, c, code, wide_type,
5681 &sub_strict_overflow_p)) != 0)
5683 if (tree_int_cst_sgn (c) < 0)
5684 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5685 if (sub_strict_overflow_p)
5686 *strict_overflow_p = true;
5687 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5688 fold_convert (ctype, t2));
5690 break;
5692 case LSHIFT_EXPR: case RSHIFT_EXPR:
5693 /* If the second operand is constant, this is a multiplication
5694 or floor division, by a power of two, so we can treat it that
5695 way unless the multiplier or divisor overflows. Signed
5696 left-shift overflow is implementation-defined rather than
5697 undefined in C90, so do not convert signed left shift into
5698 multiplication. */
5699 if (TREE_CODE (op1) == INTEGER_CST
5700 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5701 /* const_binop may not detect overflow correctly,
5702 so check for it explicitly here. */
5703 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5704 && TREE_INT_CST_HIGH (op1) == 0
5705 && 0 != (t1 = fold_convert (ctype,
5706 const_binop (LSHIFT_EXPR,
5707 size_one_node,
5708 op1, 0)))
5709 && ! TREE_OVERFLOW (t1))
5710 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5711 ? MULT_EXPR : FLOOR_DIV_EXPR,
5712 ctype, fold_convert (ctype, op0), t1),
5713 c, code, wide_type, strict_overflow_p);
5714 break;
5716 case PLUS_EXPR: case MINUS_EXPR:
5717 /* See if we can eliminate the operation on both sides. If we can, we
5718 can return a new PLUS or MINUS. If we can't, the only remaining
5719 cases where we can do anything are if the second operand is a
5720 constant. */
5721 sub_strict_overflow_p = false;
5722 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5723 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5724 if (t1 != 0 && t2 != 0
5725 && (code == MULT_EXPR
5726 /* If not multiplication, we can only do this if both operands
5727 are divisible by c. */
5728 || (multiple_of_p (ctype, op0, c)
5729 && multiple_of_p (ctype, op1, c))))
5731 if (sub_strict_overflow_p)
5732 *strict_overflow_p = true;
5733 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5734 fold_convert (ctype, t2));
5737 /* If this was a subtraction, negate OP1 and set it to be an addition.
5738 This simplifies the logic below. */
5739 if (tcode == MINUS_EXPR)
5740 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5742 if (TREE_CODE (op1) != INTEGER_CST)
5743 break;
5745 /* If either OP1 or C are negative, this optimization is not safe for
5746 some of the division and remainder types while for others we need
5747 to change the code. */
5748 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5750 if (code == CEIL_DIV_EXPR)
5751 code = FLOOR_DIV_EXPR;
5752 else if (code == FLOOR_DIV_EXPR)
5753 code = CEIL_DIV_EXPR;
5754 else if (code != MULT_EXPR
5755 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5756 break;
5759 /* If it's a multiply or a division/modulus operation of a multiple
5760 of our constant, do the operation and verify it doesn't overflow. */
5761 if (code == MULT_EXPR
5762 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5764 op1 = const_binop (code, fold_convert (ctype, op1),
5765 fold_convert (ctype, c), 0);
5766 /* We allow the constant to overflow with wrapping semantics. */
5767 if (op1 == 0
5768 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5769 break;
5771 else
5772 break;
5774 /* If we have an unsigned type that is not a sizetype, we cannot widen
5775 the operation since it will change the result if the original
5776 computation overflowed. */
5777 if (TYPE_UNSIGNED (ctype)
5778 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5779 && ctype != type)
5780 break;
5782 /* If we were able to eliminate our operation from the first side,
5783 apply our operation to the second side and reform the PLUS. */
5784 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5785 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5787 /* The last case is if we are a multiply. In that case, we can
5788 apply the distributive law to commute the multiply and addition
5789 if the multiplication of the constants doesn't overflow. */
5790 if (code == MULT_EXPR)
5791 return fold_build2 (tcode, ctype,
5792 fold_build2 (code, ctype,
5793 fold_convert (ctype, op0),
5794 fold_convert (ctype, c)),
5795 op1);
5797 break;
5799 case MULT_EXPR:
5800 /* We have a special case here if we are doing something like
5801 (C * 8) % 4 since we know that's zero. */
5802 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5803 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5804 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5805 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5806 return omit_one_operand (type, integer_zero_node, op0);
5808 /* ... fall through ... */
5810 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5811 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5812 /* If we can extract our operation from the LHS, do so and return a
5813 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5814 do something only if the second operand is a constant. */
5815 if (same_p
5816 && (t1 = extract_muldiv (op0, c, code, wide_type,
5817 strict_overflow_p)) != 0)
5818 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5819 fold_convert (ctype, op1));
5820 else if (tcode == MULT_EXPR && code == MULT_EXPR
5821 && (t1 = extract_muldiv (op1, c, code, wide_type,
5822 strict_overflow_p)) != 0)
5823 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5824 fold_convert (ctype, t1));
5825 else if (TREE_CODE (op1) != INTEGER_CST)
5826 return 0;
5828 /* If these are the same operation types, we can associate them
5829 assuming no overflow. */
5830 if (tcode == code
5831 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5832 fold_convert (ctype, c), 0))
5833 && ! TREE_OVERFLOW (t1))
5834 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5836 /* If these operations "cancel" each other, we have the main
5837 optimizations of this pass, which occur when either constant is a
5838 multiple of the other, in which case we replace this with either an
5839 operation of CODE or TCODE.
5841 If we have an unsigned type that is not a sizetype, we cannot do
5842 this since it will change the result if the original computation
5843 overflowed. */
5844 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5845 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5846 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5847 || (tcode == MULT_EXPR
5848 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5849 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5851 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5853 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5854 *strict_overflow_p = true;
5855 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5856 fold_convert (ctype,
5857 const_binop (TRUNC_DIV_EXPR,
5858 op1, c, 0)));
5860 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5862 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5863 *strict_overflow_p = true;
5864 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5865 fold_convert (ctype,
5866 const_binop (TRUNC_DIV_EXPR,
5867 c, op1, 0)));
5870 break;
5872 default:
5873 break;
5876 return 0;
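
/* Checking the headline extract_muldiv transformations on plain
   integers, assuming the values are small enough that nothing
   overflows; demo_extract_muldiv is a name invented for this sketch.  */
#include <assert.h>

static void
demo_extract_muldiv (long x, long y)
{
  /* ((X * 8) + (Y * 16)) / 4 -> (X * 2) + (Y * 4).  */
  assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
  /* The MULT_EXPR special case: (X * 8) % 4 is always zero.  */
  assert ((x * 8) % 4 == 0);
}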
5879 /* Return a node which has the indicated constant VALUE (either 0 or
5880 1), and is of the indicated TYPE. */
5882 tree
5883 constant_boolean_node (int value, tree type)
5885 if (type == integer_type_node)
5886 return value ? integer_one_node : integer_zero_node;
5887 else if (type == boolean_type_node)
5888 return value ? boolean_true_node : boolean_false_node;
5889 else
5890 return build_int_cst (type, value);
5894 /* Return true if expr looks like an ARRAY_REF and set base and
5895 offset to the appropriate trees. If there is no offset,
5896 offset is set to NULL_TREE. Base will be canonicalized to
5897 something you can get the element type from using
5898 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5899 in bytes to the base. */
5901 static bool
5902 extract_array_ref (tree expr, tree *base, tree *offset)
5904 /* One canonical form is a PLUS_EXPR with the first
5905 argument being an ADDR_EXPR with a possible NOP_EXPR
5906 attached. */
5907 if (TREE_CODE (expr) == PLUS_EXPR)
5909 tree op0 = TREE_OPERAND (expr, 0);
5910 tree inner_base, dummy1;
5911 /* Strip NOP_EXPRs here because the C frontends and/or
5912 folders may present us with (int *)&x.a + 4B. */
5913 STRIP_NOPS (op0);
5914 if (extract_array_ref (op0, &inner_base, &dummy1))
5916 *base = inner_base;
5917 if (dummy1 == NULL_TREE)
5918 *offset = TREE_OPERAND (expr, 1);
5919 else
5920 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5921 dummy1, TREE_OPERAND (expr, 1));
5922 return true;
5925 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5926 which we transform into an ADDR_EXPR with appropriate
5927 offset. For other arguments to the ADDR_EXPR we assume
5928 zero offset and as such do not care about the ADDR_EXPR
5929 type and strip possible nops from it. */
5930 else if (TREE_CODE (expr) == ADDR_EXPR)
5932 tree op0 = TREE_OPERAND (expr, 0);
5933 if (TREE_CODE (op0) == ARRAY_REF)
5935 tree idx = TREE_OPERAND (op0, 1);
5936 *base = TREE_OPERAND (op0, 0);
5937 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5938 array_ref_element_size (op0));
5940 else
5942 /* Handle array-to-pointer decay as &a. */
5943 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5944 *base = TREE_OPERAND (expr, 0);
5945 else
5946 *base = expr;
5947 *offset = NULL_TREE;
5949 return true;
5951 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5952 else if (SSA_VAR_P (expr)
5953 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5955 *base = expr;
5956 *offset = NULL_TREE;
5957 return true;
5960 return false;
5964 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5965 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5966 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5967 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5968 COND is the first argument to CODE; otherwise (as in the example
5969 given here), it is the second argument. TYPE is the type of the
5970 original expression. Return NULL_TREE if no simplification is
5971 possible. */
5973 static tree
5974 fold_binary_op_with_conditional_arg (enum tree_code code,
5975 tree type, tree op0, tree op1,
5976 tree cond, tree arg, int cond_first_p)
5978 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5979 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5980 tree test, true_value, false_value;
5981 tree lhs = NULL_TREE;
5982 tree rhs = NULL_TREE;
5984 /* This transformation is only worthwhile if we don't have to wrap
5985 arg in a SAVE_EXPR, and the operation can be simplified on at least
5986 one of the branches once it's pushed inside the COND_EXPR. */
5987 if (!TREE_CONSTANT (arg))
5988 return NULL_TREE;
5990 if (TREE_CODE (cond) == COND_EXPR)
5992 test = TREE_OPERAND (cond, 0);
5993 true_value = TREE_OPERAND (cond, 1);
5994 false_value = TREE_OPERAND (cond, 2);
5995 /* If this operand throws an exception, then it does not make
5996 sense to try to perform a logical or arithmetic operation
5997 involving it. */
5998 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5999 lhs = true_value;
6000 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6001 rhs = false_value;
6003 else
6005 tree testtype = TREE_TYPE (cond);
6006 test = cond;
6007 true_value = constant_boolean_node (true, testtype);
6008 false_value = constant_boolean_node (false, testtype);
6011 arg = fold_convert (arg_type, arg);
6012 if (lhs == 0)
6014 true_value = fold_convert (cond_type, true_value);
6015 if (cond_first_p)
6016 lhs = fold_build2 (code, type, true_value, arg);
6017 else
6018 lhs = fold_build2 (code, type, arg, true_value);
6020 if (rhs == 0)
6022 false_value = fold_convert (cond_type, false_value);
6023 if (cond_first_p)
6024 rhs = fold_build2 (code, type, false_value, arg);
6025 else
6026 rhs = fold_build2 (code, type, arg, false_value);
6029 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6030 return fold_convert (type, test);
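
/* The rewrite above, checked on ordinary values (ignoring signed
   overflow); demo_cond_distribute is a name made up for this sketch.  */
#include <assert.h>

static void
demo_cond_distribute (int a, int b, int x, int y)
{
  /* a + (b ? x : y)  ->  b ? (a + x) : (a + y).  */
  assert (a + (b ? x : y) == (b ? a + x : a + y));
  /* a + (x < y)  ->  (x < y) ? (a + 1) : (a + 0).  */
  assert (a + (x < y) == ((x < y) ? a + 1 : a + 0));
}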
6034 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6036 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6037 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6038 ADDEND is the same as X.
6040 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6041 and finite. The problematic cases are when X is zero, and its mode
6042 has signed zeros. In the case of rounding towards -infinity,
6043 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6044 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6046 static bool
6047 fold_real_zero_addition_p (tree type, tree addend, int negate)
6049 if (!real_zerop (addend))
6050 return false;
6052 /* Don't allow the fold with -fsignaling-nans. */
6053 if (HONOR_SNANS (TYPE_MODE (type)))
6054 return false;
6056 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6057 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6058 return true;
6060 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6061 if (TREE_CODE (addend) == REAL_CST
6062 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6063 negate = !negate;
6065 /* The mode has signed zeros, and we have to honor their sign.
6066 In this situation, there is only one case we can return true for.
6067 X - 0 is the same as X unless rounding towards -infinity is
6068 supported. */
6069 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
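
/* A runnable illustration of why the fold is restricted when the
   mode has signed zeros (IEEE semantics and round-to-nearest
   assumed); demo_signed_zero is a made-up name.  */
#include <assert.h>
#include <math.h>

static void
demo_signed_zero (void)
{
  double neg_zero = -0.0;

  assert (signbit (neg_zero));
  assert (!signbit (neg_zero + 0.0));	/* -0 + 0 is +0: X + 0 loses the sign.  */
  assert (signbit (neg_zero - 0.0));	/* -0 - 0 stays -0: X - 0 is safe.  */
}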
6072 /* Subroutine of fold() that checks comparisons of built-in math
6073 functions against real constants.
6075 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6076 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6077 is the type of the result and ARG0 and ARG1 are the operands of the
6078 comparison. ARG1 must be a TREE_REAL_CST.
6080 The function returns the constant folded tree if a simplification
6081 can be made, and NULL_TREE otherwise. */
6083 static tree
6084 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6085 tree type, tree arg0, tree arg1)
6087 REAL_VALUE_TYPE c;
6089 if (BUILTIN_SQRT_P (fcode))
6091 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
6092 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6094 c = TREE_REAL_CST (arg1);
6095 if (REAL_VALUE_NEGATIVE (c))
6097 /* sqrt(x) == y, < y and <= y are all false, if y is negative. */
6098 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6099 return omit_one_operand (type, integer_zero_node, arg);
6101 /* sqrt(x) > y is always true, if y is negative and we
6102 don't care about NaNs, i.e. negative values of x. */
6103 if (code == NE_EXPR || !HONOR_NANS (mode))
6104 return omit_one_operand (type, integer_one_node, arg);
6106 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6107 return fold_build2 (GE_EXPR, type, arg,
6108 build_real (TREE_TYPE (arg), dconst0));
6110 else if (code == GT_EXPR || code == GE_EXPR)
6112 REAL_VALUE_TYPE c2;
6114 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6115 real_convert (&c2, mode, &c2);
6117 if (REAL_VALUE_ISINF (c2))
6119 /* sqrt(x) > y is x == +Inf, when y is very large. */
6120 if (HONOR_INFINITIES (mode))
6121 return fold_build2 (EQ_EXPR, type, arg,
6122 build_real (TREE_TYPE (arg), c2));
6124 /* sqrt(x) > y is always false, when y is very large
6125 and we don't care about infinities. */
6126 return omit_one_operand (type, integer_zero_node, arg);
6129 /* sqrt(x) > c is the same as x > c*c. */
6130 return fold_build2 (code, type, arg,
6131 build_real (TREE_TYPE (arg), c2));
6133 else if (code == LT_EXPR || code == LE_EXPR)
6135 REAL_VALUE_TYPE c2;
6137 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6138 real_convert (&c2, mode, &c2);
6140 if (REAL_VALUE_ISINF (c2))
6142 /* sqrt(x) < y is always true, when y is a very large
6143 value and we don't care about NaNs or Infinities. */
6144 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6145 return omit_one_operand (type, integer_one_node, arg);
6147 /* sqrt(x) < y is x != +Inf when y is very large and we
6148 don't care about NaNs. */
6149 if (! HONOR_NANS (mode))
6150 return fold_build2 (NE_EXPR, type, arg,
6151 build_real (TREE_TYPE (arg), c2));
6153 /* sqrt(x) < y is x >= 0 when y is very large and we
6154 don't care about Infinities. */
6155 if (! HONOR_INFINITIES (mode))
6156 return fold_build2 (GE_EXPR, type, arg,
6157 build_real (TREE_TYPE (arg), dconst0));
6159 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6160 if (lang_hooks.decls.global_bindings_p () != 0
6161 || CONTAINS_PLACEHOLDER_P (arg))
6162 return NULL_TREE;
6164 arg = save_expr (arg);
6165 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6166 fold_build2 (GE_EXPR, type, arg,
6167 build_real (TREE_TYPE (arg),
6168 dconst0)),
6169 fold_build2 (NE_EXPR, type, arg,
6170 build_real (TREE_TYPE (arg),
6171 c2)));
6174 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6175 if (! HONOR_NANS (mode))
6176 return fold_build2 (code, type, arg,
6177 build_real (TREE_TYPE (arg), c2));
6179 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6180 if (lang_hooks.decls.global_bindings_p () == 0
6181 && ! CONTAINS_PLACEHOLDER_P (arg))
6183 arg = save_expr (arg);
6184 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6185 fold_build2 (GE_EXPR, type, arg,
6186 build_real (TREE_TYPE (arg),
6187 dconst0)),
6188 fold_build2 (code, type, arg,
6189 build_real (TREE_TYPE (arg),
6190 c2)));
6195 return NULL_TREE;
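
/* Spot-checking the sqrt folds on concrete values chosen away from
   rounding boundaries; demo_sqrt_compare is a made-up name.  */
#include <assert.h>
#include <math.h>

static void
demo_sqrt_compare (void)
{
  /* y negative: sqrt(x) <= y is always false.  */
  assert (!(sqrt (2.0) <= -1.0));
  /* sqrt(x) > c behaves as x > c*c.  */
  assert ((sqrt (10.0) > 3.0) == (10.0 > 9.0));
  assert ((sqrt (8.0) > 3.0) == (8.0 > 9.0));
}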
6198 /* Subroutine of fold() that optimizes comparisons against Infinities,
6199 either +Inf or -Inf.
6201 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6202 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6203 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6205 The function returns the constant folded tree if a simplification
6206 can be made, and NULL_TREE otherwise. */
6208 static tree
6209 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6211 enum machine_mode mode;
6212 REAL_VALUE_TYPE max;
6213 tree temp;
6214 bool neg;
6216 mode = TYPE_MODE (TREE_TYPE (arg0));
6218 /* For negative infinity swap the sense of the comparison. */
6219 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6220 if (neg)
6221 code = swap_tree_comparison (code);
6223 switch (code)
6225 case GT_EXPR:
6226 /* x > +Inf is always false, if we ignore sNaNs. */
6227 if (HONOR_SNANS (mode))
6228 return NULL_TREE;
6229 return omit_one_operand (type, integer_zero_node, arg0);
6231 case LE_EXPR:
6232 /* x <= +Inf is always true, if we don't care about NaNs. */
6233 if (! HONOR_NANS (mode))
6234 return omit_one_operand (type, integer_one_node, arg0);
6236 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6237 if (lang_hooks.decls.global_bindings_p () == 0
6238 && ! CONTAINS_PLACEHOLDER_P (arg0))
6240 arg0 = save_expr (arg0);
6241 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6243 break;
6245 case EQ_EXPR:
6246 case GE_EXPR:
6247 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6248 real_maxval (&max, neg, mode);
6249 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6250 arg0, build_real (TREE_TYPE (arg0), max));
6252 case LT_EXPR:
6253 /* x < +Inf is always equal to x <= DBL_MAX. */
6254 real_maxval (&max, neg, mode);
6255 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6256 arg0, build_real (TREE_TYPE (arg0), max));
6258 case NE_EXPR:
6259 /* x != +Inf is always equal to !(x > DBL_MAX). */
6260 real_maxval (&max, neg, mode);
6261 if (! HONOR_NANS (mode))
6262 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6263 arg0, build_real (TREE_TYPE (arg0), max));
6265 /* The transformation below creates non-gimple code and thus is
6266 not appropriate if we are in gimple form. */
6267 if (in_gimple_form)
6268 return NULL_TREE;
6270 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6271 arg0, build_real (TREE_TYPE (arg0), max));
6272 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6274 default:
6275 break;
6278 return NULL_TREE;
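
/* The Infinity folds on concrete doubles (IEEE semantics assumed):
   x <= +Inf is x == x, false only for NaN, and x == +Inf matches
   x > DBL_MAX.  demo_inf_compare is a made-up name.  */
#include <assert.h>
#include <float.h>
#include <math.h>

static void
demo_inf_compare (double x)
{
  assert ((x <= INFINITY) == (x == x));
  assert ((x == INFINITY) == (x > DBL_MAX));
}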
6281 /* Subroutine of fold() that optimizes comparisons of a division by
6282 a nonzero integer constant against an integer constant, i.e.
6283 X/C1 op C2.
6285 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6286 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6287 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6289 The function returns the constant folded tree if a simplification
6290 can be made, and NULL_TREE otherwise. */
6292 static tree
6293 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6295 tree prod, tmp, hi, lo;
6296 tree arg00 = TREE_OPERAND (arg0, 0);
6297 tree arg01 = TREE_OPERAND (arg0, 1);
6298 unsigned HOST_WIDE_INT lpart;
6299 HOST_WIDE_INT hpart;
6300 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6301 bool neg_overflow;
6302 int overflow;
6304 /* We have to do this the hard way to detect unsigned overflow.
6305 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6306 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6307 TREE_INT_CST_HIGH (arg01),
6308 TREE_INT_CST_LOW (arg1),
6309 TREE_INT_CST_HIGH (arg1),
6310 &lpart, &hpart, unsigned_p);
6311 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6312 prod = force_fit_type (prod, -1, overflow, false);
6313 neg_overflow = false;
6315 if (unsigned_p)
6317 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6318 lo = prod;
6320 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6321 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6322 TREE_INT_CST_HIGH (prod),
6323 TREE_INT_CST_LOW (tmp),
6324 TREE_INT_CST_HIGH (tmp),
6325 &lpart, &hpart, unsigned_p);
6326 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6327 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6328 TREE_CONSTANT_OVERFLOW (prod));
6330 else if (tree_int_cst_sgn (arg01) >= 0)
6332 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6333 switch (tree_int_cst_sgn (arg1))
6335 case -1:
6336 neg_overflow = true;
6337 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6338 hi = prod;
6339 break;
6341 case 0:
6342 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6343 hi = tmp;
6344 break;
6346 case 1:
6347 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6348 lo = prod;
6349 break;
6351 default:
6352 gcc_unreachable ();
6355 else
6357 /* A negative divisor reverses the relational operators. */
6358 code = swap_tree_comparison (code);
6360 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6361 switch (tree_int_cst_sgn (arg1))
6363 case -1:
6364 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6365 lo = prod;
6366 break;
6368 case 0:
6369 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6370 lo = tmp;
6371 break;
6373 case 1:
6374 neg_overflow = true;
6375 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6376 hi = prod;
6377 break;
6379 default:
6380 gcc_unreachable ();
6384 switch (code)
6386 case EQ_EXPR:
6387 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6388 return omit_one_operand (type, integer_zero_node, arg00);
6389 if (TREE_OVERFLOW (hi))
6390 return fold_build2 (GE_EXPR, type, arg00, lo);
6391 if (TREE_OVERFLOW (lo))
6392 return fold_build2 (LE_EXPR, type, arg00, hi);
6393 return build_range_check (type, arg00, 1, lo, hi);
6395 case NE_EXPR:
6396 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6397 return omit_one_operand (type, integer_one_node, arg00);
6398 if (TREE_OVERFLOW (hi))
6399 return fold_build2 (LT_EXPR, type, arg00, lo);
6400 if (TREE_OVERFLOW (lo))
6401 return fold_build2 (GT_EXPR, type, arg00, hi);
6402 return build_range_check (type, arg00, 0, lo, hi);
6404 case LT_EXPR:
6405 if (TREE_OVERFLOW (lo))
6407 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6408 return omit_one_operand (type, tmp, arg00);
6410 return fold_build2 (LT_EXPR, type, arg00, lo);
6412 case LE_EXPR:
6413 if (TREE_OVERFLOW (hi))
6415 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6416 return omit_one_operand (type, tmp, arg00);
6418 return fold_build2 (LE_EXPR, type, arg00, hi);
6420 case GT_EXPR:
6421 if (TREE_OVERFLOW (hi))
6423 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6424 return omit_one_operand (type, tmp, arg00);
6426 return fold_build2 (GT_EXPR, type, arg00, hi);
6428 case GE_EXPR:
6429 if (TREE_OVERFLOW (lo))
6431 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6432 return omit_one_operand (type, tmp, arg00);
6434 return fold_build2 (GE_EXPR, type, arg00, lo);
6436 default:
6437 break;
6440 return NULL_TREE;
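
/* fold_div_compare in miniature: with C99's truncating division,
   X/3 == 4 exactly when X lies in [12, 14], and the range mirrors
   for negative quotients.  demo_div_compare is a made-up name.  */
#include <assert.h>

static void
demo_div_compare (int x)
{
  assert ((x / 3 == 4) == (x >= 12 && x <= 14));
  assert ((x / 3 == -4) == (x <= -12 && x >= -14));
}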
6444 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6445 equality/inequality test, then return a simplified form of the test
6446 using a sign test. Otherwise return NULL. TYPE is the desired
6447 result type. */
6449 static tree
6450 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6451 tree result_type)
6453 /* If this is testing a single bit, we can optimize the test. */
6454 if ((code == NE_EXPR || code == EQ_EXPR)
6455 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6456 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6458 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6459 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6460 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6462 if (arg00 != NULL_TREE
6463 /* This is only a win if casting to a signed type is cheap,
6464 i.e. when arg00's type is not a partial mode. */
6465 && TYPE_PRECISION (TREE_TYPE (arg00))
6466 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6468 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6469 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6470 result_type, fold_convert (stype, arg00),
6471 build_int_cst (stype, 0));
6475 return NULL_TREE;
6478 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6479 equality/inequality test, then return a simplified form of
6480 the test using shifts and logical operations. Otherwise return
6481 NULL. TYPE is the desired result type. */
6483 tree
6484 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6485 tree result_type)
6487 /* If this is testing a single bit, we can optimize the test. */
6488 if ((code == NE_EXPR || code == EQ_EXPR)
6489 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6490 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6492 tree inner = TREE_OPERAND (arg0, 0);
6493 tree type = TREE_TYPE (arg0);
6494 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6495 enum machine_mode operand_mode = TYPE_MODE (type);
6496 int ops_unsigned;
6497 tree signed_type, unsigned_type, intermediate_type;
6498 tree tem;
6500 /* First, see if we can fold the single bit test into a sign-bit
6501 test. */
6502 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6503 result_type);
6504 if (tem)
6505 return tem;
6507 /* Otherwise we have (A & C) != 0 where C is a single bit,
6508 convert that into ((A >> C2) & 1), where C2 = log2(C).
6509 Similarly for (A & C) == 0. */
6511 /* If INNER is a right shift of a constant and it plus BITNUM does
6512 not overflow, adjust BITNUM and INNER. */
6513 if (TREE_CODE (inner) == RSHIFT_EXPR
6514 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6515 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6516 && bitnum < TYPE_PRECISION (type)
6517 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6518 bitnum - TYPE_PRECISION (type)))
6520 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6521 inner = TREE_OPERAND (inner, 0);
6524 /* If we are going to be able to omit the AND below, we must do our
6525 operations as unsigned. If we must use the AND, we have a choice.
6526 Normally unsigned is faster, but for some machines signed is. */
6527 #ifdef LOAD_EXTEND_OP
6528 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6529 && !flag_syntax_only) ? 0 : 1;
6530 #else
6531 ops_unsigned = 1;
6532 #endif
6534 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6535 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6536 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6537 inner = fold_convert (intermediate_type, inner);
6539 if (bitnum != 0)
6540 inner = build2 (RSHIFT_EXPR, intermediate_type,
6541 inner, size_int (bitnum));
6543 if (code == EQ_EXPR)
6544 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6545 inner, integer_one_node);
6547 /* Put the AND last so it can combine with more things. */
6548 inner = build2 (BIT_AND_EXPR, intermediate_type,
6549 inner, integer_one_node);
6551 /* Make sure to return the proper type. */
6552 inner = fold_convert (result_type, inner);
6554 return inner;
6556 return NULL_TREE;
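
/* Both single-bit rewrites on plain ints, assuming two's complement;
   demo_single_bit_test and sign_bit are names made up here.  */
#include <assert.h>
#include <limits.h>

static void
demo_single_bit_test (int a, unsigned bitnum)
{
  unsigned sign_bit = 1u << (sizeof (int) * CHAR_BIT - 1);

  /* (A & C) != 0 with C the sign bit  ->  A < 0.  */
  assert ((((unsigned) a & sign_bit) != 0) == (a < 0));

  /* (A & (1 << N)) != 0  ->  (A >> N) & 1, for nonnegative A.  */
  if (a >= 0 && bitnum < sizeof (int) * CHAR_BIT - 1)
    assert (((a & (1 << bitnum)) != 0) == ((a >> bitnum) & 1));
}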
6559 /* Check whether we are allowed to reorder operands arg0 and arg1,
6560 such that the evaluation of arg1 occurs before arg0. */
6562 static bool
6563 reorder_operands_p (tree arg0, tree arg1)
6565 if (! flag_evaluation_order)
6566 return true;
6567 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6568 return true;
6569 return ! TREE_SIDE_EFFECTS (arg0)
6570 && ! TREE_SIDE_EFFECTS (arg1);
6573 /* Test whether it is preferable to swap two operands, ARG0 and
6574 ARG1, for example because ARG0 is an integer constant and ARG1
6575 isn't. If REORDER is true, only recommend swapping if we can
6576 evaluate the operands in reverse order. */
6578 bool
6579 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6581 STRIP_SIGN_NOPS (arg0);
6582 STRIP_SIGN_NOPS (arg1);
6584 if (TREE_CODE (arg1) == INTEGER_CST)
6585 return 0;
6586 if (TREE_CODE (arg0) == INTEGER_CST)
6587 return 1;
6589 if (TREE_CODE (arg1) == REAL_CST)
6590 return 0;
6591 if (TREE_CODE (arg0) == REAL_CST)
6592 return 1;
6594 if (TREE_CODE (arg1) == COMPLEX_CST)
6595 return 0;
6596 if (TREE_CODE (arg0) == COMPLEX_CST)
6597 return 1;
6599 if (TREE_CONSTANT (arg1))
6600 return 0;
6601 if (TREE_CONSTANT (arg0))
6602 return 1;
6604 if (optimize_size)
6605 return 0;
6607 if (reorder && flag_evaluation_order
6608 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6609 return 0;
6611 if (DECL_P (arg1))
6612 return 0;
6613 if (DECL_P (arg0))
6614 return 1;
6616 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6617 for commutative and comparison operators. Ensuring a canonical
6618 form allows the optimizers to find additional redundancies without
6619 having to explicitly check for both orderings. */
6620 if (TREE_CODE (arg0) == SSA_NAME
6621 && TREE_CODE (arg1) == SSA_NAME
6622 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6623 return 1;
6625 return 0;
6628 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6629 ARG0 is extended to a wider type. */
6631 static tree
6632 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6634 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6635 tree arg1_unw;
6636 tree shorter_type, outer_type;
6637 tree min, max;
6638 bool above, below;
6640 if (arg0_unw == arg0)
6641 return NULL_TREE;
6642 shorter_type = TREE_TYPE (arg0_unw);
6644 #ifdef HAVE_canonicalize_funcptr_for_compare
6645 /* Disable this optimization if we're casting a function pointer
6646 type on targets that require function pointer canonicalization. */
6647 if (HAVE_canonicalize_funcptr_for_compare
6648 && TREE_CODE (shorter_type) == POINTER_TYPE
6649 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6650 return NULL_TREE;
6651 #endif
6653 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6654 return NULL_TREE;
6656 arg1_unw = get_unwidened (arg1, shorter_type);
6658 /* If possible, express the comparison in the shorter mode. */
6659 if ((code == EQ_EXPR || code == NE_EXPR
6660 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6661 && (TREE_TYPE (arg1_unw) == shorter_type
6662 || (TREE_CODE (arg1_unw) == INTEGER_CST
6663 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6664 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6665 && int_fits_type_p (arg1_unw, shorter_type))))
6666 return fold_build2 (code, type, arg0_unw,
6667 fold_convert (shorter_type, arg1_unw));
6669 if (TREE_CODE (arg1_unw) != INTEGER_CST
6670 || TREE_CODE (shorter_type) != INTEGER_TYPE
6671 || !int_fits_type_p (arg1_unw, shorter_type))
6672 return NULL_TREE;
6674 /* If we are comparing with an integer that does not fit into the range
6675 of the shorter type, the result is known. */
6676 outer_type = TREE_TYPE (arg1_unw);
6677 min = lower_bound_in_type (outer_type, shorter_type);
6678 max = upper_bound_in_type (outer_type, shorter_type);
6680 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6681 max, arg1_unw));
6682 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6683 arg1_unw, min));
6685 switch (code)
6687 case EQ_EXPR:
6688 if (above || below)
6689 return omit_one_operand (type, integer_zero_node, arg0);
6690 break;
6692 case NE_EXPR:
6693 if (above || below)
6694 return omit_one_operand (type, integer_one_node, arg0);
6695 break;
6697 case LT_EXPR:
6698 case LE_EXPR:
6699 if (above)
6700 return omit_one_operand (type, integer_one_node, arg0);
6701 else if (below)
6702 return omit_one_operand (type, integer_zero_node, arg0);
6704 case GT_EXPR:
6705 case GE_EXPR:
6706 if (above)
6707 return omit_one_operand (type, integer_zero_node, arg0);
6708 else if (below)
6709 return omit_one_operand (type, integer_one_node, arg0);
6711 default:
6712 break;
6715 return NULL_TREE;
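
/* A concrete instance of the known-result cases: once a signed char
   (assumed 8-bit here) is widened to int, comparisons against
   constants outside [-128, 127] are decided.  demo_widened_compare
   is a made-up name.  */
#include <assert.h>

static void
demo_widened_compare (signed char c)
{
  assert (((int) c < 1000) == 1);	/* Always true.  */
  assert (((int) c == 500) == 0);	/* Always false.  */
}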
6718 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6719 ARG0 just the signedness is changed. */
6721 static tree
6722 fold_sign_changed_comparison (enum tree_code code, tree type,
6723 tree arg0, tree arg1)
6725 tree arg0_inner, tmp;
6726 tree inner_type, outer_type;
6728 if (TREE_CODE (arg0) != NOP_EXPR
6729 && TREE_CODE (arg0) != CONVERT_EXPR)
6730 return NULL_TREE;
6732 outer_type = TREE_TYPE (arg0);
6733 arg0_inner = TREE_OPERAND (arg0, 0);
6734 inner_type = TREE_TYPE (arg0_inner);
6736 #ifdef HAVE_canonicalize_funcptr_for_compare
6737 /* Disable this optimization if we're casting a function pointer
6738 type on targets that require function pointer canonicalization. */
6739 if (HAVE_canonicalize_funcptr_for_compare
6740 && TREE_CODE (inner_type) == POINTER_TYPE
6741 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6742 return NULL_TREE;
6743 #endif
6745 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6746 return NULL_TREE;
6748 if (TREE_CODE (arg1) != INTEGER_CST
6749 && !((TREE_CODE (arg1) == NOP_EXPR
6750 || TREE_CODE (arg1) == CONVERT_EXPR)
6751 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6752 return NULL_TREE;
6754 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6755 && code != NE_EXPR
6756 && code != EQ_EXPR)
6757 return NULL_TREE;
6759 if (TREE_CODE (arg1) == INTEGER_CST)
6761 tmp = build_int_cst_wide (inner_type,
6762 TREE_INT_CST_LOW (arg1),
6763 TREE_INT_CST_HIGH (arg1));
6764 arg1 = force_fit_type (tmp, 0,
6765 TREE_OVERFLOW (arg1),
6766 TREE_CONSTANT_OVERFLOW (arg1));
6768 else
6769 arg1 = fold_convert (inner_type, arg1);
6771 return fold_build2 (code, type, arg0_inner, arg1);
6774 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6775 the step of the array. Reconstructs s and delta in the case of s * delta
6776 being an integer constant (and thus already folded).
6777 ADDR is the address. OP1 is the multiplicative expression.
6778 If the function succeeds, the new address expression is returned. Otherwise
6779 NULL_TREE is returned. */
6781 static tree
6782 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6784 tree s, delta, step;
6785 tree ref = TREE_OPERAND (addr, 0), pref;
6786 tree ret, pos;
6787 tree itype;
6789 /* Canonicalize op1 into a possibly non-constant delta
6790 and an INTEGER_CST s. */
6791 if (TREE_CODE (op1) == MULT_EXPR)
6793 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6795 STRIP_NOPS (arg0);
6796 STRIP_NOPS (arg1);
6798 if (TREE_CODE (arg0) == INTEGER_CST)
6800 s = arg0;
6801 delta = arg1;
6803 else if (TREE_CODE (arg1) == INTEGER_CST)
6805 s = arg1;
6806 delta = arg0;
6808 else
6809 return NULL_TREE;
6811 else if (TREE_CODE (op1) == INTEGER_CST)
6813 delta = op1;
6814 s = NULL_TREE;
6816 else
6818 /* Treat op1 as delta * 1. */
6819 delta = op1;
6820 s = integer_one_node;
6823 for (;; ref = TREE_OPERAND (ref, 0))
6825 if (TREE_CODE (ref) == ARRAY_REF)
6827 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6828 if (! itype)
6829 continue;
6831 step = array_ref_element_size (ref);
6832 if (TREE_CODE (step) != INTEGER_CST)
6833 continue;
6835 if (s)
6837 if (! tree_int_cst_equal (step, s))
6838 continue;
6840 else
6842 /* Check whether delta is a multiple of step. */
6843 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6844 if (! tmp)
6845 continue;
6846 delta = tmp;
6849 break;
6852 if (!handled_component_p (ref))
6853 return NULL_TREE;
6856 /* We found a suitable array reference. So copy everything up to it,
6857 and replace the index. */
6859 pref = TREE_OPERAND (addr, 0);
6860 ret = copy_node (pref);
6861 pos = ret;
6863 while (pref != ref)
6865 pref = TREE_OPERAND (pref, 0);
6866 TREE_OPERAND (pos, 0) = copy_node (pref);
6867 pos = TREE_OPERAND (pos, 0);
6870 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6871 fold_convert (itype,
6872 TREE_OPERAND (pos, 1)),
6873 fold_convert (itype, delta));
6875 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
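
/* The pointer identity behind try_move_mult_to_index, as a runnable
   sketch: adding delta * sizeof (element) to &a[idx] lands on
   &a[idx + delta].  demo_move_mult_to_index is a made-up name.  */
#include <assert.h>

static void
demo_move_mult_to_index (void)
{
  double a[10];
  long idx = 2, delta = 3;

  assert ((double *) ((char *) &a[idx] + delta * (long) sizeof (double))
	  == &a[idx + delta]);
}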
6879 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6880 means A >= Y && A != MAX, but in this case we know that
6881 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6883 static tree
6884 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6886 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6888 if (TREE_CODE (bound) == LT_EXPR)
6889 a = TREE_OPERAND (bound, 0);
6890 else if (TREE_CODE (bound) == GT_EXPR)
6891 a = TREE_OPERAND (bound, 1);
6892 else
6893 return NULL_TREE;
6895 typea = TREE_TYPE (a);
6896 if (!INTEGRAL_TYPE_P (typea)
6897 && !POINTER_TYPE_P (typea))
6898 return NULL_TREE;
6900 if (TREE_CODE (ineq) == LT_EXPR)
6902 a1 = TREE_OPERAND (ineq, 1);
6903 y = TREE_OPERAND (ineq, 0);
6905 else if (TREE_CODE (ineq) == GT_EXPR)
6907 a1 = TREE_OPERAND (ineq, 0);
6908 y = TREE_OPERAND (ineq, 1);
6910 else
6911 return NULL_TREE;
6913 if (TREE_TYPE (a1) != typea)
6914 return NULL_TREE;
6916 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6917 if (!integer_onep (diff))
6918 return NULL_TREE;
6920 return fold_build2 (GE_EXPR, type, a, y);
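
/* The integer fact used just above: when A + 1 does not overflow,
   A + 1 > Y is the same as A >= Y.  demo_nonsharp_ineq is a made-up
   name.  */
#include <assert.h>
#include <limits.h>

static void
demo_nonsharp_ineq (int a, int y)
{
  if (a < INT_MAX)
    assert ((a + 1 > y) == (a >= y));
}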
6923 /* Fold a sum or difference of at least one multiplication.
6924 Returns the folded tree or NULL if no simplification could be made. */
6926 static tree
6927 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6929 tree arg00, arg01, arg10, arg11;
6930 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6932 /* (A * C) +- (B * C) -> (A+-B) * C.
6933 (A * C) +- A -> A * (C+-1).
6934 We are most concerned about the case where C is a constant,
6935 but other combinations show up during loop reduction. Since
6936 it is not difficult, try all four possibilities. */
6938 if (TREE_CODE (arg0) == MULT_EXPR)
6940 arg00 = TREE_OPERAND (arg0, 0);
6941 arg01 = TREE_OPERAND (arg0, 1);
6943 else
6945 arg00 = arg0;
6946 arg01 = build_one_cst (type);
6948 if (TREE_CODE (arg1) == MULT_EXPR)
6950 arg10 = TREE_OPERAND (arg1, 0);
6951 arg11 = TREE_OPERAND (arg1, 1);
6953 else
6955 arg10 = arg1;
6956 arg11 = build_one_cst (type);
6958 same = NULL_TREE;
6960 if (operand_equal_p (arg01, arg11, 0))
6961 same = arg01, alt0 = arg00, alt1 = arg10;
6962 else if (operand_equal_p (arg00, arg10, 0))
6963 same = arg00, alt0 = arg01, alt1 = arg11;
6964 else if (operand_equal_p (arg00, arg11, 0))
6965 same = arg00, alt0 = arg01, alt1 = arg10;
6966 else if (operand_equal_p (arg01, arg10, 0))
6967 same = arg01, alt0 = arg00, alt1 = arg11;
6969 /* No identical multiplicands; see if we can find a common
6970 power-of-two factor in non-power-of-two multiplies. This
6971 can help in multi-dimensional array access. */
6972 else if (host_integerp (arg01, 0)
6973 && host_integerp (arg11, 0))
6975 HOST_WIDE_INT int01, int11, tmp;
6976 bool swap = false;
6977 tree maybe_same;
6978 int01 = TREE_INT_CST_LOW (arg01);
6979 int11 = TREE_INT_CST_LOW (arg11);
6981 /* Move min of absolute values to int11. */
6982 if ((int01 >= 0 ? int01 : -int01)
6983 < (int11 >= 0 ? int11 : -int11))
6985 tmp = int01, int01 = int11, int11 = tmp;
6986 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6987 maybe_same = arg01;
6988 swap = true;
6990 else
6991 maybe_same = arg11;
6993 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6995 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6996 build_int_cst (TREE_TYPE (arg00),
6997 int01 / int11));
6998 alt1 = arg10;
6999 same = maybe_same;
7000 if (swap)
7001 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7005 if (same)
7006 return fold_build2 (MULT_EXPR, type,
7007 fold_build2 (code, type,
7008 fold_convert (type, alt0),
7009 fold_convert (type, alt1)),
7010 fold_convert (type, same));
7012 return NULL_TREE;
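
/* The rewrites fold_plusminus_mult_expr looks for, on values small
   enough that no product overflows; demo_plusminus_mult is a made-up
   name.  */
#include <assert.h>

static void
demo_plusminus_mult (int a, int b, int c)
{
  /* (A * C) + (B * C) -> (A + B) * C.  */
  assert (a * c + b * c == (a + b) * c);
  /* Common power-of-two factor: A*12 + B*4 -> (A*3 + B) * 4.  */
  assert (a * 12 + b * 4 == (a * 3 + b) * 4);
}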
7015 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7016 specified by EXPR into the buffer PTR of length LEN bytes.
7017 Return the number of bytes placed in the buffer, or zero
7018 upon failure. */
7020 static int
7021 native_encode_int (tree expr, unsigned char *ptr, int len)
7023 tree type = TREE_TYPE (expr);
7024 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7025 int byte, offset, word, words;
7026 unsigned char value;
7028 if (total_bytes > len)
7029 return 0;
7030 words = total_bytes / UNITS_PER_WORD;
7032 for (byte = 0; byte < total_bytes; byte++)
7034 int bitpos = byte * BITS_PER_UNIT;
7035 if (bitpos < HOST_BITS_PER_WIDE_INT)
7036 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7037 else
7038 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7039 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7041 if (total_bytes > UNITS_PER_WORD)
7043 word = byte / UNITS_PER_WORD;
7044 if (WORDS_BIG_ENDIAN)
7045 word = (words - 1) - word;
7046 offset = word * UNITS_PER_WORD;
7047 if (BYTES_BIG_ENDIAN)
7048 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7049 else
7050 offset += byte % UNITS_PER_WORD;
7052 else
7053 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7054 ptr[offset] = value;
7056 return total_bytes;
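
/* A stripped-down analogue of the loop above for the common case of
   a little-endian, single-word target: each byte is the value
   shifted down by its bit position.  demo_encode_le is a made-up
   name; demo_encode_le (0x01020304, buf) yields 04 03 02 01.  */
#include <stddef.h>

static void
demo_encode_le (unsigned int v, unsigned char *ptr)
{
  size_t byte;

  for (byte = 0; byte < sizeof v; byte++)
    ptr[byte] = (unsigned char) (v >> (byte * 8));
}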
7060 /* Subroutine of native_encode_expr. Encode the REAL_CST
7061 specified by EXPR into the buffer PTR of length LEN bytes.
7062 Return the number of bytes placed in the buffer, or zero
7063 upon failure. */
7065 static int
7066 native_encode_real (tree expr, unsigned char *ptr, int len)
7068 tree type = TREE_TYPE (expr);
7069 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7070 int byte, offset, word, words, bitpos;
7071 unsigned char value;
7073 /* There are always 32 bits in each long, no matter the size of
7074 the host's long. We handle floating point representations with
7075 up to 192 bits. */
7076 long tmp[6];
7078 if (total_bytes > len)
7079 return 0;
7080 words = 32 / UNITS_PER_WORD;
7082 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7084 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7085 bitpos += BITS_PER_UNIT)
7087 byte = (bitpos / BITS_PER_UNIT) & 3;
7088 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7090 if (UNITS_PER_WORD < 4)
7092 word = byte / UNITS_PER_WORD;
7093 if (WORDS_BIG_ENDIAN)
7094 word = (words - 1) - word;
7095 offset = word * UNITS_PER_WORD;
7096 if (BYTES_BIG_ENDIAN)
7097 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7098 else
7099 offset += byte % UNITS_PER_WORD;
7101 else
7102 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7103 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7105 return total_bytes;
7108 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7109 specified by EXPR into the buffer PTR of length LEN bytes.
7110 Return the number of bytes placed in the buffer, or zero
7111 upon failure. */
7113 static int
7114 native_encode_complex (tree expr, unsigned char *ptr, int len)
7116 int rsize, isize;
7117 tree part;
7119 part = TREE_REALPART (expr);
7120 rsize = native_encode_expr (part, ptr, len);
7121 if (rsize == 0)
7122 return 0;
7123 part = TREE_IMAGPART (expr);
7124 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7125 if (isize != rsize)
7126 return 0;
7127 return rsize + isize;
7131 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7132 specified by EXPR into the buffer PTR of length LEN bytes.
7133 Return the number of bytes placed in the buffer, or zero
7134 upon failure. */
7136 static int
7137 native_encode_vector (tree expr, unsigned char *ptr, int len)
7139 int i, size, offset, count;
7140 tree itype, elem, elements;
7142 offset = 0;
7143 elements = TREE_VECTOR_CST_ELTS (expr);
7144 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7145 itype = TREE_TYPE (TREE_TYPE (expr));
7146 size = GET_MODE_SIZE (TYPE_MODE (itype));
7147 for (i = 0; i < count; i++)
7149 if (elements)
7151 elem = TREE_VALUE (elements);
7152 elements = TREE_CHAIN (elements);
7154 else
7155 elem = NULL_TREE;
7157 if (elem)
7159 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7160 return 0;
7162 else
7164 if (offset + size > len)
7165 return 0;
7166 memset (ptr+offset, 0, size);
7168 offset += size;
7170 return offset;
7174 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7175 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7176 buffer PTR of length LEN bytes. Return the number of bytes
7177 placed in the buffer, or zero upon failure. */
7179 static int
7180 native_encode_expr (tree expr, unsigned char *ptr, int len)
7182 switch (TREE_CODE (expr))
7184 case INTEGER_CST:
7185 return native_encode_int (expr, ptr, len);
7187 case REAL_CST:
7188 return native_encode_real (expr, ptr, len);
7190 case COMPLEX_CST:
7191 return native_encode_complex (expr, ptr, len);
7193 case VECTOR_CST:
7194 return native_encode_vector (expr, ptr, len);
7196 default:
7197 return 0;
7202 /* Subroutine of native_interpret_expr. Interpret the contents of
7203 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7204 If the buffer cannot be interpreted, return NULL_TREE. */
7206 static tree
7207 native_interpret_int (tree type, unsigned char *ptr, int len)
7209 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7210 int byte, offset, word, words;
7211 unsigned char value;
7212 unsigned HOST_WIDE_INT lo = 0;
7213 HOST_WIDE_INT hi = 0;
7215 if (total_bytes > len)
7216 return NULL_TREE;
7217 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7218 return NULL_TREE;
7219 words = total_bytes / UNITS_PER_WORD;
7221 for (byte = 0; byte < total_bytes; byte++)
7223 int bitpos = byte * BITS_PER_UNIT;
7224 if (total_bytes > UNITS_PER_WORD)
7226 word = byte / UNITS_PER_WORD;
7227 if (WORDS_BIG_ENDIAN)
7228 word = (words - 1) - word;
7229 offset = word * UNITS_PER_WORD;
7230 if (BYTES_BIG_ENDIAN)
7231 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7232 else
7233 offset += byte % UNITS_PER_WORD;
7235 else
7236 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7237 value = ptr[offset];
7239 if (bitpos < HOST_BITS_PER_WIDE_INT)
7240 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7241 else
7242 hi |= (unsigned HOST_WIDE_INT) value
7243 << (bitpos - HOST_BITS_PER_WIDE_INT);
7246 return force_fit_type (build_int_cst_wide (type, lo, hi),
7247 0, false, false);
7251 /* Subroutine of native_interpret_expr. Interpret the contents of
7252 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7253 If the buffer cannot be interpreted, return NULL_TREE. */
7255 static tree
7256 native_interpret_real (tree type, unsigned char *ptr, int len)
7258 enum machine_mode mode = TYPE_MODE (type);
7259 int total_bytes = GET_MODE_SIZE (mode);
7260 int byte, offset, word, words, bitpos;
7261 unsigned char value;
7262 /* There are always 32 bits in each long, no matter the size of
7263 the host's long. We handle floating point representations with
7264 up to 192 bits. */
7265 REAL_VALUE_TYPE r;
7266 long tmp[6];
7268 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7269 if (total_bytes > len || total_bytes > 24)
7270 return NULL_TREE;
7271 words = 32 / UNITS_PER_WORD;
7273 memset (tmp, 0, sizeof (tmp));
7274 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7275 bitpos += BITS_PER_UNIT)
7277 byte = (bitpos / BITS_PER_UNIT) & 3;
7278 if (UNITS_PER_WORD < 4)
7280 word = byte / UNITS_PER_WORD;
7281 if (WORDS_BIG_ENDIAN)
7282 word = (words - 1) - word;
7283 offset = word * UNITS_PER_WORD;
7284 if (BYTES_BIG_ENDIAN)
7285 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7286 else
7287 offset += byte % UNITS_PER_WORD;
7289 else
7290 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7291 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7293 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7296 real_from_target (&r, tmp, mode);
7297 return build_real (type, r);
7301 /* Subroutine of native_interpret_expr. Interpret the contents of
7302 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7303 If the buffer cannot be interpreted, return NULL_TREE. */
7305 static tree
7306 native_interpret_complex (tree type, unsigned char *ptr, int len)
7308 tree etype, rpart, ipart;
7309 int size;
7311 etype = TREE_TYPE (type);
7312 size = GET_MODE_SIZE (TYPE_MODE (etype));
7313 if (size * 2 > len)
7314 return NULL_TREE;
7315 rpart = native_interpret_expr (etype, ptr, size);
7316 if (!rpart)
7317 return NULL_TREE;
7318 ipart = native_interpret_expr (etype, ptr+size, size);
7319 if (!ipart)
7320 return NULL_TREE;
7321 return build_complex (type, rpart, ipart);
7325 /* Subroutine of native_interpret_expr. Interpret the contents of
7326 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7327 If the buffer cannot be interpreted, return NULL_TREE. */
7329 static tree
7330 native_interpret_vector (tree type, unsigned char *ptr, int len)
7332 tree etype, elem, elements;
7333 int i, size, count;
7335 etype = TREE_TYPE (type);
7336 size = GET_MODE_SIZE (TYPE_MODE (etype));
7337 count = TYPE_VECTOR_SUBPARTS (type);
7338 if (size * count > len)
7339 return NULL_TREE;
7341 elements = NULL_TREE;
7342 for (i = count - 1; i >= 0; i--)
7344 elem = native_interpret_expr (etype, ptr+(i*size), size);
7345 if (!elem)
7346 return NULL_TREE;
7347 elements = tree_cons (NULL_TREE, elem, elements);
7349 return build_vector (type, elements);
7353 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7354 the buffer PTR of length LEN as a constant of type TYPE. For
7355 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7356 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7357 return NULL_TREE. */
7359 static tree
7360 native_interpret_expr (tree type, unsigned char *ptr, int len)
7362 switch (TREE_CODE (type))
7364 case INTEGER_TYPE:
7365 case ENUMERAL_TYPE:
7366 case BOOLEAN_TYPE:
7367 return native_interpret_int (type, ptr, len);
7369 case REAL_TYPE:
7370 return native_interpret_real (type, ptr, len);
7372 case COMPLEX_TYPE:
7373 return native_interpret_complex (type, ptr, len);
7375 case VECTOR_TYPE:
7376 return native_interpret_vector (type, ptr, len);
7378 default:
7379 return NULL_TREE;
7384 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7385 TYPE at compile-time. If we're unable to perform the conversion
7386 return NULL_TREE. */
7388 static tree
7389 fold_view_convert_expr (tree type, tree expr)
7391 /* We support up to 512-bit values (for V8DFmode). */
7392 unsigned char buffer[64];
7393 int len;
7395 /* Check that the host and target are sane. */
7396 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7397 return NULL_TREE;
7399 len = native_encode_expr (expr, buffer, sizeof (buffer));
7400 if (len == 0)
7401 return NULL_TREE;
7403 return native_interpret_expr (type, buffer, len);
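/* Illustrative example (a sketch, assuming 32-bit unsigned int and IEEE
   SFmode): VIEW_CONVERT_EXPR<unsigned int>(1.0f) encodes the float as
   the target bytes of 0x3f800000 and reinterprets them as the integer
   constant 1065353216.  The result is independent of host endianness
   because both steps use the target byte order.  */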
7407 /* Fold a unary expression of code CODE and type TYPE with operand
7408 OP0. Return the folded expression if folding is successful.
7409 Otherwise, return NULL_TREE. */
7411 tree
7412 fold_unary (enum tree_code code, tree type, tree op0)
7414 tree tem;
7415 tree arg0;
7416 enum tree_code_class kind = TREE_CODE_CLASS (code);
7418 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7419 && TREE_CODE_LENGTH (code) == 1);
7421 arg0 = op0;
7422 if (arg0)
7424 if (code == NOP_EXPR || code == CONVERT_EXPR
7425 || code == FLOAT_EXPR || code == ABS_EXPR)
7427 /* Don't use STRIP_NOPS, because signedness of argument type
7428 matters. */
7429 STRIP_SIGN_NOPS (arg0);
7431 else
7433 /* Strip any conversions that don't change the mode. This
7434 is safe for every expression, except for a comparison
7435 expression because its signedness is derived from its
7436 operands.
7438 Note that this is done as an internal manipulation within
7439 the constant folder, in order to find the simplest
7440 representation of the arguments so that their form can be
7441 studied. In any case, the appropriate type conversions
7442 should be put back in the tree that will get out of the
7443 constant folder. */
7444 STRIP_NOPS (arg0);
7448 if (TREE_CODE_CLASS (code) == tcc_unary)
7450 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7451 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7452 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7453 else if (TREE_CODE (arg0) == COND_EXPR)
7455 tree arg01 = TREE_OPERAND (arg0, 1);
7456 tree arg02 = TREE_OPERAND (arg0, 2);
7457 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7458 arg01 = fold_build1 (code, type, arg01);
7459 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7460 arg02 = fold_build1 (code, type, arg02);
7461 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7462 arg01, arg02);
7464 /* If this was a conversion, and all we did was to move it
7465 inside the COND_EXPR, bring it back out. But leave it if
7466 it is a conversion from integer to integer and the
7467 result precision is no wider than a word since such a
7468 conversion is cheap and may be optimized away by combine,
7469 while it couldn't if it were outside the COND_EXPR. Then return
7470 so we don't get into an infinite recursion loop taking the
7471 conversion out and then back in. */
7473 if ((code == NOP_EXPR || code == CONVERT_EXPR
7474 || code == NON_LVALUE_EXPR)
7475 && TREE_CODE (tem) == COND_EXPR
7476 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7477 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7478 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7479 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7480 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7481 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7482 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7483 && (INTEGRAL_TYPE_P
7484 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7485 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7486 || flag_syntax_only))
7487 tem = build1 (code, type,
7488 build3 (COND_EXPR,
7489 TREE_TYPE (TREE_OPERAND
7490 (TREE_OPERAND (tem, 1), 0)),
7491 TREE_OPERAND (tem, 0),
7492 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7493 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7494 return tem;
7496 else if (COMPARISON_CLASS_P (arg0))
7498 if (TREE_CODE (type) == BOOLEAN_TYPE)
7500 arg0 = copy_node (arg0);
7501 TREE_TYPE (arg0) = type;
7502 return arg0;
7504 else if (TREE_CODE (type) != INTEGER_TYPE)
7505 return fold_build3 (COND_EXPR, type, arg0,
7506 fold_build1 (code, type,
7507 integer_one_node),
7508 fold_build1 (code, type,
7509 integer_zero_node));
7513 switch (code)
7515 case NOP_EXPR:
7516 case FLOAT_EXPR:
7517 case CONVERT_EXPR:
7518 case FIX_TRUNC_EXPR:
7519 case FIX_CEIL_EXPR:
7520 case FIX_FLOOR_EXPR:
7521 case FIX_ROUND_EXPR:
7522 if (TREE_TYPE (op0) == type)
7523 return op0;
7525 /* If we have (type) (a CMP b) and type is an integral type, return
7526 a new expression involving the new type. */
7527 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7528 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7529 TREE_OPERAND (op0, 1));
7531 /* Handle cases of two conversions in a row. */
7532 if (TREE_CODE (op0) == NOP_EXPR
7533 || TREE_CODE (op0) == CONVERT_EXPR)
7535 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7536 tree inter_type = TREE_TYPE (op0);
7537 int inside_int = INTEGRAL_TYPE_P (inside_type);
7538 int inside_ptr = POINTER_TYPE_P (inside_type);
7539 int inside_float = FLOAT_TYPE_P (inside_type);
7540 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7541 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7542 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7543 int inter_int = INTEGRAL_TYPE_P (inter_type);
7544 int inter_ptr = POINTER_TYPE_P (inter_type);
7545 int inter_float = FLOAT_TYPE_P (inter_type);
7546 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7547 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7548 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7549 int final_int = INTEGRAL_TYPE_P (type);
7550 int final_ptr = POINTER_TYPE_P (type);
7551 int final_float = FLOAT_TYPE_P (type);
7552 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7553 unsigned int final_prec = TYPE_PRECISION (type);
7554 int final_unsignedp = TYPE_UNSIGNED (type);
7556 /* In addition to the cases of two conversions in a row
7557 handled below, if we are converting something to its own
7558 type via an object of identical or wider precision, neither
7559 conversion is needed. */
7560 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7561 && (((inter_int || inter_ptr) && final_int)
7562 || (inter_float && final_float))
7563 && inter_prec >= final_prec)
7564 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7566 /* Likewise, if the intermediate and final types are either both
7567 float or both integer, we don't need the middle conversion if
7568 it is wider than the final type and doesn't change the signedness
7569 (for integers). Avoid this if the final type is a pointer
7570 since then we sometimes need the inner conversion. Likewise if
7571 the outer has a precision not equal to the size of its mode. */
7572 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7573 || (inter_float && inside_float)
7574 || (inter_vec && inside_vec))
7575 && inter_prec >= inside_prec
7576 && (inter_float || inter_vec
7577 || inter_unsignedp == inside_unsignedp)
7578 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7579 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7580 && ! final_ptr
7581 && (! final_vec || inter_prec == inside_prec))
7582 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7584 /* If we have a sign-extension of a zero-extended value, we can
7585 replace that by a single zero-extension. */
7586 if (inside_int && inter_int && final_int
7587 && inside_prec < inter_prec && inter_prec < final_prec
7588 && inside_unsignedp && !inter_unsignedp)
7589 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
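/* Illustrative example (a sketch, assuming 16/32/64-bit precisions):
   (long long)(int)(unsigned short)x first zero-extends to int, whose
   value is then nonnegative, so the outer sign-extension to long long
   is equivalent to zero-extending the unsigned short directly.  */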
7591 /* Two conversions in a row are not needed unless:
7592 - some conversion is floating-point (overstrict for now), or
7593 - some conversion is a vector (overstrict for now), or
7594 - the intermediate type is narrower than both initial and
7595 final, or
7596 - the intermediate type and innermost type differ in signedness,
7597 and the outermost type is wider than the intermediate, or
7598 - the initial type is a pointer type and the precisions of the
7599 intermediate and final types differ, or
7600 - the final type is a pointer type and the precisions of the
7601 initial and intermediate types differ, or
7602 - the final type is a pointer type and the initial type is not, or
7603 - the initial type is a pointer to an array and the final type
7604 is not. */
7605 /* Java pointer type conversions generate checks in some
7606 cases, so we explicitly disallow this optimization. */
7607 if (! inside_float && ! inter_float && ! final_float
7608 && ! inside_vec && ! inter_vec && ! final_vec
7609 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7610 && ! (inside_int && inter_int
7611 && inter_unsignedp != inside_unsignedp
7612 && inter_prec < final_prec)
7613 && ((inter_unsignedp && inter_prec > inside_prec)
7614 == (final_unsignedp && final_prec > inter_prec))
7615 && ! (inside_ptr && inter_prec != final_prec)
7616 && ! (final_ptr && inside_prec != inter_prec)
7617 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7618 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7619 && final_ptr == inside_ptr
7620 && ! (inside_ptr
7621 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7622 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE)
7623 && ! ((strcmp (lang_hooks.name, "GNU Java") == 0)
7624 && final_ptr))
7625 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7628 /* Handle (T *)&A.B.C for A being of type T and B and C
7629 living at offset zero. This occurs frequently in
7630 C++ upcasting and then accessing the base. */
7631 if (TREE_CODE (op0) == ADDR_EXPR
7632 && POINTER_TYPE_P (type)
7633 && handled_component_p (TREE_OPERAND (op0, 0)))
7635 HOST_WIDE_INT bitsize, bitpos;
7636 tree offset;
7637 enum machine_mode mode;
7638 int unsignedp, volatilep;
7639 tree base = TREE_OPERAND (op0, 0);
7640 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7641 &mode, &unsignedp, &volatilep, false);
7642 /* If the reference was to a (constant) zero offset, we can use
7643 the address of the base if it has the same base type
7644 as the result type. */
7645 if (! offset && bitpos == 0
7646 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7647 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7648 return fold_convert (type, build_fold_addr_expr (base));
7651 if (TREE_CODE (op0) == MODIFY_EXPR
7652 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7653 /* Detect assigning a bitfield. */
7654 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7655 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7657 /* Don't leave an assignment inside a conversion
7658 unless assigning a bitfield. */
7659 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7660 /* First do the assignment, then return converted constant. */
7661 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7662 TREE_NO_WARNING (tem) = 1;
7663 TREE_USED (tem) = 1;
7664 return tem;
7667 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7668 constants (if x has signed type, the sign bit cannot be set
7669 in c). This folds extension into the BIT_AND_EXPR. */
7670 if (INTEGRAL_TYPE_P (type)
7671 && TREE_CODE (type) != BOOLEAN_TYPE
7672 && TREE_CODE (op0) == BIT_AND_EXPR
7673 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7675 tree and = op0;
7676 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7677 int change = 0;
7679 if (TYPE_UNSIGNED (TREE_TYPE (and))
7680 || (TYPE_PRECISION (type)
7681 <= TYPE_PRECISION (TREE_TYPE (and))))
7682 change = 1;
7683 else if (TYPE_PRECISION (TREE_TYPE (and1))
7684 <= HOST_BITS_PER_WIDE_INT
7685 && host_integerp (and1, 1))
7687 unsigned HOST_WIDE_INT cst;
7689 cst = tree_low_cst (and1, 1);
7690 cst &= (HOST_WIDE_INT) -1
7691 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7692 change = (cst == 0);
7693 #ifdef LOAD_EXTEND_OP
7694 if (change
7695 && !flag_syntax_only
7696 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7697 == ZERO_EXTEND))
7699 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7700 and0 = fold_convert (uns, and0);
7701 and1 = fold_convert (uns, and1);
7703 #endif
7705 if (change)
7707 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7708 TREE_INT_CST_HIGH (and1));
7709 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7710 TREE_CONSTANT_OVERFLOW (and1));
7711 return fold_build2 (BIT_AND_EXPR, type,
7712 fold_convert (type, and0), tem);
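/* Illustrative example (a sketch): with signed char C,
   (int)(C & 0x7f) becomes (int)C & 0x7f; the mask clears the sign
   bit of the narrow type, so sign-extending before or after the
   BIT_AND_EXPR yields the same value.  */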
7716 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7717 T2 being pointers to types of the same size. */
7718 if (POINTER_TYPE_P (type)
7719 && BINARY_CLASS_P (arg0)
7720 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7721 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7723 tree arg00 = TREE_OPERAND (arg0, 0);
7724 tree t0 = type;
7725 tree t1 = TREE_TYPE (arg00);
7726 tree tt0 = TREE_TYPE (t0);
7727 tree tt1 = TREE_TYPE (t1);
7728 tree s0 = TYPE_SIZE (tt0);
7729 tree s1 = TYPE_SIZE (tt1);
7731 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7732 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7733 TREE_OPERAND (arg0, 1));
7736 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7738 of the same precision, and X is an integer type not narrower than
7738 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7739 if (INTEGRAL_TYPE_P (type)
7740 && TREE_CODE (op0) == BIT_NOT_EXPR
7741 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7742 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7743 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7744 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7746 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7747 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7748 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7749 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
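/* Illustrative example (a sketch, assuming 32-bit int and unsigned int):
   (int) ~(unsigned int) x with x of type int becomes ~x, since
   BIT_NOT_EXPR commutes with a sign-changing cast of equal precision.  */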
7752 tem = fold_convert_const (code, type, op0);
7753 return tem ? tem : NULL_TREE;
7755 case VIEW_CONVERT_EXPR:
7756 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7757 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7758 return fold_view_convert_expr (type, op0);
7760 case NEGATE_EXPR:
7761 tem = fold_negate_expr (arg0);
7762 if (tem)
7763 return fold_convert (type, tem);
7764 return NULL_TREE;
7766 case ABS_EXPR:
7767 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7768 return fold_abs_const (arg0, type);
7769 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7770 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7771 /* Convert fabs((double)float) into (double)fabsf(float). */
7772 else if (TREE_CODE (arg0) == NOP_EXPR
7773 && TREE_CODE (type) == REAL_TYPE)
7775 tree targ0 = strip_float_extensions (arg0);
7776 if (targ0 != arg0)
7777 return fold_convert (type, fold_build1 (ABS_EXPR,
7778 TREE_TYPE (targ0),
7779 targ0));
7781 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7782 else if (TREE_CODE (arg0) == ABS_EXPR)
7783 return arg0;
7784 else if (tree_expr_nonnegative_p (arg0))
7785 return arg0;
7787 /* Strip sign ops from argument. */
7788 if (TREE_CODE (type) == REAL_TYPE)
7790 tem = fold_strip_sign_ops (arg0);
7791 if (tem)
7792 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7794 return NULL_TREE;
7796 case CONJ_EXPR:
7797 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7798 return fold_convert (type, arg0);
7799 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7801 tree itype = TREE_TYPE (type);
7802 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7803 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7804 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7806 if (TREE_CODE (arg0) == COMPLEX_CST)
7808 tree itype = TREE_TYPE (type);
7809 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7810 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7811 return build_complex (type, rpart, negate_expr (ipart));
7813 if (TREE_CODE (arg0) == CONJ_EXPR)
7814 return fold_convert (type, TREE_OPERAND (arg0, 0));
7815 return NULL_TREE;
7817 case BIT_NOT_EXPR:
7818 if (TREE_CODE (arg0) == INTEGER_CST)
7819 return fold_not_const (arg0, type);
7820 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7821 return TREE_OPERAND (arg0, 0);
7822 /* Convert ~ (-A) to A - 1. */
7823 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7824 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7825 build_int_cst (type, 1));
7826 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7827 else if (INTEGRAL_TYPE_P (type)
7828 && ((TREE_CODE (arg0) == MINUS_EXPR
7829 && integer_onep (TREE_OPERAND (arg0, 1)))
7830 || (TREE_CODE (arg0) == PLUS_EXPR
7831 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7832 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7833 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7834 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7835 && (tem = fold_unary (BIT_NOT_EXPR, type,
7836 fold_convert (type,
7837 TREE_OPERAND (arg0, 0)))))
7838 return fold_build2 (BIT_XOR_EXPR, type, tem,
7839 fold_convert (type, TREE_OPERAND (arg0, 1)));
7840 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7841 && (tem = fold_unary (BIT_NOT_EXPR, type,
7842 fold_convert (type,
7843 TREE_OPERAND (arg0, 1)))))
7844 return fold_build2 (BIT_XOR_EXPR, type,
7845 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7847 return NULL_TREE;
7849 case TRUTH_NOT_EXPR:
7850 /* The argument to invert_truthvalue must have Boolean type. */
7851 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7852 arg0 = fold_convert (boolean_type_node, arg0);
7854 /* Note that the operand of this must be an int
7855 and its values must be 0 or 1.
7856 ("true" is a fixed value perhaps depending on the language,
7857 but we don't handle values other than 1 correctly yet.) */
7858 tem = fold_truth_not_expr (arg0);
7859 if (!tem)
7860 return NULL_TREE;
7861 return fold_convert (type, tem);
7863 case REALPART_EXPR:
7864 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7865 return fold_convert (type, arg0);
7866 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7867 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7868 TREE_OPERAND (arg0, 1));
7869 if (TREE_CODE (arg0) == COMPLEX_CST)
7870 return fold_convert (type, TREE_REALPART (arg0));
7871 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7873 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7874 tem = fold_build2 (TREE_CODE (arg0), itype,
7875 fold_build1 (REALPART_EXPR, itype,
7876 TREE_OPERAND (arg0, 0)),
7877 fold_build1 (REALPART_EXPR, itype,
7878 TREE_OPERAND (arg0, 1)));
7879 return fold_convert (type, tem);
7881 if (TREE_CODE (arg0) == CONJ_EXPR)
7883 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7884 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7885 return fold_convert (type, tem);
7887 return NULL_TREE;
7889 case IMAGPART_EXPR:
7890 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7891 return fold_convert (type, integer_zero_node);
7892 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7893 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7894 TREE_OPERAND (arg0, 0));
7895 if (TREE_CODE (arg0) == COMPLEX_CST)
7896 return fold_convert (type, TREE_IMAGPART (arg0));
7897 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7899 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7900 tem = fold_build2 (TREE_CODE (arg0), itype,
7901 fold_build1 (IMAGPART_EXPR, itype,
7902 TREE_OPERAND (arg0, 0)),
7903 fold_build1 (IMAGPART_EXPR, itype,
7904 TREE_OPERAND (arg0, 1)));
7905 return fold_convert (type, tem);
7907 if (TREE_CODE (arg0) == CONJ_EXPR)
7909 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7910 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7911 return fold_convert (type, negate_expr (tem));
7913 return NULL_TREE;
7915 default:
7916 return NULL_TREE;
7917 } /* switch (code) */
7920 /* Fold a binary expression of code CODE and type TYPE with operands
7921 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7922 Return the folded expression if folding is successful. Otherwise,
7923 return NULL_TREE. */
7925 static tree
7926 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7928 enum tree_code compl_code;
7930 if (code == MIN_EXPR)
7931 compl_code = MAX_EXPR;
7932 else if (code == MAX_EXPR)
7933 compl_code = MIN_EXPR;
7934 else
7935 gcc_unreachable ();
7937 /* MIN (MAX (a, b), b) == b. */
7938 if (TREE_CODE (op0) == compl_code
7939 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7940 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
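/* Illustrative check (a sketch): with a = 2 and b = 3,
   MIN (MAX (a, b), b) is MIN (3, 3) = 3 = b; with a = 7 and b = 3 it
   is MIN (7, 3) = 3 = b, so the MAX operand can be dropped.  */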
7942 /* MIN (MAX (b, a), b) == b. */
7943 if (TREE_CODE (op0) == compl_code
7944 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7945 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7946 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7948 /* MIN (a, MAX (a, b)) == a. */
7949 if (TREE_CODE (op1) == compl_code
7950 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7951 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7952 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7954 /* MIN (a, MAX (b, a)) == a. */
7955 if (TREE_CODE (op1) == compl_code
7956 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7957 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7958 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7960 return NULL_TREE;
7963 /* Subroutine of fold_binary. This routine performs all of the
7964 transformations that are common to the equality/inequality
7965 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7966 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7967 fold_binary itself should use fold_binary instead. Fold a comparison with
7968 tree code CODE and type TYPE with operands OP0 and OP1. Return
7969 the folded comparison or NULL_TREE. */
7971 static tree
7972 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7974 tree arg0, arg1, tem;
7976 arg0 = op0;
7977 arg1 = op1;
7979 STRIP_SIGN_NOPS (arg0);
7980 STRIP_SIGN_NOPS (arg1);
7982 tem = fold_relational_const (code, type, arg0, arg1);
7983 if (tem != NULL_TREE)
7984 return tem;
7986 /* If one arg is a real or integer constant, put it last. */
7987 if (tree_swap_operands_p (arg0, arg1, true))
7988 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7990 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7991 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7992 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7993 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7994 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
7995 && (TREE_CODE (arg1) == INTEGER_CST
7996 && !TREE_OVERFLOW (arg1)))
7998 tree const1 = TREE_OPERAND (arg0, 1);
7999 tree const2 = arg1;
8000 tree variable = TREE_OPERAND (arg0, 0);
8001 tree lhs;
8002 int lhs_add;
8003 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8005 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8006 TREE_TYPE (arg1), const2, const1);
8007 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8008 && (TREE_CODE (lhs) != INTEGER_CST
8009 || !TREE_OVERFLOW (lhs)))
8011 fold_overflow_warning (("assuming signed overflow does not occur "
8012 "when changing X +- C1 cmp C2 to "
8013 "X cmp C1 +- C2"),
8014 WARN_STRICT_OVERFLOW_COMPARISON);
8015 return fold_build2 (code, type, variable, lhs);
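/* Illustrative example (a sketch, assuming signed int with undefined
   overflow): x + 3 < 10 becomes x < 7, where 7 is C2 - C1 computed at
   compile time; the fold_overflow_warning call above records that this
   assumed x + 3 does not wrap.  */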
8019 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8020 same object, then we can fold this to a comparison of the two offsets in
8021 signed size type. This is possible because pointer arithmetic is
8022 restricted to remain within an object and overflow on pointer differences
8023 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8025 We check flag_wrapv directly because pointer types are unsigned,
8026 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8027 normally what we want, to avoid certain odd overflow cases, but
8028 not here. */
8029 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8030 && !flag_wrapv
8031 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8033 tree base0, offset0, base1, offset1;
8035 if (extract_array_ref (arg0, &base0, &offset0)
8036 && extract_array_ref (arg1, &base1, &offset1)
8037 && operand_equal_p (base0, base1, 0))
8039 tree signed_size_type_node;
8040 signed_size_type_node = signed_type_for (size_type_node);
8042 /* By converting to the signed size type we cover middle-end pointer
8043 arithmetic, which operates on unsigned pointer types of size-type
8044 width, and ARRAY_REF offsets, which are properly sign- or
8045 zero-extended from their type in case it is narrower than the
8046 size type. */
8047 if (offset0 == NULL_TREE)
8048 offset0 = build_int_cst (signed_size_type_node, 0);
8049 else
8050 offset0 = fold_convert (signed_size_type_node, offset0);
8051 if (offset1 == NULL_TREE)
8052 offset1 = build_int_cst (signed_size_type_node, 0);
8053 else
8054 offset1 = fold_convert (signed_size_type_node, offset1);
8056 return fold_build2 (code, type, offset0, offset1);
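/* Illustrative example (a sketch): for &a[i] < &a[j] with the same base
   object a, extract_array_ref yields offsets i and j, and the whole
   comparison folds to i < j in the signed variant of size_type_node.  */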
8060 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8062 tree targ0 = strip_float_extensions (arg0);
8063 tree targ1 = strip_float_extensions (arg1);
8064 tree newtype = TREE_TYPE (targ0);
8066 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8067 newtype = TREE_TYPE (targ1);
8069 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8070 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8071 return fold_build2 (code, type, fold_convert (newtype, targ0),
8072 fold_convert (newtype, targ1));
8074 /* (-a) CMP (-b) -> b CMP a */
8075 if (TREE_CODE (arg0) == NEGATE_EXPR
8076 && TREE_CODE (arg1) == NEGATE_EXPR)
8077 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8078 TREE_OPERAND (arg0, 0));
8080 if (TREE_CODE (arg1) == REAL_CST)
8082 REAL_VALUE_TYPE cst;
8083 cst = TREE_REAL_CST (arg1);
8085 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8086 if (TREE_CODE (arg0) == NEGATE_EXPR)
8087 return fold_build2 (swap_tree_comparison (code), type,
8088 TREE_OPERAND (arg0, 0),
8089 build_real (TREE_TYPE (arg1),
8090 REAL_VALUE_NEGATE (cst)));
8092 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8093 /* a CMP (-0) -> a CMP 0 */
8094 if (REAL_VALUE_MINUS_ZERO (cst))
8095 return fold_build2 (code, type, arg0,
8096 build_real (TREE_TYPE (arg1), dconst0));
8098 /* x != NaN is always true, other ops are always false. */
8099 if (REAL_VALUE_ISNAN (cst)
8100 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8102 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8103 return omit_one_operand (type, tem, arg0);
8106 /* Fold comparisons against infinity. */
8107 if (REAL_VALUE_ISINF (cst))
8109 tem = fold_inf_compare (code, type, arg0, arg1);
8110 if (tem != NULL_TREE)
8111 return tem;
8115 /* If this is a comparison of a real constant with a PLUS_EXPR
8116 or a MINUS_EXPR of a real constant, we can convert it into a
8117 comparison with a revised real constant as long as no overflow
8118 occurs when unsafe_math_optimizations are enabled. */
8119 if (flag_unsafe_math_optimizations
8120 && TREE_CODE (arg1) == REAL_CST
8121 && (TREE_CODE (arg0) == PLUS_EXPR
8122 || TREE_CODE (arg0) == MINUS_EXPR)
8123 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8124 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8125 ? MINUS_EXPR : PLUS_EXPR,
8126 arg1, TREE_OPERAND (arg0, 1), 0))
8127 && ! TREE_CONSTANT_OVERFLOW (tem))
8128 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8130 /* Likewise, we can simplify a comparison of a real constant with
8131 a MINUS_EXPR whose first operand is also a real constant, i.e.
8132 (c1 - x) < c2 becomes x > c1-c2. */
8133 if (flag_unsafe_math_optimizations
8134 && TREE_CODE (arg1) == REAL_CST
8135 && TREE_CODE (arg0) == MINUS_EXPR
8136 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8137 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8138 arg1, 0))
8139 && ! TREE_CONSTANT_OVERFLOW (tem))
8140 return fold_build2 (swap_tree_comparison (code), type,
8141 TREE_OPERAND (arg0, 1), tem);
8143 /* Fold comparisons against built-in math functions. */
8144 if (TREE_CODE (arg1) == REAL_CST
8145 && flag_unsafe_math_optimizations
8146 && ! flag_errno_math)
8148 enum built_in_function fcode = builtin_mathfn_code (arg0);
8150 if (fcode != END_BUILTINS)
8152 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8153 if (tem != NULL_TREE)
8154 return tem;
8159 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8160 if (TREE_CONSTANT (arg1)
8161 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8162 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8163 /* This optimization is invalid for ordered comparisons
8164 if CONST+INCR overflows or if foo+incr might overflow.
8165 This optimization is invalid for floating point due to rounding.
8166 For pointer types we assume overflow doesn't happen. */
8167 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8168 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8169 && (code == EQ_EXPR || code == NE_EXPR))))
8171 tree varop, newconst;
8173 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8175 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8176 arg1, TREE_OPERAND (arg0, 1));
8177 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8178 TREE_OPERAND (arg0, 0),
8179 TREE_OPERAND (arg0, 1));
8181 else
8183 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8184 arg1, TREE_OPERAND (arg0, 1));
8185 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8186 TREE_OPERAND (arg0, 0),
8187 TREE_OPERAND (arg0, 1));
8191 /* If VAROP is a reference to a bitfield, we must mask
8192 the constant by the width of the field. */
8193 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8194 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8195 && host_integerp (DECL_SIZE (TREE_OPERAND
8196 (TREE_OPERAND (varop, 0), 1)), 1))
8198 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8199 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8200 tree folded_compare, shift;
8202 /* First check whether the comparison would come out
8203 always the same. If we don't do that we would
8204 change the meaning with the masking. */
8205 folded_compare = fold_build2 (code, type,
8206 TREE_OPERAND (varop, 0), arg1);
8207 if (TREE_CODE (folded_compare) == INTEGER_CST)
8208 return omit_one_operand (type, folded_compare, varop);
8210 shift = build_int_cst (NULL_TREE,
8211 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8212 shift = fold_convert (TREE_TYPE (varop), shift);
8213 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8214 newconst, shift);
8215 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8216 newconst, shift);
8219 return fold_build2 (code, type, varop, newconst);
8222 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8223 && (TREE_CODE (arg0) == NOP_EXPR
8224 || TREE_CODE (arg0) == CONVERT_EXPR))
8226 /* If we are widening one operand of an integer comparison,
8227 see if the other operand is similarly being widened. Perhaps we
8228 can do the comparison in the narrower type. */
8229 tem = fold_widened_comparison (code, type, arg0, arg1);
8230 if (tem)
8231 return tem;
8233 /* Or if we are changing signedness. */
8234 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8235 if (tem)
8236 return tem;
8239 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8240 constant, we can simplify it. */
8241 if (TREE_CODE (arg1) == INTEGER_CST
8242 && (TREE_CODE (arg0) == MIN_EXPR
8243 || TREE_CODE (arg0) == MAX_EXPR)
8244 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8246 tem = optimize_minmax_comparison (code, type, op0, op1);
8247 if (tem)
8248 return tem;
8251 /* Simplify comparison of something with itself. (For IEEE
8252 floating-point, we can only do some of these simplifications.) */
8253 if (operand_equal_p (arg0, arg1, 0))
8255 switch (code)
8257 case EQ_EXPR:
8258 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8259 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8260 return constant_boolean_node (1, type);
8261 break;
8263 case GE_EXPR:
8264 case LE_EXPR:
8265 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8266 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8267 return constant_boolean_node (1, type);
8268 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8270 case NE_EXPR:
8271 /* For NE, we can only do this simplification if integer
8272 or we don't honor IEEE floating point NaNs. */
8273 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8274 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8275 break;
8276 /* ... fall through ... */
8277 case GT_EXPR:
8278 case LT_EXPR:
8279 return constant_boolean_node (0, type);
8280 default:
8281 gcc_unreachable ();
8285 /* If we are comparing an expression that just has comparisons
8286 of two integer values, arithmetic expressions of those comparisons,
8287 and constants, we can simplify it. There are only three cases
8288 to check: the two values can either be equal, the first can be
8289 greater, or the second can be greater. Fold the expression for
8290 those three values. Since each value must be 0 or 1, we have
8291 eight possibilities, each of which corresponds to the constant 0
8292 or 1 or one of the six possible comparisons.
8294 This handles common cases like (a > b) == 0 but also handles
8295 expressions like ((x > y) - (y > x)) > 0, which supposedly
8296 occur in macroized code. */
8298 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8300 tree cval1 = 0, cval2 = 0;
8301 int save_p = 0;
8303 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8304 /* Don't handle degenerate cases here; they should already
8305 have been handled anyway. */
8306 && cval1 != 0 && cval2 != 0
8307 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8308 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8309 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8310 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8311 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8312 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8313 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8315 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8316 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8318 /* We can't just pass T to eval_subst in case cval1 or cval2
8319 was the same as ARG1. */
8321 tree high_result
8322 = fold_build2 (code, type,
8323 eval_subst (arg0, cval1, maxval,
8324 cval2, minval),
8325 arg1);
8326 tree equal_result
8327 = fold_build2 (code, type,
8328 eval_subst (arg0, cval1, maxval,
8329 cval2, maxval),
8330 arg1);
8331 tree low_result
8332 = fold_build2 (code, type,
8333 eval_subst (arg0, cval1, minval,
8334 cval2, maxval),
8335 arg1);
8337 /* All three of these results should be 0 or 1. Confirm they are.
8338 Then use those values to select the proper code to use. */
8340 if (TREE_CODE (high_result) == INTEGER_CST
8341 && TREE_CODE (equal_result) == INTEGER_CST
8342 && TREE_CODE (low_result) == INTEGER_CST)
8344 /* Make a 3-bit mask with the high-order bit being the
8345 value for `>', the next for `=', and the low for `<'. */
8346 switch ((integer_onep (high_result) * 4)
8347 + (integer_onep (equal_result) * 2)
8348 + integer_onep (low_result))
8350 case 0:
8351 /* Always false. */
8352 return omit_one_operand (type, integer_zero_node, arg0);
8353 case 1:
8354 code = LT_EXPR;
8355 break;
8356 case 2:
8357 code = EQ_EXPR;
8358 break;
8359 case 3:
8360 code = LE_EXPR;
8361 break;
8362 case 4:
8363 code = GT_EXPR;
8364 break;
8365 case 5:
8366 code = NE_EXPR;
8367 break;
8368 case 6:
8369 code = GE_EXPR;
8370 break;
8371 case 7:
8372 /* Always true. */
8373 return omit_one_operand (type, integer_one_node, arg0);
8376 if (save_p)
8377 return save_expr (build2 (code, type, cval1, cval2));
8378 return fold_build2 (code, type, cval1, cval2);
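/* Illustrative example (a sketch): for (a > b) == 0 the three probes
   give high_result = 0 (a > b), equal_result = 1 (a == b) and
   low_result = 1 (a < b), so the mask is 0*4 + 1*2 + 1 = 3 and the
   expression folds to a <= b.  */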
8383 /* Fold a comparison of the address of COMPONENT_REFs with the same
8384 type and component to a comparison of the address of the base
8385 object. In short, &x->a OP &y->a to x OP y and
8386 &x->a OP &y.a to x OP &y */
8387 if (TREE_CODE (arg0) == ADDR_EXPR
8388 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8389 && TREE_CODE (arg1) == ADDR_EXPR
8390 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8392 tree cref0 = TREE_OPERAND (arg0, 0);
8393 tree cref1 = TREE_OPERAND (arg1, 0);
8394 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8396 tree op0 = TREE_OPERAND (cref0, 0);
8397 tree op1 = TREE_OPERAND (cref1, 0);
8398 return fold_build2 (code, type,
8399 build_fold_addr_expr (op0),
8400 build_fold_addr_expr (op1));
8404 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8405 into a single range test. */
8406 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8407 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8408 && TREE_CODE (arg1) == INTEGER_CST
8409 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8410 && !integer_zerop (TREE_OPERAND (arg0, 1))
8411 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8412 && !TREE_OVERFLOW (arg1))
8414 tem = fold_div_compare (code, type, arg0, arg1);
8415 if (tem != NULL_TREE)
8416 return tem;
8419 return NULL_TREE;
8423 /* Subroutine of fold_binary. Optimize complex multiplications of the
8424 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8425 argument EXPR represents the expression "z" of type TYPE. */
8427 static tree
8428 fold_mult_zconjz (tree type, tree expr)
8430 tree itype = TREE_TYPE (type);
8431 tree rpart, ipart, tem;
8433 if (TREE_CODE (expr) == COMPLEX_EXPR)
8435 rpart = TREE_OPERAND (expr, 0);
8436 ipart = TREE_OPERAND (expr, 1);
8438 else if (TREE_CODE (expr) == COMPLEX_CST)
8440 rpart = TREE_REALPART (expr);
8441 ipart = TREE_IMAGPART (expr);
8443 else
8445 expr = save_expr (expr);
8446 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8447 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8450 rpart = save_expr (rpart);
8451 ipart = save_expr (ipart);
8452 tem = fold_build2 (PLUS_EXPR, itype,
8453 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8454 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8455 return fold_build2 (COMPLEX_EXPR, type, tem,
8456 fold_convert (itype, integer_zero_node));
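/* Illustrative derivation (a sketch): for z = a + b*i,
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i, which is
   exactly the COMPLEX_EXPR built above; the save_exprs guard against
   evaluating a and b twice.  */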
8460 /* Fold a binary expression of code CODE and type TYPE with operands
8461 OP0 and OP1. Return the folded expression if folding is
8462 successful. Otherwise, return NULL_TREE. */
8464 tree
8465 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8467 enum tree_code_class kind = TREE_CODE_CLASS (code);
8468 tree arg0, arg1, tem;
8469 tree t1 = NULL_TREE;
8470 bool strict_overflow_p;
8472 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8473 && TREE_CODE_LENGTH (code) == 2
8474 && op0 != NULL_TREE
8475 && op1 != NULL_TREE);
8477 arg0 = op0;
8478 arg1 = op1;
8480 /* Strip any conversions that don't change the mode. This is
8481 safe for every expression, except for a comparison expression
8482 because its signedness is derived from its operands. So, in
8483 the latter case, only strip conversions that don't change the
8484 signedness.
8486 Note that this is done as an internal manipulation within the
8487 constant folder, in order to find the simplest representation
8488 of the arguments so that their form can be studied. In any
8489 cases, the appropriate type conversions should be put back in
8490 the tree that will get out of the constant folder. */
8492 if (kind == tcc_comparison)
8494 STRIP_SIGN_NOPS (arg0);
8495 STRIP_SIGN_NOPS (arg1);
8497 else
8499 STRIP_NOPS (arg0);
8500 STRIP_NOPS (arg1);
8503 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8504 constant but we can't do arithmetic on them. */
8505 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8506 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8507 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8508 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8510 if (kind == tcc_binary)
8511 tem = const_binop (code, arg0, arg1, 0);
8512 else if (kind == tcc_comparison)
8513 tem = fold_relational_const (code, type, arg0, arg1);
8514 else
8515 tem = NULL_TREE;
8517 if (tem != NULL_TREE)
8519 if (TREE_TYPE (tem) != type)
8520 tem = fold_convert (type, tem);
8521 return tem;
8525 /* If this is a commutative operation, and ARG0 is a constant, move it
8526 to ARG1 to reduce the number of tests below. */
8527 if (commutative_tree_code (code)
8528 && tree_swap_operands_p (arg0, arg1, true))
8529 return fold_build2 (code, type, op1, op0);
8531 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8533 First check for cases where an arithmetic operation is applied to a
8534 compound, conditional, or comparison operation. Push the arithmetic
8535 operation inside the compound or conditional to see if any folding
8536 can then be done. Convert comparison to conditional for this purpose.
8537 This also optimizes non-constant cases that used to be done in
8538 expand_expr.
8540 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
8541 where one of the operands is a comparison and the other is a comparison, a
8542 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8543 code below would make the expression more complex. Change it to a
8544 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8545 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8547 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8548 || code == EQ_EXPR || code == NE_EXPR)
8549 && ((truth_value_p (TREE_CODE (arg0))
8550 && (truth_value_p (TREE_CODE (arg1))
8551 || (TREE_CODE (arg1) == BIT_AND_EXPR
8552 && integer_onep (TREE_OPERAND (arg1, 1)))))
8553 || (truth_value_p (TREE_CODE (arg1))
8554 && (truth_value_p (TREE_CODE (arg0))
8555 || (TREE_CODE (arg0) == BIT_AND_EXPR
8556 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8558 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8559 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8560 : TRUTH_XOR_EXPR,
8561 boolean_type_node,
8562 fold_convert (boolean_type_node, arg0),
8563 fold_convert (boolean_type_node, arg1));
8565 if (code == EQ_EXPR)
8566 tem = invert_truthvalue (tem);
8568 return fold_convert (type, tem);
8571 if (TREE_CODE_CLASS (code) == tcc_binary
8572 || TREE_CODE_CLASS (code) == tcc_comparison)
8574 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8575 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8576 fold_build2 (code, type,
8577 TREE_OPERAND (arg0, 1), op1));
8578 if (TREE_CODE (arg1) == COMPOUND_EXPR
8579 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8580 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8581 fold_build2 (code, type,
8582 op0, TREE_OPERAND (arg1, 1)));
8584 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8586 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8587 arg0, arg1,
8588 /*cond_first_p=*/1);
8589 if (tem != NULL_TREE)
8590 return tem;
8593 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8595 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8596 arg1, arg0,
8597 /*cond_first_p=*/0);
8598 if (tem != NULL_TREE)
8599 return tem;
8603 switch (code)
8605 case PLUS_EXPR:
8606 /* A + (-B) -> A - B */
8607 if (TREE_CODE (arg1) == NEGATE_EXPR)
8608 return fold_build2 (MINUS_EXPR, type,
8609 fold_convert (type, arg0),
8610 fold_convert (type, TREE_OPERAND (arg1, 0)));
8611 /* (-A) + B -> B - A */
8612 if (TREE_CODE (arg0) == NEGATE_EXPR
8613 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8614 return fold_build2 (MINUS_EXPR, type,
8615 fold_convert (type, arg1),
8616 fold_convert (type, TREE_OPERAND (arg0, 0)));
8617 /* Convert ~A + 1 to -A. */
8618 if (INTEGRAL_TYPE_P (type)
8619 && TREE_CODE (arg0) == BIT_NOT_EXPR
8620 && integer_onep (arg1))
8621 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8623 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8624 same or one. */
8625 if ((TREE_CODE (arg0) == MULT_EXPR
8626 || TREE_CODE (arg1) == MULT_EXPR)
8627 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8629 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8630 if (tem)
8631 return tem;
8634 if (! FLOAT_TYPE_P (type))
8636 if (integer_zerop (arg1))
8637 return non_lvalue (fold_convert (type, arg0));
8639 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8640 with a constant, and the two constants have no bits in common,
8641 we should treat this as a BIT_IOR_EXPR since this may produce more
8642 simplifications. */
8643 if (TREE_CODE (arg0) == BIT_AND_EXPR
8644 && TREE_CODE (arg1) == BIT_AND_EXPR
8645 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8646 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8647 && integer_zerop (const_binop (BIT_AND_EXPR,
8648 TREE_OPERAND (arg0, 1),
8649 TREE_OPERAND (arg1, 1), 0)))
8651 code = BIT_IOR_EXPR;
8652 goto bit_ior;
8655 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8656 (plus (plus (mult) (mult)) (foo)) so that we can
8657 take advantage of the factoring cases below. */
8658 if (((TREE_CODE (arg0) == PLUS_EXPR
8659 || TREE_CODE (arg0) == MINUS_EXPR)
8660 && TREE_CODE (arg1) == MULT_EXPR)
8661 || ((TREE_CODE (arg1) == PLUS_EXPR
8662 || TREE_CODE (arg1) == MINUS_EXPR)
8663 && TREE_CODE (arg0) == MULT_EXPR))
8665 tree parg0, parg1, parg, marg;
8666 enum tree_code pcode;
8668 if (TREE_CODE (arg1) == MULT_EXPR)
8669 parg = arg0, marg = arg1;
8670 else
8671 parg = arg1, marg = arg0;
8672 pcode = TREE_CODE (parg);
8673 parg0 = TREE_OPERAND (parg, 0);
8674 parg1 = TREE_OPERAND (parg, 1);
8675 STRIP_NOPS (parg0);
8676 STRIP_NOPS (parg1);
8678 if (TREE_CODE (parg0) == MULT_EXPR
8679 && TREE_CODE (parg1) != MULT_EXPR)
8680 return fold_build2 (pcode, type,
8681 fold_build2 (PLUS_EXPR, type,
8682 fold_convert (type, parg0),
8683 fold_convert (type, marg)),
8684 fold_convert (type, parg1));
8685 if (TREE_CODE (parg0) != MULT_EXPR
8686 && TREE_CODE (parg1) == MULT_EXPR)
8687 return fold_build2 (PLUS_EXPR, type,
8688 fold_convert (type, parg0),
8689 fold_build2 (pcode, type,
8690 fold_convert (type, marg),
8691 fold_convert (type,
8692 parg1)));
8695 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
8696 of the array. The loop optimizer sometimes produces this type of
8697 expression. */
8698 if (TREE_CODE (arg0) == ADDR_EXPR)
8700 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8701 if (tem)
8702 return fold_convert (type, tem);
8704 else if (TREE_CODE (arg1) == ADDR_EXPR)
8706 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8707 if (tem)
8708 return fold_convert (type, tem);
8711 else
8713 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8714 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8715 return non_lvalue (fold_convert (type, arg0));
8717 /* Likewise if the operands are reversed. */
8718 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8719 return non_lvalue (fold_convert (type, arg1));
8721 /* Convert X + -C into X - C. */
8722 if (TREE_CODE (arg1) == REAL_CST
8723 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8725 tem = fold_negate_const (arg1, type);
8726 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8727 return fold_build2 (MINUS_EXPR, type,
8728 fold_convert (type, arg0),
8729 fold_convert (type, tem));
8732 if (flag_unsafe_math_optimizations
8733 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8734 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8735 && (tem = distribute_real_division (code, type, arg0, arg1)))
8736 return tem;
8738 /* Convert x+x into x*2.0. */
8739 if (operand_equal_p (arg0, arg1, 0)
8740 && SCALAR_FLOAT_TYPE_P (type))
8741 return fold_build2 (MULT_EXPR, type, arg0,
8742 build_real (type, dconst2));
8744 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8745 if (flag_unsafe_math_optimizations
8746 && TREE_CODE (arg1) == PLUS_EXPR
8747 && TREE_CODE (arg0) != MULT_EXPR)
8749 tree tree10 = TREE_OPERAND (arg1, 0);
8750 tree tree11 = TREE_OPERAND (arg1, 1);
8751 if (TREE_CODE (tree11) == MULT_EXPR
8752 && TREE_CODE (tree10) == MULT_EXPR)
8754 tree tree0;
8755 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8756 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8759 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8760 if (flag_unsafe_math_optimizations
8761 && TREE_CODE (arg0) == PLUS_EXPR
8762 && TREE_CODE (arg1) != MULT_EXPR)
8764 tree tree00 = TREE_OPERAND (arg0, 0);
8765 tree tree01 = TREE_OPERAND (arg0, 1);
8766 if (TREE_CODE (tree01) == MULT_EXPR
8767 && TREE_CODE (tree00) == MULT_EXPR)
8769 tree tree0;
8770 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8771 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8776 bit_rotate:
8777 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8778 is a rotate of A by C1 bits. */
8779 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8780 is a rotate of A by B bits. */
8782 enum tree_code code0, code1;
8783 code0 = TREE_CODE (arg0);
8784 code1 = TREE_CODE (arg1);
8785 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8786 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8787 && operand_equal_p (TREE_OPERAND (arg0, 0),
8788 TREE_OPERAND (arg1, 0), 0)
8789 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8791 tree tree01, tree11;
8792 enum tree_code code01, code11;
8794 tree01 = TREE_OPERAND (arg0, 1);
8795 tree11 = TREE_OPERAND (arg1, 1);
8796 STRIP_NOPS (tree01);
8797 STRIP_NOPS (tree11);
8798 code01 = TREE_CODE (tree01);
8799 code11 = TREE_CODE (tree11);
8800 if (code01 == INTEGER_CST
8801 && code11 == INTEGER_CST
8802 && TREE_INT_CST_HIGH (tree01) == 0
8803 && TREE_INT_CST_HIGH (tree11) == 0
8804 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8805 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8806 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8807 code0 == LSHIFT_EXPR ? tree01 : tree11);
8808 else if (code11 == MINUS_EXPR)
8810 tree tree110, tree111;
8811 tree110 = TREE_OPERAND (tree11, 0);
8812 tree111 = TREE_OPERAND (tree11, 1);
8813 STRIP_NOPS (tree110);
8814 STRIP_NOPS (tree111);
8815 if (TREE_CODE (tree110) == INTEGER_CST
8816 && 0 == compare_tree_int (tree110,
8817 TYPE_PRECISION
8818 (TREE_TYPE (TREE_OPERAND
8819 (arg0, 0))))
8820 && operand_equal_p (tree01, tree111, 0))
8821 return build2 ((code0 == LSHIFT_EXPR
8822 ? LROTATE_EXPR
8823 : RROTATE_EXPR),
8824 type, TREE_OPERAND (arg0, 0), tree01);
8826 else if (code01 == MINUS_EXPR)
8828 tree tree010, tree011;
8829 tree010 = TREE_OPERAND (tree01, 0);
8830 tree011 = TREE_OPERAND (tree01, 1);
8831 STRIP_NOPS (tree010);
8832 STRIP_NOPS (tree011);
8833 if (TREE_CODE (tree010) == INTEGER_CST
8834 && 0 == compare_tree_int (tree010,
8835 TYPE_PRECISION
8836 (TREE_TYPE (TREE_OPERAND
8837 (arg0, 0))))
8838 && operand_equal_p (tree11, tree011, 0))
8839 return build2 ((code0 != LSHIFT_EXPR
8840 ? LROTATE_EXPR
8841 : RROTATE_EXPR),
8842 type, TREE_OPERAND (arg0, 0), tree11);
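/* Illustrative example (a sketch, assuming 32-bit unsigned int X):
   (X << 3) + (X >> 29) has constant shift counts summing to the
   precision and becomes LROTATE_EXPR <X, 3>; likewise
   (X << N) + (X >> (32 - N)) matches the MINUS_EXPR form above and
   becomes a rotate of X by N bits.  */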
8847 associate:
8848 /* In most languages, we can't associate operations on floats through
8849 parentheses. Rather than remember where the parentheses were, we
8850 don't associate floats at all, unless the user has specified
8851 -funsafe-math-optimizations. */
8853 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8855 tree var0, con0, lit0, minus_lit0;
8856 tree var1, con1, lit1, minus_lit1;
8857 bool ok = true;
8859 /* Split both trees into variables, constants, and literals. Then
8860 associate each group together, the constants with literals,
8861 then the result with variables. This increases the chances of
8862 literals being recombined later and of generating relocatable
8863 expressions for the sum of a constant and literal. */
8864 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8865 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8866 code == MINUS_EXPR);
8868 /* With undefined overflow we can only associate constants
8869 with one variable. */
8870 if ((POINTER_TYPE_P (type)
8871 || (INTEGRAL_TYPE_P (type)
8872 && !(TYPE_UNSIGNED (type) || flag_wrapv)))
8873 && var0 && var1)
8875 tree tmp0 = var0;
8876 tree tmp1 = var1;
8878 if (TREE_CODE (tmp0) == NEGATE_EXPR)
8879 tmp0 = TREE_OPERAND (tmp0, 0);
8880 if (TREE_CODE (tmp1) == NEGATE_EXPR)
8881 tmp1 = TREE_OPERAND (tmp1, 0);
8882 /* The only case we can still associate with two variables
8883 is if they are the same, modulo negation. */
8884 if (!operand_equal_p (tmp0, tmp1, 0))
8885 ok = false;
8888 /* Only do something if we found more than two objects. Otherwise,
8889 nothing has changed and we risk infinite recursion. */
8890 if (ok
8891 && (2 < ((var0 != 0) + (var1 != 0)
8892 + (con0 != 0) + (con1 != 0)
8893 + (lit0 != 0) + (lit1 != 0)
8894 + (minus_lit0 != 0) + (minus_lit1 != 0))))
8896 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8897 if (code == MINUS_EXPR)
8898 code = PLUS_EXPR;
8900 var0 = associate_trees (var0, var1, code, type);
8901 con0 = associate_trees (con0, con1, code, type);
8902 lit0 = associate_trees (lit0, lit1, code, type);
8903 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8905 /* Preserve the MINUS_EXPR if the negative part of the literal is
8906 greater than the positive part. Otherwise, the multiplicative
8907 folding code (i.e. extract_muldiv) may be fooled in case
8908 unsigned constants are subtracted, as in the following
8909 example: ((X*2 + 4) - 8U)/2. */
8910 if (minus_lit0 && lit0)
8912 if (TREE_CODE (lit0) == INTEGER_CST
8913 && TREE_CODE (minus_lit0) == INTEGER_CST
8914 && tree_int_cst_lt (lit0, minus_lit0))
8916 minus_lit0 = associate_trees (minus_lit0, lit0,
8917 MINUS_EXPR, type);
8918 lit0 = 0;
8920 else
8922 lit0 = associate_trees (lit0, minus_lit0,
8923 MINUS_EXPR, type);
8924 minus_lit0 = 0;
8927 if (minus_lit0)
8929 if (con0 == 0)
8930 return fold_convert (type,
8931 associate_trees (var0, minus_lit0,
8932 MINUS_EXPR, type));
8933 else
8935 con0 = associate_trees (con0, minus_lit0,
8936 MINUS_EXPR, type);
8937 return fold_convert (type,
8938 associate_trees (var0, con0,
8939 PLUS_EXPR, type));
8943 con0 = associate_trees (con0, lit0, code, type);
8944 return fold_convert (type, associate_trees (var0, con0,
8945 code, type));
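/* Illustrative example (a sketch, assuming unsigned int X and Y, for
   which association is always safe): (X + 1) + (Y + 2) splits into
   variables X, Y and literals 1, 2, and is recombined as (X + Y) + 3.
   For signed types with undefined overflow the check above only allows
   this when the two variable parts are equal modulo negation.  */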
8949 return NULL_TREE;
8951 case MINUS_EXPR:
8952 /* A - (-B) -> A + B */
8953 if (TREE_CODE (arg1) == NEGATE_EXPR)
8954 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8955 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8956 if (TREE_CODE (arg0) == NEGATE_EXPR
8957 && (FLOAT_TYPE_P (type)
8958 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8959 && negate_expr_p (arg1)
8960 && reorder_operands_p (arg0, arg1))
8961 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8962 TREE_OPERAND (arg0, 0));
8963 /* Convert -A - 1 to ~A. */
8964 if (INTEGRAL_TYPE_P (type)
8965 && TREE_CODE (arg0) == NEGATE_EXPR
8966 && integer_onep (arg1))
8967 return fold_build1 (BIT_NOT_EXPR, type,
8968 fold_convert (type, TREE_OPERAND (arg0, 0)));
8970 /* Convert -1 - A to ~A. */
8971 if (INTEGRAL_TYPE_P (type)
8972 && integer_all_onesp (arg0))
8973 return fold_build1 (BIT_NOT_EXPR, type, arg1);
8975 if (! FLOAT_TYPE_P (type))
8977 if (integer_zerop (arg0))
8978 return negate_expr (fold_convert (type, arg1));
8979 if (integer_zerop (arg1))
8980 return non_lvalue (fold_convert (type, arg0));
8982 /* Fold A - (A & B) into ~B & A. */
8983 if (!TREE_SIDE_EFFECTS (arg0)
8984 && TREE_CODE (arg1) == BIT_AND_EXPR)
8986 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8987 return fold_build2 (BIT_AND_EXPR, type,
8988 fold_build1 (BIT_NOT_EXPR, type,
8989 TREE_OPERAND (arg1, 0)),
8990 arg0);
8991 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8992 return fold_build2 (BIT_AND_EXPR, type,
8993 fold_build1 (BIT_NOT_EXPR, type,
8994 TREE_OPERAND (arg1, 1)),
8995 arg0);
8998 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8999 any power of 2 minus 1. */
9000 if (TREE_CODE (arg0) == BIT_AND_EXPR
9001 && TREE_CODE (arg1) == BIT_AND_EXPR
9002 && operand_equal_p (TREE_OPERAND (arg0, 0),
9003 TREE_OPERAND (arg1, 0), 0))
9005 tree mask0 = TREE_OPERAND (arg0, 1);
9006 tree mask1 = TREE_OPERAND (arg1, 1);
9007 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9009 if (operand_equal_p (tem, mask1, 0))
9011 tem = fold_build2 (BIT_XOR_EXPR, type,
9012 TREE_OPERAND (arg0, 0), mask1);
9013 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9018 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9019 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9020 return non_lvalue (fold_convert (type, arg0));
9022 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9023 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9024 (-ARG1 + ARG0) reduces to -ARG1. */
9025 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9026 return negate_expr (fold_convert (type, arg1));
9028 /* Fold &x - &x. This can happen from &x.foo - &x.
9029 This is unsafe for certain floats even in non-IEEE formats.
9030 In IEEE, it is unsafe because it gives the wrong result for NaNs.
9031 Also note that operand_equal_p is always false if an operand
9032 is volatile. */
9034 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9035 && operand_equal_p (arg0, arg1, 0))
9036 return fold_convert (type, integer_zero_node);
9038 /* A - B -> A + (-B) if B is easily negatable. */
9039 if (negate_expr_p (arg1)
9040 && ((FLOAT_TYPE_P (type)
9041 /* Avoid this transformation if B is a positive REAL_CST. */
9042 && (TREE_CODE (arg1) != REAL_CST
9043 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9044 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
9045 return fold_build2 (PLUS_EXPR, type,
9046 fold_convert (type, arg0),
9047 fold_convert (type, negate_expr (arg1)));
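/* E.g. x - (-2.0) becomes x + 2.0, while x - 2.0 is left alone so
that no negative constant is introduced. */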
9049 /* Try folding difference of addresses. */
9051 HOST_WIDE_INT diff;
9053 if ((TREE_CODE (arg0) == ADDR_EXPR
9054 || TREE_CODE (arg1) == ADDR_EXPR)
9055 && ptr_difference_const (arg0, arg1, &diff))
9056 return build_int_cst_type (type, diff);
9059 /* Fold &a[i] - &a[j] to (i-j) * sizeof (a[0]), the byte difference. */
9060 if (TREE_CODE (arg0) == ADDR_EXPR
9061 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9062 && TREE_CODE (arg1) == ADDR_EXPR
9063 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9065 tree aref0 = TREE_OPERAND (arg0, 0);
9066 tree aref1 = TREE_OPERAND (arg1, 0);
9067 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9068 TREE_OPERAND (aref1, 0), 0))
9070 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9071 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9072 tree esz = array_ref_element_size (aref0);
9073 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9074 return fold_build2 (MULT_EXPR, type, diff,
9075 fold_convert (type, esz));
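/* E.g. &a[5] - &a[2] for int a[] folds to (5 - 2) * sizeof (int),
i.e. 12 bytes on a target with 4-byte int. */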
9080 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
9081 of the array. The loop optimizer sometimes produces this kind
9082 of expression. */
9083 if (TREE_CODE (arg0) == ADDR_EXPR)
9085 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9086 if (tem)
9087 return fold_convert (type, tem);
9090 if (flag_unsafe_math_optimizations
9091 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9092 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9093 && (tem = distribute_real_division (code, type, arg0, arg1)))
9094 return tem;
9096 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9097 same, or one of them being 1. */
9098 if ((TREE_CODE (arg0) == MULT_EXPR
9099 || TREE_CODE (arg1) == MULT_EXPR)
9100 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9102 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9103 if (tem)
9104 return tem;
9107 goto associate;
9109 case MULT_EXPR:
9110 /* (-A) * (-B) -> A * B */
9111 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9112 return fold_build2 (MULT_EXPR, type,
9113 fold_convert (type, TREE_OPERAND (arg0, 0)),
9114 fold_convert (type, negate_expr (arg1)));
9115 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9116 return fold_build2 (MULT_EXPR, type,
9117 fold_convert (type, negate_expr (arg0)),
9118 fold_convert (type, TREE_OPERAND (arg1, 0)));
9120 if (! FLOAT_TYPE_P (type))
9122 if (integer_zerop (arg1))
9123 return omit_one_operand (type, arg1, arg0);
9124 if (integer_onep (arg1))
9125 return non_lvalue (fold_convert (type, arg0));
9126 /* Transform x * -1 into -x. */
9127 if (integer_all_onesp (arg1))
9128 return fold_convert (type, negate_expr (arg0));
9130 /* (a * (1 << b)) is (a << b) */
9131 if (TREE_CODE (arg1) == LSHIFT_EXPR
9132 && integer_onep (TREE_OPERAND (arg1, 0)))
9133 return fold_build2 (LSHIFT_EXPR, type, arg0,
9134 TREE_OPERAND (arg1, 1));
9135 if (TREE_CODE (arg0) == LSHIFT_EXPR
9136 && integer_onep (TREE_OPERAND (arg0, 0)))
9137 return fold_build2 (LSHIFT_EXPR, type, arg1,
9138 TREE_OPERAND (arg0, 1));
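/* E.g. a * (1 << n) and (1 << n) * a both become a << n, replacing
the multiplication with a shift. */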
9140 strict_overflow_p = false;
9141 if (TREE_CODE (arg1) == INTEGER_CST
9142 && 0 != (tem = extract_muldiv (op0,
9143 fold_convert (type, arg1),
9144 code, NULL_TREE,
9145 &strict_overflow_p)))
9147 if (strict_overflow_p)
9148 fold_overflow_warning (("assuming signed overflow does not "
9149 "occur when simplifying "
9150 "multiplication"),
9151 WARN_STRICT_OVERFLOW_MISC);
9152 return fold_convert (type, tem);
9155 /* Optimize z * conj(z) for integer complex numbers. */
9156 if (TREE_CODE (arg0) == CONJ_EXPR
9157 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9158 return fold_mult_zconjz (type, arg1);
9159 if (TREE_CODE (arg1) == CONJ_EXPR
9160 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9161 return fold_mult_zconjz (type, arg0);
9163 else
9165 /* Maybe fold x * 0 to 0. The expressions aren't the same
9166 when x is NaN, since x * 0 is also NaN. Nor are they the
9167 same in modes with signed zeros, since multiplying a
9168 negative value by 0 gives -0, not +0. */
9169 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9170 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9171 && real_zerop (arg1))
9172 return omit_one_operand (type, arg1, arg0);
9173 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9174 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9175 && real_onep (arg1))
9176 return non_lvalue (fold_convert (type, arg0));
9178 /* Transform x * -1.0 into -x. */
9179 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9180 && real_minus_onep (arg1))
9181 return fold_convert (type, negate_expr (arg0));
9183 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9184 if (flag_unsafe_math_optimizations
9185 && TREE_CODE (arg0) == RDIV_EXPR
9186 && TREE_CODE (arg1) == REAL_CST
9187 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9189 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9190 arg1, 0);
9191 if (tem)
9192 return fold_build2 (RDIV_EXPR, type, tem,
9193 TREE_OPERAND (arg0, 1));
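/* E.g. (2.0 / x) * 3.0 becomes 6.0 / x under
-funsafe-math-optimizations, saving one operation. */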
9196 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9197 if (operand_equal_p (arg0, arg1, 0))
9199 tree tem = fold_strip_sign_ops (arg0);
9200 if (tem != NULL_TREE)
9202 tem = fold_convert (type, tem);
9203 return fold_build2 (MULT_EXPR, type, tem, tem);
9207 /* Optimize z * conj(z) for floating point complex numbers.
9208 Guarded by flag_unsafe_math_optimizations as non-finite
9209 imaginary components don't produce scalar results. */
9210 if (flag_unsafe_math_optimizations
9211 && TREE_CODE (arg0) == CONJ_EXPR
9212 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9213 return fold_mult_zconjz (type, arg1);
9214 if (flag_unsafe_math_optimizations
9215 && TREE_CODE (arg1) == CONJ_EXPR
9216 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9217 return fold_mult_zconjz (type, arg0);
9219 if (flag_unsafe_math_optimizations)
9221 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9222 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9224 /* Optimizations of root(...)*root(...). */
9225 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9227 tree rootfn, arg, arglist;
9228 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9229 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9231 /* Optimize sqrt(x)*sqrt(x) as x. */
9232 if (BUILTIN_SQRT_P (fcode0)
9233 && operand_equal_p (arg00, arg10, 0)
9234 && ! HONOR_SNANS (TYPE_MODE (type)))
9235 return arg00;
9237 /* Optimize root(x)*root(y) as root(x*y). */
9238 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9239 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9240 arglist = build_tree_list (NULL_TREE, arg);
9241 return build_function_call_expr (rootfn, arglist);
9244 /* Optimize expN(x)*expN(y) as expN(x+y). */
9245 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9247 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9248 tree arg = fold_build2 (PLUS_EXPR, type,
9249 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9250 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9251 tree arglist = build_tree_list (NULL_TREE, arg);
9252 return build_function_call_expr (expfn, arglist);
9255 /* Optimizations of pow(...)*pow(...). */
9256 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9257 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9258 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9260 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9261 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9262 1)));
9263 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9264 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9265 1)));
9267 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9268 if (operand_equal_p (arg01, arg11, 0))
9270 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9271 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9272 tree arglist = tree_cons (NULL_TREE, arg,
9273 build_tree_list (NULL_TREE,
9274 arg01));
9275 return build_function_call_expr (powfn, arglist);
9278 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9279 if (operand_equal_p (arg00, arg10, 0))
9281 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9282 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9283 tree arglist = tree_cons (NULL_TREE, arg00,
9284 build_tree_list (NULL_TREE,
9285 arg));
9286 return build_function_call_expr (powfn, arglist);
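/* E.g. pow (x, 2.0) * pow (y, 2.0) becomes pow (x * y, 2.0), and
pow (x, 2.0) * pow (x, 3.0) becomes pow (x, 5.0). */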
9290 /* Optimize tan(x)*cos(x) as sin(x). */
9291 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9292 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9293 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9294 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9295 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9296 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9297 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9298 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9300 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9302 if (sinfn != NULL_TREE)
9303 return build_function_call_expr (sinfn,
9304 TREE_OPERAND (arg0, 1));
9307 /* Optimize x*pow(x,c) as pow(x,c+1). */
9308 if (fcode1 == BUILT_IN_POW
9309 || fcode1 == BUILT_IN_POWF
9310 || fcode1 == BUILT_IN_POWL)
9312 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9313 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9314 1)));
9315 if (TREE_CODE (arg11) == REAL_CST
9316 && ! TREE_CONSTANT_OVERFLOW (arg11)
9317 && operand_equal_p (arg0, arg10, 0))
9319 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9320 REAL_VALUE_TYPE c;
9321 tree arg, arglist;
9323 c = TREE_REAL_CST (arg11);
9324 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9325 arg = build_real (type, c);
9326 arglist = build_tree_list (NULL_TREE, arg);
9327 arglist = tree_cons (NULL_TREE, arg0, arglist);
9328 return build_function_call_expr (powfn, arglist);
9332 /* Optimize pow(x,c)*x as pow(x,c+1). */
9333 if (fcode0 == BUILT_IN_POW
9334 || fcode0 == BUILT_IN_POWF
9335 || fcode0 == BUILT_IN_POWL)
9337 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9338 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9339 1)));
9340 if (TREE_CODE (arg01) == REAL_CST
9341 && ! TREE_CONSTANT_OVERFLOW (arg01)
9342 && operand_equal_p (arg1, arg00, 0))
9344 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9345 REAL_VALUE_TYPE c;
9346 tree arg, arglist;
9348 c = TREE_REAL_CST (arg01);
9349 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9350 arg = build_real (type, c);
9351 arglist = build_tree_list (NULL_TREE, arg);
9352 arglist = tree_cons (NULL_TREE, arg1, arglist);
9353 return build_function_call_expr (powfn, arglist);
9357 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9358 if (! optimize_size
9359 && operand_equal_p (arg0, arg1, 0))
9361 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9363 if (powfn)
9365 tree arg = build_real (type, dconst2);
9366 tree arglist = build_tree_list (NULL_TREE, arg);
9367 arglist = tree_cons (NULL_TREE, arg0, arglist);
9368 return build_function_call_expr (powfn, arglist);
9373 goto associate;
9375 case BIT_IOR_EXPR:
9376 bit_ior:
9377 if (integer_all_onesp (arg1))
9378 return omit_one_operand (type, arg1, arg0);
9379 if (integer_zerop (arg1))
9380 return non_lvalue (fold_convert (type, arg0));
9381 if (operand_equal_p (arg0, arg1, 0))
9382 return non_lvalue (fold_convert (type, arg0));
9384 /* ~X | X is -1. */
9385 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9386 && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9387 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9389 t1 = build_int_cst (type, -1);
9390 t1 = force_fit_type (t1, 0, false, false);
9391 return omit_one_operand (type, t1, arg1);
9394 /* X | ~X is -1. */
9395 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9396 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9397 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9399 t1 = build_int_cst (type, -1);
9400 t1 = force_fit_type (t1, 0, false, false);
9401 return omit_one_operand (type, t1, arg0);
9404 /* Canonicalize (X & C1) | C2. */
9405 if (TREE_CODE (arg0) == BIT_AND_EXPR
9406 && TREE_CODE (arg1) == INTEGER_CST
9407 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9409 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9410 int width = TYPE_PRECISION (type);
9411 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9412 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9413 hi2 = TREE_INT_CST_HIGH (arg1);
9414 lo2 = TREE_INT_CST_LOW (arg1);
9416 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9417 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9418 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9420 if (width > HOST_BITS_PER_WIDE_INT)
9422 mhi = (unsigned HOST_WIDE_INT) -1
9423 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9424 mlo = -1;
9426 else
9428 mhi = 0;
9429 mlo = (unsigned HOST_WIDE_INT) -1
9430 >> (HOST_BITS_PER_WIDE_INT - width);
9433 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9434 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9435 return fold_build2 (BIT_IOR_EXPR, type,
9436 TREE_OPERAND (arg0, 0), arg1);
9438 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9439 hi1 &= mhi;
9440 lo1 &= mlo;
9441 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9442 return fold_build2 (BIT_IOR_EXPR, type,
9443 fold_build2 (BIT_AND_EXPR, type,
9444 TREE_OPERAND (arg0, 0),
9445 build_int_cst_wide (type,
9446 lo1 & ~lo2,
9447 hi1 & ~hi2)),
9448 arg1);
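/* E.g. (x & 0xF0) | 0x3C becomes (x & 0xC0) | 0x3C: the 0x30 bits
of C1 are forced on by C2 anyway, so they are dropped from the
mask. */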
9451 /* (X & Y) | Y is (X, Y). */
9452 if (TREE_CODE (arg0) == BIT_AND_EXPR
9453 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9454 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9455 /* (X & Y) | X is (Y, X). */
9456 if (TREE_CODE (arg0) == BIT_AND_EXPR
9457 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9458 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9459 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9460 /* X | (X & Y) is (Y, X). */
9461 if (TREE_CODE (arg1) == BIT_AND_EXPR
9462 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9463 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9464 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9465 /* X | (Y & X) is (Y, X). */
9466 if (TREE_CODE (arg1) == BIT_AND_EXPR
9467 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9468 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9469 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9471 t1 = distribute_bit_expr (code, type, arg0, arg1);
9472 if (t1 != NULL_TREE)
9473 return t1;
9475 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9477 This results in more efficient code for machines without a NAND
9478 instruction. Combine will canonicalize to the first form
9479 which will allow use of NAND instructions provided by the
9480 backend if they exist. */
9481 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9482 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9484 return fold_build1 (BIT_NOT_EXPR, type,
9485 build2 (BIT_AND_EXPR, type,
9486 TREE_OPERAND (arg0, 0),
9487 TREE_OPERAND (arg1, 0)));
9490 /* See if this can be simplified into a rotate first. If that
9491 is unsuccessful continue in the association code. */
9492 goto bit_rotate;
9494 case BIT_XOR_EXPR:
9495 if (integer_zerop (arg1))
9496 return non_lvalue (fold_convert (type, arg0));
9497 if (integer_all_onesp (arg1))
9498 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9499 if (operand_equal_p (arg0, arg1, 0))
9500 return omit_one_operand (type, integer_zero_node, arg0);
9502 /* ~X ^ X is -1. */
9503 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9504 && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9505 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9507 t1 = build_int_cst (type, -1);
9508 t1 = force_fit_type (t1, 0, false, false);
9509 return omit_one_operand (type, t1, arg1);
9512 /* X ^ ~X is -1. */
9513 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9514 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9515 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9517 t1 = build_int_cst (type, -1);
9518 t1 = force_fit_type (t1, 0, false, false);
9519 return omit_one_operand (type, t1, arg0);
9522 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9523 with a constant, and the two constants have no bits in common,
9524 we should treat this as a BIT_IOR_EXPR since this may produce more
9525 simplifications. */
9526 if (TREE_CODE (arg0) == BIT_AND_EXPR
9527 && TREE_CODE (arg1) == BIT_AND_EXPR
9528 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9529 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9530 && integer_zerop (const_binop (BIT_AND_EXPR,
9531 TREE_OPERAND (arg0, 1),
9532 TREE_OPERAND (arg1, 1), 0)))
9534 code = BIT_IOR_EXPR;
9535 goto bit_ior;
9538 /* (X | Y) ^ X -> Y & ~X. */
9539 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9540 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9542 tree t2 = TREE_OPERAND (arg0, 1);
9543 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9544 arg1);
9545 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9546 fold_convert (type, t1));
9547 return t1;
9550 /* (Y | X) ^ X -> Y & ~X. */
9551 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9552 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9554 tree t2 = TREE_OPERAND (arg0, 0);
9555 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9556 arg1);
9557 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9558 fold_convert (type, t1));
9559 return t1;
9562 /* X ^ (X | Y) -> Y & ~X. */
9563 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9564 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9566 tree t2 = TREE_OPERAND (arg1, 1);
9567 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9568 arg0);
9569 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9570 fold_convert (type, t1));
9571 return t1;
9574 /* X ^ (Y | X) -> Y & ~X. */
9575 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9576 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9578 tree t2 = TREE_OPERAND (arg1, 0);
9579 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9580 arg0);
9581 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9582 fold_convert (type, t1));
9583 return t1;
9586 /* Convert ~X ^ ~Y to X ^ Y. */
9587 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9588 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9589 return fold_build2 (code, type,
9590 fold_convert (type, TREE_OPERAND (arg0, 0)),
9591 fold_convert (type, TREE_OPERAND (arg1, 0)));
9593 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9594 if (TREE_CODE (arg0) == BIT_AND_EXPR
9595 && integer_onep (TREE_OPERAND (arg0, 1))
9596 && integer_onep (arg1))
9597 return fold_build2 (EQ_EXPR, type, arg0,
9598 build_int_cst (TREE_TYPE (arg0), 0));
9600 /* Fold (X & Y) ^ Y as ~X & Y. */
9601 if (TREE_CODE (arg0) == BIT_AND_EXPR
9602 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9604 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9605 return fold_build2 (BIT_AND_EXPR, type,
9606 fold_build1 (BIT_NOT_EXPR, type, tem),
9607 fold_convert (type, arg1));
9609 /* Fold (X & Y) ^ X as ~Y & X. */
9610 if (TREE_CODE (arg0) == BIT_AND_EXPR
9611 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9612 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9614 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9615 return fold_build2 (BIT_AND_EXPR, type,
9616 fold_build1 (BIT_NOT_EXPR, type, tem),
9617 fold_convert (type, arg1));
9619 /* Fold X ^ (X & Y) as X & ~Y. */
9620 if (TREE_CODE (arg1) == BIT_AND_EXPR
9621 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9623 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9624 return fold_build2 (BIT_AND_EXPR, type,
9625 fold_convert (type, arg0),
9626 fold_build1 (BIT_NOT_EXPR, type, tem));
9628 /* Fold X ^ (Y & X) as ~Y & X. */
9629 if (TREE_CODE (arg1) == BIT_AND_EXPR
9630 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9631 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9633 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9634 return fold_build2 (BIT_AND_EXPR, type,
9635 fold_build1 (BIT_NOT_EXPR, type, tem),
9636 fold_convert (type, arg0));
9639 /* See if this can be simplified into a rotate first. If that
9640 is unsuccessful continue in the association code. */
9641 goto bit_rotate;
9643 case BIT_AND_EXPR:
9644 if (integer_all_onesp (arg1))
9645 return non_lvalue (fold_convert (type, arg0));
9646 if (integer_zerop (arg1))
9647 return omit_one_operand (type, arg1, arg0);
9648 if (operand_equal_p (arg0, arg1, 0))
9649 return non_lvalue (fold_convert (type, arg0));
9651 /* ~X & X is always zero. */
9652 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9653 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9654 return omit_one_operand (type, integer_zero_node, arg1);
9656 /* X & ~X is always zero. */
9657 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9658 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9659 return omit_one_operand (type, integer_zero_node, arg0);
9661 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9662 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9663 && TREE_CODE (arg1) == INTEGER_CST
9664 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9665 return fold_build2 (BIT_IOR_EXPR, type,
9666 fold_build2 (BIT_AND_EXPR, type,
9667 TREE_OPERAND (arg0, 0), arg1),
9668 fold_build2 (BIT_AND_EXPR, type,
9669 TREE_OPERAND (arg0, 1), arg1));
9671 /* (X | Y) & Y is (X, Y). */
9672 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9673 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9674 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9675 /* (X | Y) & X is (Y, X). */
9676 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9677 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9678 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9679 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9680 /* X & (X | Y) is (Y, X). */
9681 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9682 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9683 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9684 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9685 /* X & (Y | X) is (Y, X). */
9686 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9687 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9688 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9689 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9691 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9692 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9693 && integer_onep (TREE_OPERAND (arg0, 1))
9694 && integer_onep (arg1))
9696 tem = TREE_OPERAND (arg0, 0);
9697 return fold_build2 (EQ_EXPR, type,
9698 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9699 build_int_cst (TREE_TYPE (tem), 1)),
9700 build_int_cst (TREE_TYPE (tem), 0));
9702 /* Fold ~X & 1 as (X & 1) == 0. */
9703 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9704 && integer_onep (arg1))
9706 tem = TREE_OPERAND (arg0, 0);
9707 return fold_build2 (EQ_EXPR, type,
9708 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9709 build_int_cst (TREE_TYPE (tem), 1)),
9710 build_int_cst (TREE_TYPE (tem), 0));
9713 /* Fold (X ^ Y) & Y as ~X & Y. */
9714 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9715 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9717 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9718 return fold_build2 (BIT_AND_EXPR, type,
9719 fold_build1 (BIT_NOT_EXPR, type, tem),
9720 fold_convert (type, arg1));
9722 /* Fold (X ^ Y) & X as ~Y & X. */
9723 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9724 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9725 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9727 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9728 return fold_build2 (BIT_AND_EXPR, type,
9729 fold_build1 (BIT_NOT_EXPR, type, tem),
9730 fold_convert (type, arg1));
9732 /* Fold X & (X ^ Y) as X & ~Y. */
9733 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9734 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9736 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9737 return fold_build2 (BIT_AND_EXPR, type,
9738 fold_convert (type, arg0),
9739 fold_build1 (BIT_NOT_EXPR, type, tem));
9741 /* Fold X & (Y ^ X) as ~Y & X. */
9742 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9743 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9744 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9746 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9747 return fold_build2 (BIT_AND_EXPR, type,
9748 fold_build1 (BIT_NOT_EXPR, type, tem),
9749 fold_convert (type, arg0));
9752 t1 = distribute_bit_expr (code, type, arg0, arg1);
9753 if (t1 != NULL_TREE)
9754 return t1;
9755 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9756 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9757 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9759 unsigned int prec
9760 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9762 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9763 && (~TREE_INT_CST_LOW (arg1)
9764 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9765 return fold_convert (type, TREE_OPERAND (arg0, 0));
9768 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9770 This results in more efficient code for machines without a NOR
9771 instruction. Combine will canonicalize to the first form
9772 which will allow use of NOR instructions provided by the
9773 backend if they exist. */
9774 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9775 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9777 return fold_build1 (BIT_NOT_EXPR, type,
9778 build2 (BIT_IOR_EXPR, type,
9779 TREE_OPERAND (arg0, 0),
9780 TREE_OPERAND (arg1, 0)));
9783 goto associate;
9785 case RDIV_EXPR:
9786 /* Don't touch a floating-point divide by zero unless the mode
9787 of the constant can represent infinity. */
9788 if (TREE_CODE (arg1) == REAL_CST
9789 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9790 && real_zerop (arg1))
9791 return NULL_TREE;
9793 /* Optimize A / A to 1.0 if we don't care about
9794 NaNs or Infinities. Skip the transformation
9795 for non-real operands. */
9796 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9797 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9798 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9799 && operand_equal_p (arg0, arg1, 0))
9801 tree r = build_real (TREE_TYPE (arg0), dconst1);
9803 return omit_two_operands (type, r, arg0, arg1);
9806 /* The complex version of the above A / A optimization. */
9807 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9808 && operand_equal_p (arg0, arg1, 0))
9810 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9811 if (! HONOR_NANS (TYPE_MODE (elem_type))
9812 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9814 tree r = build_real (elem_type, dconst1);
9815 /* omit_two_operands will call fold_convert for us. */
9816 return omit_two_operands (type, r, arg0, arg1);
9820 /* (-A) / (-B) -> A / B */
9821 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9822 return fold_build2 (RDIV_EXPR, type,
9823 TREE_OPERAND (arg0, 0),
9824 negate_expr (arg1));
9825 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9826 return fold_build2 (RDIV_EXPR, type,
9827 negate_expr (arg0),
9828 TREE_OPERAND (arg1, 0));
9830 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9831 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9832 && real_onep (arg1))
9833 return non_lvalue (fold_convert (type, arg0));
9835 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9836 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9837 && real_minus_onep (arg1))
9838 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9840 /* If ARG1 is a constant, we can convert this to a multiply by the
9841 reciprocal. This does not have the same rounding properties,
9842 so only do this if -funsafe-math-optimizations is enabled. We can actually
9843 always safely do it if ARG1 is a power of two, but it's hard to
9844 tell if it is or not in a portable manner. */
9845 if (TREE_CODE (arg1) == REAL_CST)
9847 if (flag_unsafe_math_optimizations
9848 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9849 arg1, 0)))
9850 return fold_build2 (MULT_EXPR, type, arg0, tem);
9851 /* Find the reciprocal if optimizing and the result is exact. */
9852 if (optimize)
9854 REAL_VALUE_TYPE r;
9855 r = TREE_REAL_CST (arg1);
9856 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
9858 tem = build_real (type, r);
9859 return fold_build2 (MULT_EXPR, type,
9860 fold_convert (type, arg0), tem);
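/* E.g. x / 4.0 becomes x * 0.25 when optimizing, even without
-funsafe-math-optimizations, because 0.25 is the exact reciprocal
of 4.0. */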
9864 /* Convert A/B/C to A/(B*C). */
9865 if (flag_unsafe_math_optimizations
9866 && TREE_CODE (arg0) == RDIV_EXPR)
9867 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9868 fold_build2 (MULT_EXPR, type,
9869 TREE_OPERAND (arg0, 1), arg1));
9871 /* Convert A/(B/C) to (A/B)*C. */
9872 if (flag_unsafe_math_optimizations
9873 && TREE_CODE (arg1) == RDIV_EXPR)
9874 return fold_build2 (MULT_EXPR, type,
9875 fold_build2 (RDIV_EXPR, type, arg0,
9876 TREE_OPERAND (arg1, 0)),
9877 TREE_OPERAND (arg1, 1));
9879 /* Convert C1/(X*C2) into (C1/C2)/X. */
9880 if (flag_unsafe_math_optimizations
9881 && TREE_CODE (arg1) == MULT_EXPR
9882 && TREE_CODE (arg0) == REAL_CST
9883 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9885 tree tem = const_binop (RDIV_EXPR, arg0,
9886 TREE_OPERAND (arg1, 1), 0);
9887 if (tem)
9888 return fold_build2 (RDIV_EXPR, type, tem,
9889 TREE_OPERAND (arg1, 0));
9892 if (flag_unsafe_math_optimizations)
9894 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9895 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9897 /* Optimize sin(x)/cos(x) as tan(x). */
9898 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9899 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9900 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9901 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9902 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9904 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9906 if (tanfn != NULL_TREE)
9907 return build_function_call_expr (tanfn,
9908 TREE_OPERAND (arg0, 1));
9911 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9912 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9913 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9914 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9915 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9916 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9918 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9920 if (tanfn != NULL_TREE)
9922 tree tmp = TREE_OPERAND (arg0, 1);
9923 tmp = build_function_call_expr (tanfn, tmp);
9924 return fold_build2 (RDIV_EXPR, type,
9925 build_real (type, dconst1), tmp);
9929 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9930 NaNs or Infinities. */
9931 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9932 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9933 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9935 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9936 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9938 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9939 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9940 && operand_equal_p (arg00, arg01, 0))
9942 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9944 if (cosfn != NULL_TREE)
9945 return build_function_call_expr (cosfn,
9946 TREE_OPERAND (arg0, 1));
9950 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9951 NaNs or Infinities. */
9952 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9953 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9954 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9956 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9957 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9959 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9960 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9961 && operand_equal_p (arg00, arg01, 0))
9963 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9965 if (cosfn != NULL_TREE)
9967 tree tmp = TREE_OPERAND (arg0, 1);
9968 tmp = build_function_call_expr (cosfn, tmp);
9969 return fold_build2 (RDIV_EXPR, type,
9970 build_real (type, dconst1),
9971 tmp);
9976 /* Optimize pow(x,c)/x as pow(x,c-1). */
9977 if (fcode0 == BUILT_IN_POW
9978 || fcode0 == BUILT_IN_POWF
9979 || fcode0 == BUILT_IN_POWL)
9981 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9982 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9983 if (TREE_CODE (arg01) == REAL_CST
9984 && ! TREE_CONSTANT_OVERFLOW (arg01)
9985 && operand_equal_p (arg1, arg00, 0))
9987 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9988 REAL_VALUE_TYPE c;
9989 tree arg, arglist;
9991 c = TREE_REAL_CST (arg01);
9992 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9993 arg = build_real (type, c);
9994 arglist = build_tree_list (NULL_TREE, arg);
9995 arglist = tree_cons (NULL_TREE, arg1, arglist);
9996 return build_function_call_expr (powfn, arglist);
10000 /* Optimize x/expN(y) into x*expN(-y). */
10001 if (BUILTIN_EXPONENT_P (fcode1))
10003 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10004 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10005 tree arglist = build_tree_list (NULL_TREE,
10006 fold_convert (type, arg));
10007 arg1 = build_function_call_expr (expfn, arglist);
10008 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10011 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10012 if (fcode1 == BUILT_IN_POW
10013 || fcode1 == BUILT_IN_POWF
10014 || fcode1 == BUILT_IN_POWL)
10016 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10017 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10018 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10019 tree neg11 = fold_convert (type, negate_expr (arg11));
10020 tree arglist = tree_cons (NULL_TREE, arg10,
10021 build_tree_list (NULL_TREE, neg11));
10022 arg1 = build_function_call_expr (powfn, arglist);
10023 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10026 return NULL_TREE;
10028 case TRUNC_DIV_EXPR:
10029 case FLOOR_DIV_EXPR:
10030 /* Simplify A / (B << N) where A and B are positive and B is
10031 a power of 2, to A >> (N + log2(B)). */
10032 strict_overflow_p = false;
10033 if (TREE_CODE (arg1) == LSHIFT_EXPR
10034 && (TYPE_UNSIGNED (type)
10035 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10037 tree sval = TREE_OPERAND (arg1, 0);
10038 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10040 tree sh_cnt = TREE_OPERAND (arg1, 1);
10041 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10043 if (strict_overflow_p)
10044 fold_overflow_warning (("assuming signed overflow does not "
10045 "occur when simplifying A / (B << N)"),
10046 WARN_STRICT_OVERFLOW_MISC);
10048 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10049 sh_cnt, build_int_cst (NULL_TREE, pow2));
10050 return fold_build2 (RSHIFT_EXPR, type,
10051 fold_convert (type, arg0), sh_cnt);
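/* E.g. for unsigned x, x / (8 << n) becomes x >> (n + 3), since
log2 (8) == 3. */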
10054 /* Fall through. */
10056 case ROUND_DIV_EXPR:
10057 case CEIL_DIV_EXPR:
10058 case EXACT_DIV_EXPR:
10059 if (integer_onep (arg1))
10060 return non_lvalue (fold_convert (type, arg0));
10061 if (integer_zerop (arg1))
10062 return NULL_TREE;
10063 /* X / -1 is -X. */
10064 if (!TYPE_UNSIGNED (type)
10065 && TREE_CODE (arg1) == INTEGER_CST
10066 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10067 && TREE_INT_CST_HIGH (arg1) == -1)
10068 return fold_convert (type, negate_expr (arg0));
10070 /* Convert -A / -B to A / B when the type is signed and overflow is
10071 undefined. */
10072 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10073 && TREE_CODE (arg0) == NEGATE_EXPR
10074 && negate_expr_p (arg1))
10076 if (INTEGRAL_TYPE_P (type))
10077 fold_overflow_warning (("assuming signed overflow does not occur "
10078 "when distributing negation across "
10079 "division"),
10080 WARN_STRICT_OVERFLOW_MISC);
10081 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10082 negate_expr (arg1));
10084 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10085 && TREE_CODE (arg1) == NEGATE_EXPR
10086 && negate_expr_p (arg0))
10088 if (INTEGRAL_TYPE_P (type))
10089 fold_overflow_warning (("assuming signed overflow does not occur "
10090 "when distributing negation across "
10091 "division"),
10092 WARN_STRICT_OVERFLOW_MISC);
10093 return fold_build2 (code, type, negate_expr (arg0),
10094 TREE_OPERAND (arg1, 0));
10097 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10098 operation, EXACT_DIV_EXPR.
10100 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10101 At one time others generated faster code, but it's not clear whether
10102 they still do after the last round of changes to the DIV code in expmed.c. */
10103 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10104 && multiple_of_p (type, arg0, arg1))
10105 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10107 strict_overflow_p = false;
10108 if (TREE_CODE (arg1) == INTEGER_CST
10109 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10110 &strict_overflow_p)))
10112 if (strict_overflow_p)
10113 fold_overflow_warning (("assuming signed overflow does not occur "
10114 "when simplifying division"),
10115 WARN_STRICT_OVERFLOW_MISC);
10116 return fold_convert (type, tem);
10119 return NULL_TREE;
10121 case CEIL_MOD_EXPR:
10122 case FLOOR_MOD_EXPR:
10123 case ROUND_MOD_EXPR:
10124 case TRUNC_MOD_EXPR:
10125 /* X % 1 is always zero, but be sure to preserve any side
10126 effects in X. */
10127 if (integer_onep (arg1))
10128 return omit_one_operand (type, integer_zero_node, arg0);
10130 /* For X % 0, return X % 0 unchanged so that we get the
10131 proper warnings and errors. */
10132 if (integer_zerop (arg1))
10133 return NULL_TREE;
10135 /* 0 % X is always zero, but be sure to preserve any side
10136 effects in X. Place this after checking for X == 0. */
10137 if (integer_zerop (arg0))
10138 return omit_one_operand (type, integer_zero_node, arg1);
10140 /* X % -1 is zero. */
10141 if (!TYPE_UNSIGNED (type)
10142 && TREE_CODE (arg1) == INTEGER_CST
10143 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10144 && TREE_INT_CST_HIGH (arg1) == -1)
10145 return omit_one_operand (type, integer_zero_node, arg0);
10147 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10148 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10149 strict_overflow_p = false;
10150 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10151 && (TYPE_UNSIGNED (type)
10152 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10154 tree c = arg1;
10155 /* Also optimize A % (C << N) where C is a power of 2,
10156 to A & ((C << N) - 1). */
10157 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10158 c = TREE_OPERAND (arg1, 0);
10160 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10162 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
10163 arg1, integer_one_node);
10164 if (strict_overflow_p)
10165 fold_overflow_warning (("assuming signed overflow does not "
10166 "occur when simplifying "
10167 "X % (power of two)"),
10168 WARN_STRICT_OVERFLOW_MISC);
10169 return fold_build2 (BIT_AND_EXPR, type,
10170 fold_convert (type, arg0),
10171 fold_convert (type, mask));
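/* E.g. for unsigned x, x % 16 becomes x & 15, and x % (4 << n)
becomes x & ((4 << n) - 1). */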
10175 /* X % -C is the same as X % C. */
10176 if (code == TRUNC_MOD_EXPR
10177 && !TYPE_UNSIGNED (type)
10178 && TREE_CODE (arg1) == INTEGER_CST
10179 && !TREE_CONSTANT_OVERFLOW (arg1)
10180 && TREE_INT_CST_HIGH (arg1) < 0
10181 && !TYPE_OVERFLOW_TRAPS (type)
10182 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10183 && !sign_bit_p (arg1, arg1))
10184 return fold_build2 (code, type, fold_convert (type, arg0),
10185 fold_convert (type, negate_expr (arg1)));
10187 /* X % -Y is the same as X % Y. */
10188 if (code == TRUNC_MOD_EXPR
10189 && !TYPE_UNSIGNED (type)
10190 && TREE_CODE (arg1) == NEGATE_EXPR
10191 && !TYPE_OVERFLOW_TRAPS (type))
10192 return fold_build2 (code, type, fold_convert (type, arg0),
10193 fold_convert (type, TREE_OPERAND (arg1, 0)));
10195 if (TREE_CODE (arg1) == INTEGER_CST
10196 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10197 &strict_overflow_p)))
10199 if (strict_overflow_p)
10200 fold_overflow_warning (("assuming signed overflow does not occur "
10201 "when simplifying modulos"),
10202 WARN_STRICT_OVERFLOW_MISC);
10203 return fold_convert (type, tem);
10206 return NULL_TREE;
10208 case LROTATE_EXPR:
10209 case RROTATE_EXPR:
10210 if (integer_all_onesp (arg0))
10211 return omit_one_operand (type, arg0, arg1);
10212 goto shift;
10214 case RSHIFT_EXPR:
10215 /* Optimize -1 >> x for arithmetic right shifts. */
10216 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10217 return omit_one_operand (type, arg0, arg1);
10218 /* ... fall through ... */
10220 case LSHIFT_EXPR:
10221 shift:
10222 if (integer_zerop (arg1))
10223 return non_lvalue (fold_convert (type, arg0));
10224 if (integer_zerop (arg0))
10225 return omit_one_operand (type, arg0, arg1);
10227 /* Since a negative shift count is not well-defined,
10228 don't try to compute it in the compiler. */
10229 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10230 return NULL_TREE;
10232 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10233 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10234 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10235 && host_integerp (TREE_OPERAND (arg0, 1), false)
10236 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10238 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10239 + TREE_INT_CST_LOW (arg1));
10241 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10242 being well defined. */
10243 if (low >= TYPE_PRECISION (type))
10245 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10246 low = low % TYPE_PRECISION (type);
10247 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10248 return build_int_cst (type, 0);
10249 else
10250 low = TYPE_PRECISION (type) - 1;
10253 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10254 build_int_cst (type, low));
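/* E.g. for a 32-bit type, (x >> 10) >> 25 has a combined count of 35:
it becomes 0 for unsigned x and x >> 31 (the sign bit) for signed x,
while for rotates the count is reduced modulo the precision. */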
10257 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10258 into x & ((unsigned)-1 >> c) for unsigned types. */
10259 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10260 || (TYPE_UNSIGNED (type)
10261 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10262 && host_integerp (arg1, false)
10263 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10264 && host_integerp (TREE_OPERAND (arg0, 1), false)
10265 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10267 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10268 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10269 tree lshift;
10270 tree arg00;
10272 if (low0 == low1)
10274 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10276 lshift = build_int_cst (type, -1);
10277 lshift = int_const_binop (code, lshift, arg1, 0);
10279 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
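/* E.g. (x >> 3) << 3 becomes x & (-1 << 3), i.e. x & ~7, clearing
the low three bits with a single AND. */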
10283 /* Rewrite an LROTATE_EXPR by a constant into an
10284 RROTATE_EXPR by a new constant. */
10285 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10287 tree tem = build_int_cst (NULL_TREE,
10288 GET_MODE_BITSIZE (TYPE_MODE (type)));
10289 tem = fold_convert (TREE_TYPE (arg1), tem);
10290 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10291 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
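/* E.g. a 32-bit rotate left by 8 becomes a rotate right by 24. */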
10294 /* If we have a rotate of a bit operation with the rotate count and
10295 the second operand of the bit operation both constant,
10296 permute the two operations. */
10297 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10298 && (TREE_CODE (arg0) == BIT_AND_EXPR
10299 || TREE_CODE (arg0) == BIT_IOR_EXPR
10300 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10301 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10302 return fold_build2 (TREE_CODE (arg0), type,
10303 fold_build2 (code, type,
10304 TREE_OPERAND (arg0, 0), arg1),
10305 fold_build2 (code, type,
10306 TREE_OPERAND (arg0, 1), arg1));
10308 /* Two consecutive rotates adding up to the width of the mode can
10309 be ignored. */
10310 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10311 && TREE_CODE (arg0) == RROTATE_EXPR
10312 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10313 && TREE_INT_CST_HIGH (arg1) == 0
10314 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10315 && ((TREE_INT_CST_LOW (arg1)
10316 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10317 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10318 return TREE_OPERAND (arg0, 0);
10320 return NULL_TREE;
10322 case MIN_EXPR:
10323 if (operand_equal_p (arg0, arg1, 0))
10324 return omit_one_operand (type, arg0, arg1);
10325 if (INTEGRAL_TYPE_P (type)
10326 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10327 return omit_one_operand (type, arg1, arg0);
10328 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10329 if (tem)
10330 return tem;
10331 goto associate;
10333 case MAX_EXPR:
10334 if (operand_equal_p (arg0, arg1, 0))
10335 return omit_one_operand (type, arg0, arg1);
10336 if (INTEGRAL_TYPE_P (type)
10337 && TYPE_MAX_VALUE (type)
10338 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10339 return omit_one_operand (type, arg1, arg0);
10340 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10341 if (tem)
10342 return tem;
10343 goto associate;
10345 case TRUTH_ANDIF_EXPR:
10346 /* Note that the operands of this must be ints
10347 and their values must be 0 or 1.
10348 ("true" is a fixed value perhaps depending on the language.) */
10349 /* If first arg is constant zero, return it. */
10350 if (integer_zerop (arg0))
10351 return fold_convert (type, arg0);
10352 case TRUTH_AND_EXPR:
10353 /* If either arg is constant true, drop it. */
10354 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10355 return non_lvalue (fold_convert (type, arg1));
10356 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10357 /* Preserve sequence points. */
10358 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10359 return non_lvalue (fold_convert (type, arg0));
10360 /* If second arg is constant zero, result is zero, but first arg
10361 must be evaluated. */
10362 if (integer_zerop (arg1))
10363 return omit_one_operand (type, arg1, arg0);
10364 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10365 case will be handled here. */
10366 if (integer_zerop (arg0))
10367 return omit_one_operand (type, arg0, arg1);
10369 /* !X && X is always false. */
10370 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10371 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10372 return omit_one_operand (type, integer_zero_node, arg1);
10373 /* X && !X is always false. */
10374 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10375 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10376 return omit_one_operand (type, integer_zero_node, arg0);
10378 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10379 means A >= Y && A != MAX, but in this case we know that
10380 A < X <= MAX. */
10382 if (!TREE_SIDE_EFFECTS (arg0)
10383 && !TREE_SIDE_EFFECTS (arg1))
10385 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10386 if (tem && !operand_equal_p (tem, arg0, 0))
10387 return fold_build2 (code, type, tem, arg1);
10389 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10390 if (tem && !operand_equal_p (tem, arg1, 0))
10391 return fold_build2 (code, type, arg0, tem);
10394 truth_andor:
10395 /* We only do these simplifications if we are optimizing. */
10396 if (!optimize)
10397 return NULL_TREE;
10399 /* Check for things like (A || B) && (A || C). We can convert this
10400 to A || (B && C). Note that either operator can be any of the four
10401 truth and/or operations and the transformation will still be
10402 valid. Also note that we only care about order for the
10403 ANDIF and ORIF operators. If B contains side effects, this
10404 might change the truth-value of A. */
10405 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10406 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10407 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10408 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10409 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10410 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10412 tree a00 = TREE_OPERAND (arg0, 0);
10413 tree a01 = TREE_OPERAND (arg0, 1);
10414 tree a10 = TREE_OPERAND (arg1, 0);
10415 tree a11 = TREE_OPERAND (arg1, 1);
10416 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10417 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10418 && (code == TRUTH_AND_EXPR
10419 || code == TRUTH_OR_EXPR));
10421 if (operand_equal_p (a00, a10, 0))
10422 return fold_build2 (TREE_CODE (arg0), type, a00,
10423 fold_build2 (code, type, a01, a11));
10424 else if (commutative && operand_equal_p (a00, a11, 0))
10425 return fold_build2 (TREE_CODE (arg0), type, a00,
10426 fold_build2 (code, type, a01, a10));
10427 else if (commutative && operand_equal_p (a01, a10, 0))
10428 return fold_build2 (TREE_CODE (arg0), type, a01,
10429 fold_build2 (code, type, a00, a11));
10431 /* This case is tricky because we must either have commutative
10432 operators or else A10 must not have side-effects. */
10434 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10435 && operand_equal_p (a01, a11, 0))
10436 return fold_build2 (TREE_CODE (arg0), type,
10437 fold_build2 (code, type, a00, a10),
10438 a01);
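/* E.g. (a || b) && (a || c) becomes a || (b && c), testing A only
once. */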
10441 /* See if we can build a range comparison. */
10442 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10443 return tem;
10445 /* Check for the possibility of merging component references. If our
10446 lhs is another similar operation, try to merge its rhs with our
10447 rhs. Then try to merge our lhs and rhs. */
10448 if (TREE_CODE (arg0) == code
10449 && 0 != (tem = fold_truthop (code, type,
10450 TREE_OPERAND (arg0, 1), arg1)))
10451 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10453 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10454 return tem;
10456 return NULL_TREE;
10458 case TRUTH_ORIF_EXPR:
10459 /* Note that the operands of this must be ints
10460 and their values must be 0 or true.
10461 ("true" is a fixed value perhaps depending on the language.) */
10462 /* If first arg is constant true, return it. */
10463 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10464 return fold_convert (type, arg0);
10465 case TRUTH_OR_EXPR:
10466 /* If either arg is constant zero, drop it. */
10467 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10468 return non_lvalue (fold_convert (type, arg1));
10469 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10470 /* Preserve sequence points. */
10471 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10472 return non_lvalue (fold_convert (type, arg0));
10473 /* If second arg is constant true, result is true, but we must
10474 evaluate first arg. */
10475 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10476 return omit_one_operand (type, arg1, arg0);
10477 /* Likewise for first arg, but note this only occurs here for
10478 TRUTH_OR_EXPR. */
10479 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10480 return omit_one_operand (type, arg0, arg1);
10482 /* !X || X is always true. */
10483 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10484 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10485 return omit_one_operand (type, integer_one_node, arg1);
10486 /* X || !X is always true. */
10487 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10488 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10489 return omit_one_operand (type, integer_one_node, arg0);
10491 goto truth_andor;
10493 case TRUTH_XOR_EXPR:
10494 /* If the second arg is constant zero, drop it. */
10495 if (integer_zerop (arg1))
10496 return non_lvalue (fold_convert (type, arg0));
10497 /* If the second arg is constant true, this is a logical inversion. */
10498 if (integer_onep (arg1))
10500 /* Only call invert_truthvalue if the operand is a truth value. */
10501 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10502 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10503 else
10504 tem = invert_truthvalue (arg0);
10505 return non_lvalue (fold_convert (type, tem));
10507 /* Identical arguments cancel to zero. */
10508 if (operand_equal_p (arg0, arg1, 0))
10509 return omit_one_operand (type, integer_zero_node, arg0);
10511 /* !X ^ X is always true. */
10512 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10513 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10514 return omit_one_operand (type, integer_one_node, arg1);
10516 /* X ^ !X is always true. */
10517 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10518 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10519 return omit_one_operand (type, integer_one_node, arg0);
10521 return NULL_TREE;
10523 case EQ_EXPR:
10524 case NE_EXPR:
10525 tem = fold_comparison (code, type, op0, op1);
10526 if (tem != NULL_TREE)
10527 return tem;
10529 /* bool_var != 0 becomes bool_var. */
10530 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10531 && code == NE_EXPR)
10532 return non_lvalue (fold_convert (type, arg0));
10534 /* bool_var == 1 becomes bool_var. */
10535 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10536 && code == EQ_EXPR)
10537 return non_lvalue (fold_convert (type, arg0));
10539 /* bool_var != 1 becomes !bool_var. */
10540 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10541 && code == NE_EXPR)
10542 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10544 /* bool_var == 0 becomes !bool_var. */
10545 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10546 && code == EQ_EXPR)
10547 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10549 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
10550 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10551 && TREE_CODE (arg1) == INTEGER_CST)
10553 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
10554 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10555 fold_build1 (BIT_NOT_EXPR, cmp_type,
10556 fold_convert (cmp_type, arg1)));
10559 /* If this is an equality comparison of the address of a non-weak
10560 object against zero, then we know the result. */
10561 if (TREE_CODE (arg0) == ADDR_EXPR
10562 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10563 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10564 && integer_zerop (arg1))
10565 return constant_boolean_node (code != EQ_EXPR, type);
10567 /* If this is an equality comparison of the address of two non-weak,
10568 unaliased symbols neither of which is extern (since we do not
10569 have access to attributes for externs), then we know the result. */
10570 if (TREE_CODE (arg0) == ADDR_EXPR
10571 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10572 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10573 && ! lookup_attribute ("alias",
10574 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10575 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10576 && TREE_CODE (arg1) == ADDR_EXPR
10577 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10578 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10579 && ! lookup_attribute ("alias",
10580 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10581 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10583 /* We know that we're looking at the address of two
10584 non-weak, unaliased, static _DECL nodes.
10586 It is both wasteful and incorrect to call operand_equal_p
10587 to compare the two ADDR_EXPR nodes. It is wasteful in that
10588 all we need to do is test pointer equality for the arguments
10589 to the two ADDR_EXPR nodes. It is incorrect to use
10590 operand_equal_p as that function is NOT equivalent to a
10591 C equality test. It can in fact return false for two
10592 objects which would test as equal using the C equality
10593 operator. */
10594 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10595 return constant_boolean_node (equal
10596 ? code == EQ_EXPR : code != EQ_EXPR,
10597 type);
10600 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10601 a MINUS_EXPR of a constant, we can convert it into a comparison with
10602 a revised constant as long as no overflow occurs. */
10603 if (TREE_CODE (arg1) == INTEGER_CST
10604 && (TREE_CODE (arg0) == PLUS_EXPR
10605 || TREE_CODE (arg0) == MINUS_EXPR)
10606 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10607 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10608 ? MINUS_EXPR : PLUS_EXPR,
10609 fold_convert (TREE_TYPE (arg0), arg1),
10610 TREE_OPERAND (arg0, 1), 0))
10611 && ! TREE_CONSTANT_OVERFLOW (tem))
10612 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10614 /* Similarly for a NEGATE_EXPR. */
10615 if (TREE_CODE (arg0) == NEGATE_EXPR
10616 && TREE_CODE (arg1) == INTEGER_CST
10617 && 0 != (tem = negate_expr (arg1))
10618 && TREE_CODE (tem) == INTEGER_CST
10619 && ! TREE_CONSTANT_OVERFLOW (tem))
10620 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10622 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10623 for !=. Don't do this for ordered comparisons due to overflow. */
10624 if (TREE_CODE (arg0) == MINUS_EXPR
10625 && integer_zerop (arg1))
10626 return fold_build2 (code, type,
10627 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10629 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10630 if (TREE_CODE (arg0) == ABS_EXPR
10631 && (integer_zerop (arg1) || real_zerop (arg1)))
10632 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10634 /* If this is an EQ or NE comparison with zero and ARG0 is
10635 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10636 two operations, but the latter can be done in one less insn
10637 on machines that have only two-operand insns or on which a
10638 constant cannot be the first operand. */
10639 if (TREE_CODE (arg0) == BIT_AND_EXPR
10640 && integer_zerop (arg1))
10642 tree arg00 = TREE_OPERAND (arg0, 0);
10643 tree arg01 = TREE_OPERAND (arg0, 1);
10644 if (TREE_CODE (arg00) == LSHIFT_EXPR
10645 && integer_onep (TREE_OPERAND (arg00, 0)))
10646 return
10647 fold_build2 (code, type,
10648 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10649 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10650 arg01, TREE_OPERAND (arg00, 1)),
10651 fold_convert (TREE_TYPE (arg0),
10652 integer_one_node)),
10653 arg1);
10654 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10655 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10656 return
10657 fold_build2 (code, type,
10658 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10659 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10660 arg00, TREE_OPERAND (arg01, 1)),
10661 fold_convert (TREE_TYPE (arg0),
10662 integer_one_node)),
10663 arg1);
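/* For instance, ((1 << n) & x) != 0 becomes ((x >> n) & 1) != 0,
   which avoids a constant in the first operand position.  */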
10666 /* If this is an NE or EQ comparison of zero against the result of a
10667 signed MOD operation whose second operand is a power of 2, make
10668 the MOD operation unsigned since it is simpler and equivalent. */
10669 if (integer_zerop (arg1)
10670 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10671 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10672 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10673 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10674 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10675 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10677 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10678 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10679 fold_convert (newtype,
10680 TREE_OPERAND (arg0, 0)),
10681 fold_convert (newtype,
10682 TREE_OPERAND (arg0, 1)));
10684 return fold_build2 (code, type, newmod,
10685 fold_convert (newtype, arg1));
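/* For instance, with 32-bit int x, x % 8 == 0 becomes
   (unsigned) x % 8 == 0, since divisibility by a power of two
   is unaffected by the 2**32 wraparound.  */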
10688 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10689 C1 is a valid shift constant, and C2 is a power of two, i.e.
10690 a single bit. */
10691 if (TREE_CODE (arg0) == BIT_AND_EXPR
10692 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10693 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10694 == INTEGER_CST
10695 && integer_pow2p (TREE_OPERAND (arg0, 1))
10696 && integer_zerop (arg1))
10698 tree itype = TREE_TYPE (arg0);
10699 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10700 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10702 /* Check for a valid shift count. */
10703 if (TREE_INT_CST_HIGH (arg001) == 0
10704 && TREE_INT_CST_LOW (arg001) < prec)
10706 tree arg01 = TREE_OPERAND (arg0, 1);
10707 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10708 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10709 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10710 can be rewritten as (X & (C2 << C1)) != 0. */
10711 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10713 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10714 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10715 return fold_build2 (code, type, tem, arg1);
10717 /* Otherwise, for signed (arithmetic) shifts,
10718 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10719 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10720 else if (!TYPE_UNSIGNED (itype))
10721 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10722 arg000, build_int_cst (itype, 0));
10723 /* Otherwise, for unsigned (logical) shifts,
10724 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10725 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10726 else
10727 return omit_one_operand (type,
10728 code == EQ_EXPR ? integer_one_node
10729 : integer_zero_node,
10730 arg000);
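/* For instance, ((x >> 3) & 4) != 0 becomes (x & 32) != 0,
   because 4 << 3 == 32 does not overflow.  */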
10734 /* If this is an NE comparison of zero with an AND of one, remove the
10735 comparison since the AND will give the correct value. */
10736 if (code == NE_EXPR
10737 && integer_zerop (arg1)
10738 && TREE_CODE (arg0) == BIT_AND_EXPR
10739 && integer_onep (TREE_OPERAND (arg0, 1)))
10740 return fold_convert (type, arg0);
10742 /* If we have (A & C) == C where C is a power of 2, convert this into
10743 (A & C) != 0. Similarly for NE_EXPR. */
10744 if (TREE_CODE (arg0) == BIT_AND_EXPR
10745 && integer_pow2p (TREE_OPERAND (arg0, 1))
10746 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10747 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10748 arg0, fold_convert (TREE_TYPE (arg0),
10749 integer_zero_node));
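/* For instance, (x & 8) == 8 becomes (x & 8) != 0, since a
   single-bit AND result is either 0 or the bit itself.  */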
10751 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10752 bit, then fold the expression into A < 0 or A >= 0. */
10753 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10754 if (tem)
10755 return tem;
10757 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10758 Similarly for NE_EXPR. */
10759 if (TREE_CODE (arg0) == BIT_AND_EXPR
10760 && TREE_CODE (arg1) == INTEGER_CST
10761 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10763 tree notc = fold_build1 (BIT_NOT_EXPR,
10764 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10765 TREE_OPERAND (arg0, 1));
10766 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10767 arg1, notc);
10768 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10769 if (integer_nonzerop (dandnotc))
10770 return omit_one_operand (type, rslt, arg0);
10773 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10774 Similarly for NE_EXPR. */
10775 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10776 && TREE_CODE (arg1) == INTEGER_CST
10777 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10779 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10780 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10781 TREE_OPERAND (arg0, 1), notd);
10782 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10783 if (integer_nonzerop (candnotd))
10784 return omit_one_operand (type, rslt, arg0);
10787 /* If this is a comparison of a field, we may be able to simplify it. */
10788 if (((TREE_CODE (arg0) == COMPONENT_REF
10789 && lang_hooks.can_use_bit_fields_p ())
10790 || TREE_CODE (arg0) == BIT_FIELD_REF)
10791 /* Handle the constant case even without -O
10792 to make sure the warnings are given. */
10793 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10795 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10796 if (t1)
10797 return t1;
10800 /* Optimize comparisons of strlen vs zero to a compare of the
10801 first character of the string vs zero. To wit,
10802 strlen(ptr) == 0 => *ptr == 0
10803 strlen(ptr) != 0 => *ptr != 0
10804 Other cases should reduce to one of these two (or a constant)
10805 due to the return value of strlen being unsigned. */
10806 if (TREE_CODE (arg0) == CALL_EXPR
10807 && integer_zerop (arg1))
10809 tree fndecl = get_callee_fndecl (arg0);
10810 tree arglist;
10812 if (fndecl
10813 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10814 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10815 && (arglist = TREE_OPERAND (arg0, 1))
10816 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10817 && ! TREE_CHAIN (arglist))
10819 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10820 return fold_build2 (code, type, iref,
10821 build_int_cst (TREE_TYPE (iref), 0));
10825 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10826 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10827 if (TREE_CODE (arg0) == RSHIFT_EXPR
10828 && integer_zerop (arg1)
10829 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10831 tree arg00 = TREE_OPERAND (arg0, 0);
10832 tree arg01 = TREE_OPERAND (arg0, 1);
10833 tree itype = TREE_TYPE (arg00);
10834 if (TREE_INT_CST_HIGH (arg01) == 0
10835 && TREE_INT_CST_LOW (arg01)
10836 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10838 if (TYPE_UNSIGNED (itype))
10840 itype = lang_hooks.types.signed_type (itype);
10841 arg00 = fold_convert (itype, arg00);
10843 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10844 type, arg00, build_int_cst (itype, 0));
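/* For instance, with 32-bit int x, (x >> 31) != 0 folds to
   x < 0: the arithmetic shift leaves only the sign bit.  */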
10848 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10849 if (integer_zerop (arg1)
10850 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10851 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10852 TREE_OPERAND (arg0, 1));
10854 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10855 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10856 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10857 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10858 build_int_cst (TREE_TYPE (arg1), 0));
10859 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10860 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10861 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10862 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10863 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10864 build_int_cst (TREE_TYPE (arg1), 0));
10866 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10867 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10868 && TREE_CODE (arg1) == INTEGER_CST
10869 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10870 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10871 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10872 TREE_OPERAND (arg0, 1), arg1));
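/* For instance, (x ^ 5) == 3 folds to x == 6, since 5 ^ 3 == 6.  */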
10874 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10875 (X & C) == 0 when C is a single bit. */
10876 if (TREE_CODE (arg0) == BIT_AND_EXPR
10877 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10878 && integer_zerop (arg1)
10879 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10881 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10882 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10883 TREE_OPERAND (arg0, 1));
10884 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10885 type, tem, arg1);
10888 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10889 constant C is a power of two, i.e. a single bit. */
10890 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10891 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10892 && integer_zerop (arg1)
10893 && integer_pow2p (TREE_OPERAND (arg0, 1))
10894 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10895 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10897 tree arg00 = TREE_OPERAND (arg0, 0);
10898 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10899 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10902 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10903 when C is a power of two, i.e. a single bit. */
10904 if (TREE_CODE (arg0) == BIT_AND_EXPR
10905 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10906 && integer_zerop (arg1)
10907 && integer_pow2p (TREE_OPERAND (arg0, 1))
10908 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10909 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10911 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10912 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10913 arg000, TREE_OPERAND (arg0, 1));
10914 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10915 tem, build_int_cst (TREE_TYPE (tem), 0));
10918 if (integer_zerop (arg1)
10919 && tree_expr_nonzero_p (arg0))
10921 tree res = constant_boolean_node (code == NE_EXPR, type);
10922 return omit_one_operand (type, res, arg0);
10924 return NULL_TREE;
10926 case LT_EXPR:
10927 case GT_EXPR:
10928 case LE_EXPR:
10929 case GE_EXPR:
10930 tem = fold_comparison (code, type, op0, op1);
10931 if (tem != NULL_TREE)
10932 return tem;
10934 /* Transform comparisons of the form X +- C CMP X. */
10935 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10936 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10937 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10938 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10939 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10940 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10942 tree arg01 = TREE_OPERAND (arg0, 1);
10943 enum tree_code code0 = TREE_CODE (arg0);
10944 int is_positive;
10946 if (TREE_CODE (arg01) == REAL_CST)
10947 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10948 else
10949 is_positive = tree_int_cst_sgn (arg01);
10951 /* (X - c) > X becomes false. */
10952 if (code == GT_EXPR
10953 && ((code0 == MINUS_EXPR && is_positive >= 0)
10954 || (code0 == PLUS_EXPR && is_positive <= 0)))
10956 if (TREE_CODE (arg01) == INTEGER_CST
10957 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10958 fold_overflow_warning (("assuming signed overflow does not "
10959 "occur when assuming that (X - c) > X "
10960 "is always false"),
10961 WARN_STRICT_OVERFLOW_ALL);
10962 return constant_boolean_node (0, type);
10965 /* Likewise (X + c) < X becomes false. */
10966 if (code == LT_EXPR
10967 && ((code0 == PLUS_EXPR && is_positive >= 0)
10968 || (code0 == MINUS_EXPR && is_positive <= 0)))
10970 if (TREE_CODE (arg01) == INTEGER_CST
10971 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10972 fold_overflow_warning (("assuming signed overflow does not "
10973 "occur when assuming that "
10974 "(X + c) < X is always false"),
10975 WARN_STRICT_OVERFLOW_ALL);
10976 return constant_boolean_node (0, type);
10979 /* Convert (X - c) <= X to true. */
10980 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10981 && code == LE_EXPR
10982 && ((code0 == MINUS_EXPR && is_positive >= 0)
10983 || (code0 == PLUS_EXPR && is_positive <= 0)))
10985 if (TREE_CODE (arg01) == INTEGER_CST
10986 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10987 fold_overflow_warning (("assuming signed overflow does not "
10988 "occur when assuming that "
10989 "(X - c) <= X is always true"),
10990 WARN_STRICT_OVERFLOW_ALL);
10991 return constant_boolean_node (1, type);
10994 /* Convert (X + c) >= X to true. */
10995 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10996 && code == GE_EXPR
10997 && ((code0 == PLUS_EXPR && is_positive >= 0)
10998 || (code0 == MINUS_EXPR && is_positive <= 0)))
11000 if (TREE_CODE (arg01) == INTEGER_CST
11001 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11002 fold_overflow_warning (("assuming signed overflow does not "
11003 "occur when assuming that "
11004 "(X + c) >= X is always true"),
11005 WARN_STRICT_OVERFLOW_ALL);
11006 return constant_boolean_node (1, type);
11009 if (TREE_CODE (arg01) == INTEGER_CST)
11011 /* Convert X + c > X and X - c < X to true for integers. */
11012 if (code == GT_EXPR
11013 && ((code0 == PLUS_EXPR && is_positive > 0)
11014 || (code0 == MINUS_EXPR && is_positive < 0)))
11016 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11017 fold_overflow_warning (("assuming signed overflow does "
11018 "not occur when assuming that "
11019 "(X + c) > X is always true"),
11020 WARN_STRICT_OVERFLOW_ALL);
11021 return constant_boolean_node (1, type);
11024 if (code == LT_EXPR
11025 && ((code0 == MINUS_EXPR && is_positive > 0)
11026 || (code0 == PLUS_EXPR && is_positive < 0)))
11028 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11029 fold_overflow_warning (("assuming signed overflow does "
11030 "not occur when assuming that "
11031 "(X - c) < X is always true"),
11032 WARN_STRICT_OVERFLOW_ALL);
11033 return constant_boolean_node (1, type);
11036 /* Convert X + c <= X and X - c >= X to false for integers. */
11037 if (code == LE_EXPR
11038 && ((code0 == PLUS_EXPR && is_positive > 0)
11039 || (code0 == MINUS_EXPR && is_positive < 0)))
11041 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11042 fold_overflow_warning (("assuming signed overflow does "
11043 "not occur when assuming that "
11044 "(X + c) <= X is always false"),
11045 WARN_STRICT_OVERFLOW_ALL);
11046 return constant_boolean_node (0, type);
11049 if (code == GE_EXPR
11050 && ((code0 == MINUS_EXPR && is_positive > 0)
11051 || (code0 == PLUS_EXPR && is_positive < 0)))
11053 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11054 fold_overflow_warning (("assuming signed overflow does "
11055 "not occur when assuming that "
11056 "(X - c) >= X is always true"),
11057 WARN_STRICT_OVERFLOW_ALL);
11058 return constant_boolean_node (0, type);
11063 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11064 This transformation affects the cases which are handled in later
11065 optimizations involving comparisons with non-negative constants. */
11066 if (TREE_CODE (arg1) == INTEGER_CST
11067 && TREE_CODE (arg0) != INTEGER_CST
11068 && tree_int_cst_sgn (arg1) > 0)
11070 if (code == GE_EXPR)
11072 arg1 = const_binop (MINUS_EXPR, arg1,
11073 build_int_cst (TREE_TYPE (arg1), 1), 0);
11074 return fold_build2 (GT_EXPR, type, arg0,
11075 fold_convert (TREE_TYPE (arg0), arg1));
11077 if (code == LT_EXPR)
11079 arg1 = const_binop (MINUS_EXPR, arg1,
11080 build_int_cst (TREE_TYPE (arg1), 1), 0);
11081 return fold_build2 (LE_EXPR, type, arg0,
11082 fold_convert (TREE_TYPE (arg0), arg1));
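/* For instance, x >= 4 becomes x > 3, and x < 4 becomes
   x <= 3.  */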
11086 /* Comparisons with the highest or lowest possible integer of
11087 the specified size will have known values. */
11089 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
11091 if (TREE_CODE (arg1) == INTEGER_CST
11092 && ! TREE_CONSTANT_OVERFLOW (arg1)
11093 && width <= 2 * HOST_BITS_PER_WIDE_INT
11094 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11095 || POINTER_TYPE_P (TREE_TYPE (arg1))))
11097 HOST_WIDE_INT signed_max_hi;
11098 unsigned HOST_WIDE_INT signed_max_lo;
11099 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11101 if (width <= HOST_BITS_PER_WIDE_INT)
11103 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11104 - 1;
11105 signed_max_hi = 0;
11106 max_hi = 0;
11108 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11110 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11111 min_lo = 0;
11112 min_hi = 0;
11114 else
11116 max_lo = signed_max_lo;
11117 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11118 min_hi = -1;
11121 else
11123 width -= HOST_BITS_PER_WIDE_INT;
11124 signed_max_lo = -1;
11125 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11126 - 1;
11127 max_lo = -1;
11128 min_lo = 0;
11130 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11132 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11133 min_hi = 0;
11135 else
11137 max_hi = signed_max_hi;
11138 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11142 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11143 && TREE_INT_CST_LOW (arg1) == max_lo)
11144 switch (code)
11146 case GT_EXPR:
11147 return omit_one_operand (type, integer_zero_node, arg0);
11149 case GE_EXPR:
11150 return fold_build2 (EQ_EXPR, type, op0, op1);
11152 case LE_EXPR:
11153 return omit_one_operand (type, integer_one_node, arg0);
11155 case LT_EXPR:
11156 return fold_build2 (NE_EXPR, type, op0, op1);
11158 /* The GE_EXPR and LT_EXPR cases above are not normally
11159 reached because of previous transformations. */
11161 default:
11162 break;
11164 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11165 == max_hi
11166 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11167 switch (code)
11169 case GT_EXPR:
11170 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11171 return fold_build2 (EQ_EXPR, type,
11172 fold_convert (TREE_TYPE (arg1), arg0),
11173 arg1);
11174 case LE_EXPR:
11175 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11176 return fold_build2 (NE_EXPR, type,
11177 fold_convert (TREE_TYPE (arg1), arg0),
11178 arg1);
11179 default:
11180 break;
11182 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11183 == min_hi
11184 && TREE_INT_CST_LOW (arg1) == min_lo)
11185 switch (code)
11187 case LT_EXPR:
11188 return omit_one_operand (type, integer_zero_node, arg0);
11190 case LE_EXPR:
11191 return fold_build2 (EQ_EXPR, type, op0, op1);
11193 case GE_EXPR:
11194 return omit_one_operand (type, integer_one_node, arg0);
11196 case GT_EXPR:
11197 return fold_build2 (NE_EXPR, type, op0, op1);
11199 default:
11200 break;
11202 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11203 == min_hi
11204 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11205 switch (code)
11207 case GE_EXPR:
11208 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11209 return fold_build2 (NE_EXPR, type,
11210 fold_convert (TREE_TYPE (arg1), arg0),
11211 arg1);
11212 case LT_EXPR:
11213 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11214 return fold_build2 (EQ_EXPR, type,
11215 fold_convert (TREE_TYPE (arg1), arg0),
11216 arg1);
11217 default:
11218 break;
11221 else if (!in_gimple_form
11222 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
11223 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11224 && TYPE_UNSIGNED (TREE_TYPE (arg1))
11225 /* signed_type does not work on pointer types. */
11226 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
11228 /* The following case also applies to X < signed_max+1
11229 and X >= signed_max+1 because of previous transformations. */
11230 if (code == LE_EXPR || code == GT_EXPR)
11232 tree st;
11233 st = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11234 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
11235 type, fold_convert (st, arg0),
11236 build_int_cst (st, 0));
11242 /* If we are comparing an ABS_EXPR with a constant, we can
11243 convert all the cases into explicit comparisons, but they may
11244 well not be faster than doing the ABS and one comparison.
11245 But ABS (X) <= C is a range comparison, which becomes a subtraction
11246 and a comparison, and is probably faster. */
11247 if (code == LE_EXPR
11248 && TREE_CODE (arg1) == INTEGER_CST
11249 && TREE_CODE (arg0) == ABS_EXPR
11250 && ! TREE_SIDE_EFFECTS (arg0)
11251 && (0 != (tem = negate_expr (arg1)))
11252 && TREE_CODE (tem) == INTEGER_CST
11253 && ! TREE_CONSTANT_OVERFLOW (tem))
11254 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11255 build2 (GE_EXPR, type,
11256 TREE_OPERAND (arg0, 0), tem),
11257 build2 (LE_EXPR, type,
11258 TREE_OPERAND (arg0, 0), arg1));
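/* For instance, abs (x) <= 5 becomes x >= -5 && x <= 5, a range
   test that needs no absolute value.  */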
11260 /* Convert ABS_EXPR<x> >= 0 to true. */
11261 strict_overflow_p = false;
11262 if (code == GE_EXPR
11263 && (integer_zerop (arg1)
11264 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11265 && real_zerop (arg1)))
11266 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11268 if (strict_overflow_p)
11269 fold_overflow_warning (("assuming signed overflow does not occur "
11270 "when simplifying comparison of "
11271 "absolute value and zero"),
11272 WARN_STRICT_OVERFLOW_CONDITIONAL);
11273 return omit_one_operand (type, integer_one_node, arg0);
11276 /* Convert ABS_EXPR<x> < 0 to false. */
11277 strict_overflow_p = false;
11278 if (code == LT_EXPR
11279 && (integer_zerop (arg1) || real_zerop (arg1))
11280 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11282 if (strict_overflow_p)
11283 fold_overflow_warning (("assuming signed overflow does not occur "
11284 "when simplifying comparison of "
11285 "absolute value and zero"),
11286 WARN_STRICT_OVERFLOW_CONDITIONAL);
11287 return omit_one_operand (type, integer_zero_node, arg0);
11290 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11291 and similarly for >= into !=. */
11292 if ((code == LT_EXPR || code == GE_EXPR)
11293 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11294 && TREE_CODE (arg1) == LSHIFT_EXPR
11295 && integer_onep (TREE_OPERAND (arg1, 0)))
11296 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11297 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11298 TREE_OPERAND (arg1, 1)),
11299 build_int_cst (TREE_TYPE (arg0), 0));
11301 if ((code == LT_EXPR || code == GE_EXPR)
11302 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11303 && (TREE_CODE (arg1) == NOP_EXPR
11304 || TREE_CODE (arg1) == CONVERT_EXPR)
11305 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11306 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11307 return
11308 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11309 fold_convert (TREE_TYPE (arg0),
11310 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11311 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11312 1))),
11313 build_int_cst (TREE_TYPE (arg0), 0));
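/* For instance, with unsigned x, x < (1 << n) becomes
   (x >> n) == 0, and x >= (1 << n) becomes (x >> n) != 0.  */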
11315 return NULL_TREE;
11317 case UNORDERED_EXPR:
11318 case ORDERED_EXPR:
11319 case UNLT_EXPR:
11320 case UNLE_EXPR:
11321 case UNGT_EXPR:
11322 case UNGE_EXPR:
11323 case UNEQ_EXPR:
11324 case LTGT_EXPR:
11325 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11327 t1 = fold_relational_const (code, type, arg0, arg1);
11328 if (t1 != NULL_TREE)
11329 return t1;
11332 /* If the first operand is NaN, the result is constant. */
11333 if (TREE_CODE (arg0) == REAL_CST
11334 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11335 && (code != LTGT_EXPR || ! flag_trapping_math))
11337 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11338 ? integer_zero_node
11339 : integer_one_node;
11340 return omit_one_operand (type, t1, arg1);
11343 /* If the second operand is NaN, the result is constant. */
11344 if (TREE_CODE (arg1) == REAL_CST
11345 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11346 && (code != LTGT_EXPR || ! flag_trapping_math))
11348 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11349 ? integer_zero_node
11350 : integer_one_node;
11351 return omit_one_operand (type, t1, arg0);
11354 /* Simplify unordered comparison of something with itself. */
11355 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11356 && operand_equal_p (arg0, arg1, 0))
11357 return constant_boolean_node (1, type);
11359 if (code == LTGT_EXPR
11360 && !flag_trapping_math
11361 && operand_equal_p (arg0, arg1, 0))
11362 return constant_boolean_node (0, type);
11364 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11366 tree targ0 = strip_float_extensions (arg0);
11367 tree targ1 = strip_float_extensions (arg1);
11368 tree newtype = TREE_TYPE (targ0);
11370 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11371 newtype = TREE_TYPE (targ1);
11373 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11374 return fold_build2 (code, type, fold_convert (newtype, targ0),
11375 fold_convert (newtype, targ1));
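/* For instance, if f and g are floats, (double) f < (double) g
   folds to f < g: widening a float to double is exact, so it
   cannot change the outcome of the comparison.  */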
11378 return NULL_TREE;
11380 case COMPOUND_EXPR:
11381 /* When pedantic, a compound expression can be neither an lvalue
11382 nor an integer constant expression. */
11383 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11384 return NULL_TREE;
11385 /* Don't let (0, 0) be a null pointer constant. */
11386 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11387 : fold_convert (type, arg1);
11388 return pedantic_non_lvalue (tem);
11390 case COMPLEX_EXPR:
11391 if ((TREE_CODE (arg0) == REAL_CST
11392 && TREE_CODE (arg1) == REAL_CST)
11393 || (TREE_CODE (arg0) == INTEGER_CST
11394 && TREE_CODE (arg1) == INTEGER_CST))
11395 return build_complex (type, arg0, arg1);
11396 return NULL_TREE;
11398 case ASSERT_EXPR:
11399 /* An ASSERT_EXPR should never be passed to fold_binary. */
11400 gcc_unreachable ();
11402 default:
11403 return NULL_TREE;
11404 } /* switch (code) */
11407 /* Callback for walk_tree, looking for LABEL_EXPR.
11408 Return *TP if it is a LABEL_EXPR, otherwise return NULL_TREE.
11409 Do not check the sub-tree of GOTO_EXPR. */
11411 static tree
11412 contains_label_1 (tree *tp,
11413 int *walk_subtrees,
11414 void *data ATTRIBUTE_UNUSED)
11416 switch (TREE_CODE (*tp))
11418 case LABEL_EXPR:
11419 return *tp;
11420 case GOTO_EXPR:
11421 *walk_subtrees = 0;
11422 /* no break */
11423 default:
11424 return NULL_TREE;
11428 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11429 accessible from outside the sub-tree. Returns false if no
11430 addressable label is found, true otherwise. */
11432 static bool
11433 contains_label_p (tree st)
11435 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11438 /* Fold a ternary expression of code CODE and type TYPE with operands
11439 OP0, OP1, and OP2. Return the folded expression if folding is
11440 successful. Otherwise, return NULL_TREE. */
11442 tree
11443 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11445 tree tem;
11446 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11447 enum tree_code_class kind = TREE_CODE_CLASS (code);
11449 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11450 && TREE_CODE_LENGTH (code) == 3);
11452 /* Strip any conversions that don't change the mode. This is safe
11453 for every expression, except for a comparison expression because
11454 its signedness is derived from its operands. So, in the latter
11455 case, only strip conversions that don't change the signedness.
11457 Note that this is done as an internal manipulation within the
11458 constant folder, in order to find the simplest representation of
11459 the arguments so that their form can be studied. In any cases,
11460 the appropriate type conversions should be put back in the tree
11461 that will get out of the constant folder. */
11462 if (op0)
11464 arg0 = op0;
11465 STRIP_NOPS (arg0);
11468 if (op1)
11470 arg1 = op1;
11471 STRIP_NOPS (arg1);
11474 switch (code)
11476 case COMPONENT_REF:
11477 if (TREE_CODE (arg0) == CONSTRUCTOR
11478 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11480 unsigned HOST_WIDE_INT idx;
11481 tree field, value;
11482 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11483 if (field == arg1)
11484 return value;
11486 return NULL_TREE;
11488 case COND_EXPR:
11489 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11490 so all simple results must be passed through pedantic_non_lvalue. */
11491 if (TREE_CODE (arg0) == INTEGER_CST)
11493 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11494 tem = integer_zerop (arg0) ? op2 : op1;
11495 /* Only optimize constant conditions when the selected branch
11496 has the same type as the COND_EXPR. This avoids optimizing
11497 away "c ? x : throw", where the throw has a void type.
11498 Avoid throwing away an operand which contains a label. */
11499 if ((!TREE_SIDE_EFFECTS (unused_op)
11500 || !contains_label_p (unused_op))
11501 && (! VOID_TYPE_P (TREE_TYPE (tem))
11502 || VOID_TYPE_P (type)))
11503 return pedantic_non_lvalue (tem);
11504 return NULL_TREE;
11506 if (operand_equal_p (arg1, op2, 0))
11507 return pedantic_omit_one_operand (type, arg1, arg0);
11509 /* If we have A op B ? A : C, we may be able to convert this to a
11510 simpler expression, depending on the operation and the values
11511 of B and C. Signed zeros prevent all of these transformations,
11512 for reasons given above each one.
11514 Also try swapping the arguments and inverting the conditional. */
11515 if (COMPARISON_CLASS_P (arg0)
11516 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11517 arg1, TREE_OPERAND (arg0, 1))
11518 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11520 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11521 if (tem)
11522 return tem;
11525 if (COMPARISON_CLASS_P (arg0)
11526 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11527 op2,
11528 TREE_OPERAND (arg0, 1))
11529 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11531 tem = fold_truth_not_expr (arg0);
11532 if (tem && COMPARISON_CLASS_P (tem))
11534 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11535 if (tem)
11536 return tem;
11540 /* If the second operand is simpler than the third, swap them
11541 since that produces better jump optimization results. */
11542 if (truth_value_p (TREE_CODE (arg0))
11543 && tree_swap_operands_p (op1, op2, false))
11545 /* See if this can be inverted. If it can't, possibly because
11546 it was a floating-point inequality comparison, don't do
11547 anything. */
11548 tem = fold_truth_not_expr (arg0);
11549 if (tem)
11550 return fold_build3 (code, type, tem, op2, op1);
11553 /* Convert A ? 1 : 0 to simply A. */
11554 if (integer_onep (op1)
11555 && integer_zerop (op2)
11556 /* If we try to convert OP0 to our type, the
11557 call to fold will try to move the conversion inside
11558 a COND, which will recurse. In that case, the COND_EXPR
11559 is probably the best choice, so leave it alone. */
11560 && type == TREE_TYPE (arg0))
11561 return pedantic_non_lvalue (arg0);
11563 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11564 over COND_EXPR in cases such as floating point comparisons. */
11565 if (integer_zerop (op1)
11566 && integer_onep (op2)
11567 && truth_value_p (TREE_CODE (arg0)))
11568 return pedantic_non_lvalue (fold_convert (type,
11569 invert_truthvalue (arg0)));
11571 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11572 if (TREE_CODE (arg0) == LT_EXPR
11573 && integer_zerop (TREE_OPERAND (arg0, 1))
11574 && integer_zerop (op2)
11575 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11577 /* sign_bit_p only checks ARG1 bits within A's precision.
11578 If <sign bit of A> has wider type than A, bits outside
11579 of A's precision in <sign bit of A> need to be checked.
11580 If they are all 0, this optimization needs to be done
11581 in unsigned A's type; if they are all 1, in signed A's type;
11582 otherwise this can't be done. */
11583 if (TYPE_PRECISION (TREE_TYPE (tem))
11584 < TYPE_PRECISION (TREE_TYPE (arg1))
11585 && TYPE_PRECISION (TREE_TYPE (tem))
11586 < TYPE_PRECISION (type))
11588 unsigned HOST_WIDE_INT mask_lo;
11589 HOST_WIDE_INT mask_hi;
11590 int inner_width, outer_width;
11591 tree tem_type;
11593 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11594 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11595 if (outer_width > TYPE_PRECISION (type))
11596 outer_width = TYPE_PRECISION (type);
11598 if (outer_width > HOST_BITS_PER_WIDE_INT)
11600 mask_hi = ((unsigned HOST_WIDE_INT) -1
11601 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11602 mask_lo = -1;
11604 else
11606 mask_hi = 0;
11607 mask_lo = ((unsigned HOST_WIDE_INT) -1
11608 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11610 if (inner_width > HOST_BITS_PER_WIDE_INT)
11612 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11613 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11614 mask_lo = 0;
11616 else
11617 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11618 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11620 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11621 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11623 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11624 tem = fold_convert (tem_type, tem);
11626 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11627 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11629 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11630 tem = fold_convert (tem_type, tem);
11632 else
11633 tem = NULL;
11636 if (tem)
11637 return fold_convert (type,
11638 fold_build2 (BIT_AND_EXPR,
11639 TREE_TYPE (tem), tem,
11640 fold_convert (TREE_TYPE (tem),
11641 arg1)));
11644 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11645 already handled above. */
11646 if (TREE_CODE (arg0) == BIT_AND_EXPR
11647 && integer_onep (TREE_OPERAND (arg0, 1))
11648 && integer_zerop (op2)
11649 && integer_pow2p (arg1))
11651 tree tem = TREE_OPERAND (arg0, 0);
11652 STRIP_NOPS (tem);
11653 if (TREE_CODE (tem) == RSHIFT_EXPR
11654 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11655 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11656 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11657 return fold_build2 (BIT_AND_EXPR, type,
11658 TREE_OPERAND (tem, 0), arg1);
11661 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11662 is probably obsolete because the first operand should be a
11663 truth value (that's why we have the two cases above), but let's
11664 leave it in until we can confirm this for all front-ends. */
11665 if (integer_zerop (op2)
11666 && TREE_CODE (arg0) == NE_EXPR
11667 && integer_zerop (TREE_OPERAND (arg0, 1))
11668 && integer_pow2p (arg1)
11669 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11670 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11671 arg1, OEP_ONLY_CONST))
11672 return pedantic_non_lvalue (fold_convert (type,
11673 TREE_OPERAND (arg0, 0)));
11675 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11676 if (integer_zerop (op2)
11677 && truth_value_p (TREE_CODE (arg0))
11678 && truth_value_p (TREE_CODE (arg1)))
11679 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11680 fold_convert (type, arg0),
11681 arg1);
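/* For instance, a ? b : 0 becomes a && b when both a and b are
   boolean truth values.  */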
11683 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11684 if (integer_onep (op2)
11685 && truth_value_p (TREE_CODE (arg0))
11686 && truth_value_p (TREE_CODE (arg1)))
11688 /* Only perform transformation if ARG0 is easily inverted. */
11689 tem = fold_truth_not_expr (arg0);
11690 if (tem)
11691 return fold_build2 (TRUTH_ORIF_EXPR, type,
11692 fold_convert (type, tem),
11693 arg1);
11696 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11697 if (integer_zerop (arg1)
11698 && truth_value_p (TREE_CODE (arg0))
11699 && truth_value_p (TREE_CODE (op2)))
11701 /* Only perform transformation if ARG0 is easily inverted. */
11702 tem = fold_truth_not_expr (arg0);
11703 if (tem)
11704 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11705 fold_convert (type, tem),
11706 op2);
11709 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11710 if (integer_onep (arg1)
11711 && truth_value_p (TREE_CODE (arg0))
11712 && truth_value_p (TREE_CODE (op2)))
11713 return fold_build2 (TRUTH_ORIF_EXPR, type,
11714 fold_convert (type, arg0),
11715 op2);
11717 return NULL_TREE;
11719 case CALL_EXPR:
11720 /* Check for a built-in function. */
11721 if (TREE_CODE (op0) == ADDR_EXPR
11722 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11723 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11724 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11725 return NULL_TREE;
11727 case BIT_FIELD_REF:
11728 if (TREE_CODE (arg0) == VECTOR_CST
11729 && type == TREE_TYPE (TREE_TYPE (arg0))
11730 && host_integerp (arg1, 1)
11731 && host_integerp (op2, 1))
11733 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11734 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11736 if (width != 0
11737 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11738 && (idx % width) == 0
11739 && (idx = idx / width)
11740 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11742 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11743 while (idx-- > 0 && elements)
11744 elements = TREE_CHAIN (elements);
11745 if (elements)
11746 return TREE_VALUE (elements);
11747 else
11748 return fold_convert (type, integer_zero_node);
11751 return NULL_TREE;
11753 default:
11754 return NULL_TREE;
11755 } /* switch (code) */
11758 /* Perform constant folding and related simplification of EXPR.
11759 The related simplifications include x*1 => x, x*0 => 0, etc.,
11760 and application of the associative law.
11761 NOP_EXPR conversions may be removed freely (as long as we
11762 are careful not to change the type of the overall expression).
11763 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11764 but we can constant-fold them if they have constant operands. */
11766 #ifdef ENABLE_FOLD_CHECKING
11767 # define fold(x) fold_1 (x)
11768 static tree fold_1 (tree);
11769 static
11770 #endif
11771 tree
11772 fold (tree expr)
11774 const tree t = expr;
11775 enum tree_code code = TREE_CODE (t);
11776 enum tree_code_class kind = TREE_CODE_CLASS (code);
11777 tree tem;
11779 /* Return right away if a constant. */
11780 if (kind == tcc_constant)
11781 return t;
11783 if (IS_EXPR_CODE_CLASS (kind))
11785 tree type = TREE_TYPE (t);
11786 tree op0, op1, op2;
11788 switch (TREE_CODE_LENGTH (code))
11790 case 1:
11791 op0 = TREE_OPERAND (t, 0);
11792 tem = fold_unary (code, type, op0);
11793 return tem ? tem : expr;
11794 case 2:
11795 op0 = TREE_OPERAND (t, 0);
11796 op1 = TREE_OPERAND (t, 1);
11797 tem = fold_binary (code, type, op0, op1);
11798 return tem ? tem : expr;
11799 case 3:
11800 op0 = TREE_OPERAND (t, 0);
11801 op1 = TREE_OPERAND (t, 1);
11802 op2 = TREE_OPERAND (t, 2);
11803 tem = fold_ternary (code, type, op0, op1, op2);
11804 return tem ? tem : expr;
11805 default:
11806 break;
11810 switch (code)
11812 case CONST_DECL:
11813 return fold (DECL_INITIAL (t));
11815 default:
11816 return t;
11817 } /* switch (code) */
11820 #ifdef ENABLE_FOLD_CHECKING
11821 #undef fold
11823 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11824 static void fold_check_failed (tree, tree);
11825 void print_fold_checksum (tree);
11827 /* When --enable-checking=fold, compute a digest of EXPR before
11828 and after the actual fold call to verify that fold did not
11829 accidentally change the original EXPR. */
11831 tree
11832 fold (tree expr)
11834 tree ret;
11835 struct md5_ctx ctx;
11836 unsigned char checksum_before[16], checksum_after[16];
11837 htab_t ht;
11839 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11840 md5_init_ctx (&ctx);
11841 fold_checksum_tree (expr, &ctx, ht);
11842 md5_finish_ctx (&ctx, checksum_before);
11843 htab_empty (ht);
11845 ret = fold_1 (expr);
11847 md5_init_ctx (&ctx);
11848 fold_checksum_tree (expr, &ctx, ht);
11849 md5_finish_ctx (&ctx, checksum_after);
11850 htab_delete (ht);
11852 if (memcmp (checksum_before, checksum_after, 16))
11853 fold_check_failed (expr, ret);
11855 return ret;
11858 void
11859 print_fold_checksum (tree expr)
11861 struct md5_ctx ctx;
11862 unsigned char checksum[16], cnt;
11863 htab_t ht;
11865 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11866 md5_init_ctx (&ctx);
11867 fold_checksum_tree (expr, &ctx, ht);
11868 md5_finish_ctx (&ctx, checksum);
11869 htab_delete (ht);
11870 for (cnt = 0; cnt < 16; ++cnt)
11871 fprintf (stderr, "%02x", checksum[cnt]);
11872 putc ('\n', stderr);
11875 static void
11876 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11878 internal_error ("fold check: original tree changed by fold");
11881 static void
11882 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11884 void **slot;
11885 enum tree_code code;
11886 struct tree_function_decl buf;
11887 int i, len;
11889 recursive_label:
11891 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11892 <= sizeof (struct tree_function_decl))
11893 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11894 if (expr == NULL)
11895 return;
11896 slot = htab_find_slot (ht, expr, INSERT);
11897 if (*slot != NULL)
11898 return;
11899 *slot = expr;
11900 code = TREE_CODE (expr);
11901 if (TREE_CODE_CLASS (code) == tcc_declaration
11902 && DECL_ASSEMBLER_NAME_SET_P (expr))
11904 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11905 memcpy ((char *) &buf, expr, tree_size (expr));
11906 expr = (tree) &buf;
11907 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11909 else if (TREE_CODE_CLASS (code) == tcc_type
11910 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11911 || TYPE_CACHED_VALUES_P (expr)
11912 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11914 /* Allow these fields to be modified. */
11915 memcpy ((char *) &buf, expr, tree_size (expr));
11916 expr = (tree) &buf;
11917 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11918 TYPE_POINTER_TO (expr) = NULL;
11919 TYPE_REFERENCE_TO (expr) = NULL;
11920 if (TYPE_CACHED_VALUES_P (expr))
11922 TYPE_CACHED_VALUES_P (expr) = 0;
11923 TYPE_CACHED_VALUES (expr) = NULL;
11926 md5_process_bytes (expr, tree_size (expr), ctx);
11927 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11928 if (TREE_CODE_CLASS (code) != tcc_type
11929 && TREE_CODE_CLASS (code) != tcc_declaration
11930 && code != TREE_LIST)
11931 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11932 switch (TREE_CODE_CLASS (code))
11934 case tcc_constant:
11935 switch (code)
11937 case STRING_CST:
11938 md5_process_bytes (TREE_STRING_POINTER (expr),
11939 TREE_STRING_LENGTH (expr), ctx);
11940 break;
11941 case COMPLEX_CST:
11942 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11943 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11944 break;
11945 case VECTOR_CST:
11946 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11947 break;
11948 default:
11949 break;
11951 break;
11952 case tcc_exceptional:
11953 switch (code)
11955 case TREE_LIST:
11956 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11957 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11958 expr = TREE_CHAIN (expr);
11959 goto recursive_label;
11960 break;
11961 case TREE_VEC:
11962 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11963 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11964 break;
11965 default:
11966 break;
11968 break;
11969 case tcc_expression:
11970 case tcc_reference:
11971 case tcc_comparison:
11972 case tcc_unary:
11973 case tcc_binary:
11974 case tcc_statement:
11975 len = TREE_CODE_LENGTH (code);
11976 for (i = 0; i < len; ++i)
11977 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11978 break;
11979 case tcc_declaration:
11980 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11981 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11982 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11984 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11985 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11986 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11987 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11988 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11990 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11991 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11993 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11995 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11996 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11997 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
11999 break;
12000 case tcc_type:
12001 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12002 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12003 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12004 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12005 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12006 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12007 if (INTEGRAL_TYPE_P (expr)
12008 || SCALAR_FLOAT_TYPE_P (expr))
12010 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12011 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12013 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12014 if (TREE_CODE (expr) == RECORD_TYPE
12015 || TREE_CODE (expr) == UNION_TYPE
12016 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12017 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12018 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12019 break;
12020 default:
12021 break;
12025 #endif
12027 /* Fold a unary tree expression with code CODE of type TYPE with an
12028 operand OP0. Return a folded expression if successful. Otherwise,
12029 return a tree expression with code CODE of type TYPE with an
12030 operand OP0. */
12032 tree
12033 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12035 tree tem;
12036 #ifdef ENABLE_FOLD_CHECKING
12037 unsigned char checksum_before[16], checksum_after[16];
12038 struct md5_ctx ctx;
12039 htab_t ht;
12041 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12042 md5_init_ctx (&ctx);
12043 fold_checksum_tree (op0, &ctx, ht);
12044 md5_finish_ctx (&ctx, checksum_before);
12045 htab_empty (ht);
12046 #endif
12048 tem = fold_unary (code, type, op0);
12049 if (!tem)
12050 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12052 #ifdef ENABLE_FOLD_CHECKING
12053 md5_init_ctx (&ctx);
12054 fold_checksum_tree (op0, &ctx, ht);
12055 md5_finish_ctx (&ctx, checksum_after);
12056 htab_delete (ht);
12058 if (memcmp (checksum_before, checksum_after, 16))
12059 fold_check_failed (op0, tem);
12060 #endif
12061 return tem;
12064 /* Fold a binary tree expression with code CODE of type TYPE with
12065 operands OP0 and OP1. Return a folded expression if successful.
12066 Otherwise, return a tree expression with code CODE of type TYPE
12067 with operands OP0 and OP1. */
12069 tree
12070 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12071 MEM_STAT_DECL)
12073 tree tem;
12074 #ifdef ENABLE_FOLD_CHECKING
12075 unsigned char checksum_before_op0[16],
12076 checksum_before_op1[16],
12077 checksum_after_op0[16],
12078 checksum_after_op1[16];
12079 struct md5_ctx ctx;
12080 htab_t ht;
12082 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12083 md5_init_ctx (&ctx);
12084 fold_checksum_tree (op0, &ctx, ht);
12085 md5_finish_ctx (&ctx, checksum_before_op0);
12086 htab_empty (ht);
12088 md5_init_ctx (&ctx);
12089 fold_checksum_tree (op1, &ctx, ht);
12090 md5_finish_ctx (&ctx, checksum_before_op1);
12091 htab_empty (ht);
12092 #endif
12094 tem = fold_binary (code, type, op0, op1);
12095 if (!tem)
12096 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12098 #ifdef ENABLE_FOLD_CHECKING
12099 md5_init_ctx (&ctx);
12100 fold_checksum_tree (op0, &ctx, ht);
12101 md5_finish_ctx (&ctx, checksum_after_op0);
12102 htab_empty (ht);
12104 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12105 fold_check_failed (op0, tem);
12107 md5_init_ctx (&ctx);
12108 fold_checksum_tree (op1, &ctx, ht);
12109 md5_finish_ctx (&ctx, checksum_after_op1);
12110 htab_delete (ht);
12112 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12113 fold_check_failed (op1, tem);
12114 #endif
12115 return tem;
12118 /* Fold a ternary tree expression with code CODE of type TYPE with
12119 operands OP0, OP1, and OP2. Return a folded expression if
12120 successful. Otherwise, return a tree expression with code CODE of
12121 type TYPE with operands OP0, OP1, and OP2. */
12123 tree
12124 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12125 MEM_STAT_DECL)
12127 tree tem;
12128 #ifdef ENABLE_FOLD_CHECKING
12129 unsigned char checksum_before_op0[16],
12130 checksum_before_op1[16],
12131 checksum_before_op2[16],
12132 checksum_after_op0[16],
12133 checksum_after_op1[16],
12134 checksum_after_op2[16];
12135 struct md5_ctx ctx;
12136 htab_t ht;
12138 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12139 md5_init_ctx (&ctx);
12140 fold_checksum_tree (op0, &ctx, ht);
12141 md5_finish_ctx (&ctx, checksum_before_op0);
12142 htab_empty (ht);
12144 md5_init_ctx (&ctx);
12145 fold_checksum_tree (op1, &ctx, ht);
12146 md5_finish_ctx (&ctx, checksum_before_op1);
12147 htab_empty (ht);
12149 md5_init_ctx (&ctx);
12150 fold_checksum_tree (op2, &ctx, ht);
12151 md5_finish_ctx (&ctx, checksum_before_op2);
12152 htab_empty (ht);
12153 #endif
12155 tem = fold_ternary (code, type, op0, op1, op2);
12156 if (!tem)
12157 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12159 #ifdef ENABLE_FOLD_CHECKING
12160 md5_init_ctx (&ctx);
12161 fold_checksum_tree (op0, &ctx, ht);
12162 md5_finish_ctx (&ctx, checksum_after_op0);
12163 htab_empty (ht);
12165 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12166 fold_check_failed (op0, tem);
12168 md5_init_ctx (&ctx);
12169 fold_checksum_tree (op1, &ctx, ht);
12170 md5_finish_ctx (&ctx, checksum_after_op1);
12171 htab_empty (ht);
12173 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12174 fold_check_failed (op1, tem);
12176 md5_init_ctx (&ctx);
12177 fold_checksum_tree (op2, &ctx, ht);
12178 md5_finish_ctx (&ctx, checksum_after_op2);
12179 htab_delete (ht);
12181 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12182 fold_check_failed (op2, tem);
12183 #endif
12184 return tem;
12187 /* Perform constant folding and related simplification of initializer
12188 expression EXPR. These behave identically to "fold_buildN" but ignore
12189 potential run-time traps and exceptions that fold must preserve. */
12191 #define START_FOLD_INIT \
12192 int saved_signaling_nans = flag_signaling_nans;\
12193 int saved_trapping_math = flag_trapping_math;\
12194 int saved_rounding_math = flag_rounding_math;\
12195 int saved_trapv = flag_trapv;\
12196 int saved_folding_initializer = folding_initializer;\
12197 flag_signaling_nans = 0;\
12198 flag_trapping_math = 0;\
12199 flag_rounding_math = 0;\
12200 flag_trapv = 0;\
12201 folding_initializer = 1;
12203 #define END_FOLD_INIT \
12204 flag_signaling_nans = saved_signaling_nans;\
12205 flag_trapping_math = saved_trapping_math;\
12206 flag_rounding_math = saved_rounding_math;\
12207 flag_trapv = saved_trapv;\
12208 folding_initializer = saved_folding_initializer;
12210 tree
12211 fold_build1_initializer (enum tree_code code, tree type, tree op)
12213 tree result;
12214 START_FOLD_INIT;
12216 result = fold_build1 (code, type, op);
12218 END_FOLD_INIT;
12219 return result;
12222 tree
12223 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12225 tree result;
12226 START_FOLD_INIT;
12228 result = fold_build2 (code, type, op0, op1);
12230 END_FOLD_INIT;
12231 return result;
12234 tree
12235 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12236 tree op2)
12238 tree result;
12239 START_FOLD_INIT;
12241 result = fold_build3 (code, type, op0, op1, op2);
12243 END_FOLD_INIT;
12244 return result;
12247 #undef START_FOLD_INIT
12248 #undef END_FOLD_INIT
12250 /* Determine if first argument is a multiple of second argument. Return 0 if
12251 it is not, or we cannot easily determine it to be.
12253 An example of the sort of thing we care about (at this point; this routine
12254 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12255 fold cases do now) is discovering that
12257 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12259 is a multiple of
12261 SAVE_EXPR (J * 8)
12263 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12265 This code also handles discovering that
12267 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12269 is a multiple of 8 so we don't have to worry about dealing with a
12270 possible remainder.
12272 Note that we *look* inside a SAVE_EXPR only to determine how it was
12273 calculated; it is not safe for fold to do much of anything else with the
12274 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12275 at run time. For example, the latter example above *cannot* be implemented
12276 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12277 evaluation time of the original SAVE_EXPR is not necessarily the same at
12278 the time the new expression is evaluated. The only optimization of this
12279 sort that would be valid is changing
12281 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12283 divided by 8 to
12285 SAVE_EXPR (I) * SAVE_EXPR (J)
12287 (where the same SAVE_EXPR (J) is used in the original and the
12288 transformed version). */
12290 static int
12291 multiple_of_p (tree type, tree top, tree bottom)
12293 if (operand_equal_p (top, bottom, 0))
12294 return 1;
12296 if (TREE_CODE (type) != INTEGER_TYPE)
12297 return 0;
12299 switch (TREE_CODE (top))
12301 case BIT_AND_EXPR:
12302 /* Bitwise and provides a power of two multiple. If the mask is
12303 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12304 if (!integer_pow2p (bottom))
12305 return 0;
12306 /* FALLTHRU */
12308 case MULT_EXPR:
12309 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12310 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12312 case PLUS_EXPR:
12313 case MINUS_EXPR:
12314 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12315 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12317 case LSHIFT_EXPR:
12318 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12320 tree op1, t1;
12322 op1 = TREE_OPERAND (top, 1);
12323 /* const_binop may not detect overflow correctly,
12324 so check for it explicitly here. */
12325 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12326 > TREE_INT_CST_LOW (op1)
12327 && TREE_INT_CST_HIGH (op1) == 0
12328 && 0 != (t1 = fold_convert (type,
12329 const_binop (LSHIFT_EXPR,
12330 size_one_node,
12331 op1, 0)))
12332 && ! TREE_OVERFLOW (t1))
12333 return multiple_of_p (type, t1, bottom);
12335 return 0;
12337 case NOP_EXPR:
12338 /* Can't handle conversions from non-integral or wider integral type. */
12339 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12340 || (TYPE_PRECISION (type)
12341 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12342 return 0;
12344 /* .. fall through ... */
12346 case SAVE_EXPR:
12347 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12349 case INTEGER_CST:
12350 if (TREE_CODE (bottom) != INTEGER_CST
12351 || (TYPE_UNSIGNED (type)
12352 && (tree_int_cst_sgn (top) < 0
12353 || tree_int_cst_sgn (bottom) < 0)))
12354 return 0;
12355 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
12356 top, bottom, 0));
12358 default:
12359 return 0;
12363 /* Return true if `t' is known to be non-negative. If the return
12364 value is based on the assumption that signed overflow is undefined,
12365 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12366 *STRICT_OVERFLOW_P. */
12368 int
12369 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
12371 if (t == error_mark_node)
12372 return 0;
12374 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12375 return 1;
12377 switch (TREE_CODE (t))
12379 case SSA_NAME:
12380 /* Query VRP to see if it has recorded any information about
12381 the range of this object. */
12382 return ssa_name_nonnegative_p (t);
12384 case ABS_EXPR:
12385 /* We can't return 1 if flag_wrapv is set because
12386 ABS_EXPR<INT_MIN> = INT_MIN. */
12387 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
12388 return 1;
12389 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
12391 *strict_overflow_p = true;
12392 return 1;
12394 break;
12396 case INTEGER_CST:
12397 return tree_int_cst_sgn (t) >= 0;
12399 case REAL_CST:
12400 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12402 case PLUS_EXPR:
12403 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12404 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12405 strict_overflow_p)
12406 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12407 strict_overflow_p));
12409 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12410 both unsigned and at least 2 bits shorter than the result. */
12411 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12412 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12413 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12415 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12416 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12417 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12418 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12420 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12421 TYPE_PRECISION (inner2)) + 1;
12422 return prec < TYPE_PRECISION (TREE_TYPE (t));
12425 break;
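/* Editorial note: for example, with two 8-bit unsigned inner types
   extended to a 32-bit result, prec is MAX (8, 8) + 1 == 9 < 32; the
   sum is at most 255 + 255 == 510 < 2^9, so the result's sign bit
   stays clear. */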
12427 case MULT_EXPR:
12428 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12430 /* x * x for floating point x is always non-negative. */
12431 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12432 return 1;
12433 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12434 strict_overflow_p)
12435 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12436 strict_overflow_p));
12439 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12440 both unsigned and their combined precision is less than the result's. */
12441 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12442 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12443 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12445 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12446 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12447 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12448 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12449 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12450 < TYPE_PRECISION (TREE_TYPE (t));
12452 return 0;
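/* Editorial note: for example, two 8-bit unsigned inner types extended
   to 32 bits yield a product of at most 255 * 255 == 65025 < 2^16, and
   8 + 8 == 16 < 32, so the product cannot reach the result's sign bit. */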
12454 case BIT_AND_EXPR:
12455 case MAX_EXPR:
12456 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12457 strict_overflow_p)
12458 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12459 strict_overflow_p));
12461 case BIT_IOR_EXPR:
12462 case BIT_XOR_EXPR:
12463 case MIN_EXPR:
12464 case RDIV_EXPR:
12465 case TRUNC_DIV_EXPR:
12466 case CEIL_DIV_EXPR:
12467 case FLOOR_DIV_EXPR:
12468 case ROUND_DIV_EXPR:
12469 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12470 strict_overflow_p)
12471 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12472 strict_overflow_p));
12474 case TRUNC_MOD_EXPR:
12475 case CEIL_MOD_EXPR:
12476 case FLOOR_MOD_EXPR:
12477 case ROUND_MOD_EXPR:
12478 case SAVE_EXPR:
12479 case NON_LVALUE_EXPR:
12480 case FLOAT_EXPR:
12481 case FIX_TRUNC_EXPR:
12482 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12483 strict_overflow_p);
12485 case COMPOUND_EXPR:
12486 case MODIFY_EXPR:
12487 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12488 strict_overflow_p);
12490 case BIND_EXPR:
12491 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
12492 strict_overflow_p);
12494 case COND_EXPR:
12495 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12496 strict_overflow_p)
12497 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
12498 strict_overflow_p));
12500 case NOP_EXPR:
12502 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12503 tree outer_type = TREE_TYPE (t);
12505 if (TREE_CODE (outer_type) == REAL_TYPE)
12507 if (TREE_CODE (inner_type) == REAL_TYPE)
12508 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12509 strict_overflow_p);
12510 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12512 if (TYPE_UNSIGNED (inner_type))
12513 return 1;
12514 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12515 strict_overflow_p);
12518 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12520 if (TREE_CODE (inner_type) == REAL_TYPE)
12521 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
12522 strict_overflow_p);
12523 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12524 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12525 && TYPE_UNSIGNED (inner_type);
12528 break;
12530 case TARGET_EXPR:
12532 tree temp = TARGET_EXPR_SLOT (t);
12533 t = TARGET_EXPR_INITIAL (t);
12535 /* If the initializer is non-void, then it's a normal expression
12536 that will be assigned to the slot. */
12537 if (!VOID_TYPE_P (t))
12538 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
12540 /* Otherwise, the initializer sets the slot in some way. One common
12541 way is an assignment statement at the end of the initializer. */
12542 while (1)
12544 if (TREE_CODE (t) == BIND_EXPR)
12545 t = expr_last (BIND_EXPR_BODY (t));
12546 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12547 || TREE_CODE (t) == TRY_CATCH_EXPR)
12548 t = expr_last (TREE_OPERAND (t, 0));
12549 else if (TREE_CODE (t) == STATEMENT_LIST)
12550 t = expr_last (t);
12551 else
12552 break;
12554 if (TREE_CODE (t) == MODIFY_EXPR
12555 && TREE_OPERAND (t, 0) == temp)
12556 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12557 strict_overflow_p);
12559 return 0;
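/* Editorial note: a hypothetical initializer of the form
   TARGET_EXPR <D.1, { ...; D.1 = ABS_EXPR <x> }> is handled by the loop
   above, which walks to the final MODIFY_EXPR of the slot and then
   tests its right-hand side for non-negativity. */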
12562 case CALL_EXPR:
12564 tree fndecl = get_callee_fndecl (t);
12565 tree arglist = TREE_OPERAND (t, 1);
12566 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12567 switch (DECL_FUNCTION_CODE (fndecl))
12569 CASE_FLT_FN (BUILT_IN_ACOS):
12570 CASE_FLT_FN (BUILT_IN_ACOSH):
12571 CASE_FLT_FN (BUILT_IN_CABS):
12572 CASE_FLT_FN (BUILT_IN_COSH):
12573 CASE_FLT_FN (BUILT_IN_ERFC):
12574 CASE_FLT_FN (BUILT_IN_EXP):
12575 CASE_FLT_FN (BUILT_IN_EXP10):
12576 CASE_FLT_FN (BUILT_IN_EXP2):
12577 CASE_FLT_FN (BUILT_IN_FABS):
12578 CASE_FLT_FN (BUILT_IN_FDIM):
12579 CASE_FLT_FN (BUILT_IN_HYPOT):
12580 CASE_FLT_FN (BUILT_IN_POW10):
12581 CASE_INT_FN (BUILT_IN_FFS):
12582 CASE_INT_FN (BUILT_IN_PARITY):
12583 CASE_INT_FN (BUILT_IN_POPCOUNT):
12584 /* Always true. */
12585 return 1;
12587 CASE_FLT_FN (BUILT_IN_SQRT):
12588 /* sqrt(-0.0) is -0.0. */
12589 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12590 return 1;
12591 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12592 strict_overflow_p);
12594 CASE_FLT_FN (BUILT_IN_ASINH):
12595 CASE_FLT_FN (BUILT_IN_ATAN):
12596 CASE_FLT_FN (BUILT_IN_ATANH):
12597 CASE_FLT_FN (BUILT_IN_CBRT):
12598 CASE_FLT_FN (BUILT_IN_CEIL):
12599 CASE_FLT_FN (BUILT_IN_ERF):
12600 CASE_FLT_FN (BUILT_IN_EXPM1):
12601 CASE_FLT_FN (BUILT_IN_FLOOR):
12602 CASE_FLT_FN (BUILT_IN_FMOD):
12603 CASE_FLT_FN (BUILT_IN_FREXP):
12604 CASE_FLT_FN (BUILT_IN_LCEIL):
12605 CASE_FLT_FN (BUILT_IN_LDEXP):
12606 CASE_FLT_FN (BUILT_IN_LFLOOR):
12607 CASE_FLT_FN (BUILT_IN_LLCEIL):
12608 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12609 CASE_FLT_FN (BUILT_IN_LLRINT):
12610 CASE_FLT_FN (BUILT_IN_LLROUND):
12611 CASE_FLT_FN (BUILT_IN_LRINT):
12612 CASE_FLT_FN (BUILT_IN_LROUND):
12613 CASE_FLT_FN (BUILT_IN_MODF):
12614 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12615 CASE_FLT_FN (BUILT_IN_POW):
12616 CASE_FLT_FN (BUILT_IN_RINT):
12617 CASE_FLT_FN (BUILT_IN_ROUND):
12618 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12619 CASE_FLT_FN (BUILT_IN_SINH):
12620 CASE_FLT_FN (BUILT_IN_TANH):
12621 CASE_FLT_FN (BUILT_IN_TRUNC):
12622 /* True if the 1st argument is nonnegative. */
12623 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12624 strict_overflow_p);
12626 CASE_FLT_FN (BUILT_IN_FMAX):
12627 /* True if the 1st OR 2nd arguments are nonnegative. */
12628 return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12629 strict_overflow_p)
12630 || (tree_expr_nonnegative_warnv_p
12631 (TREE_VALUE (TREE_CHAIN (arglist)),
12632 strict_overflow_p)));
12634 CASE_FLT_FN (BUILT_IN_FMIN):
12635 /* True if the 1st AND 2nd arguments are nonnegative. */
12636 return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12637 strict_overflow_p)
12638 && (tree_expr_nonnegative_warnv_p
12639 (TREE_VALUE (TREE_CHAIN (arglist)),
12640 strict_overflow_p)));
12642 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12643 /* True if the 2nd argument is nonnegative. */
12644 return (tree_expr_nonnegative_warnv_p
12645 (TREE_VALUE (TREE_CHAIN (arglist)),
12646 strict_overflow_p));
12648 default:
12649 break;
12653 /* ... fall through ... */
12655 default:
12657 tree type = TREE_TYPE (t);
12658 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12659 && truth_value_p (TREE_CODE (t)))
12660 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12661 have a signed:1 type (where the values are -1 and 0). */
12662 return true;
12666 /* We don't know the sign of `t', so be conservative and return false. */
12667 return 0;
12670 /* Return true if `t' is known to be non-negative. Handle warnings
12671 about undefined signed overflow. */
12673 int
12674 tree_expr_nonnegative_p (tree t)
12676 int ret;
12677 bool strict_overflow_p;
12679 strict_overflow_p = false;
12680 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
12681 if (strict_overflow_p)
12682 fold_overflow_warning (("assuming signed overflow does not occur when "
12683 "determining that expression is always "
12684 "non-negative"),
12685 WARN_STRICT_OVERFLOW_MISC);
12686 return ret;
12689 /* Return true when T is an address and is known to be nonzero.
12690 For floating point we further ensure that T is not denormal.
12691 Similar logic is present in nonzero_address in rtlanal.c.
12693 If the return value is based on the assumption that signed overflow
12694 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
12695 change *STRICT_OVERFLOW_P. */
12697 bool
12698 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
12700 tree type = TREE_TYPE (t);
12701 bool sub_strict_overflow_p;
12703 /* Doing something useful for floating point would need more work. */
12704 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12705 return false;
12707 switch (TREE_CODE (t))
12709 case SSA_NAME:
12710 /* Query VRP to see if it has recorded any information about
12711 the range of this object. */
12712 return ssa_name_nonzero_p (t);
12714 case ABS_EXPR:
12715 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12716 strict_overflow_p);
12718 case INTEGER_CST:
12719 /* We used to test for !integer_zerop here. This does not work correctly
12720 if TREE_CONSTANT_OVERFLOW (t). */
12721 return (TREE_INT_CST_LOW (t) != 0
12722 || TREE_INT_CST_HIGH (t) != 0);
12724 case PLUS_EXPR:
12725 if (TYPE_OVERFLOW_UNDEFINED (type))
12727 /* In the presence of negative values it is hard
12728 to say anything. */
12729 sub_strict_overflow_p = false;
12730 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12731 &sub_strict_overflow_p)
12732 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12733 &sub_strict_overflow_p))
12734 return false;
12735 /* One of the operands must be positive and the other non-negative. */
12736 /* We don't set *STRICT_OVERFLOW_P here: even if this value
12737 overflows, on a twos-complement machine the sum of two
12738 nonnegative numbers can never be zero. */
12739 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12740 strict_overflow_p)
12741 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12742 strict_overflow_p));
12744 break;
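/* Editorial sketch, not part of the original sources: the wrap-around
   claim in the comment above (a positive value plus a nonnegative one
   never wraps to zero) can be verified exhaustively at a narrow width,
   here 8-bit two's complement:

     #include <assert.h>
     int
     main (void)
     {
       for (unsigned int a = 1; a <= 127; a++)
         for (unsigned int b = 0; b <= 127; b++)
           assert (((a + b) & 0xff) != 0);
       return 0;
     }
*/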
12746 case MULT_EXPR:
12747 if (TYPE_OVERFLOW_UNDEFINED (type))
12749 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12750 strict_overflow_p)
12751 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12752 strict_overflow_p))
12754 *strict_overflow_p = true;
12755 return true;
12758 break;
12760 case NOP_EXPR:
12762 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12763 tree outer_type = TREE_TYPE (t);
12765 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12766 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12767 strict_overflow_p));
12769 break;
12771 case ADDR_EXPR:
12773 tree base = get_base_address (TREE_OPERAND (t, 0));
12775 if (!base)
12776 return false;
12778 /* Weak declarations may link to NULL. */
12779 if (VAR_OR_FUNCTION_DECL_P (base))
12780 return !DECL_WEAK (base);
12782 /* Constants are never weak. */
12783 if (CONSTANT_CLASS_P (base))
12784 return true;
12786 return false;
12789 case COND_EXPR:
12790 sub_strict_overflow_p = false;
12791 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12792 &sub_strict_overflow_p)
12793 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
12794 &sub_strict_overflow_p))
12796 if (sub_strict_overflow_p)
12797 *strict_overflow_p = true;
12798 return true;
12800 break;
12802 case MIN_EXPR:
12803 sub_strict_overflow_p = false;
12804 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12805 &sub_strict_overflow_p)
12806 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12807 &sub_strict_overflow_p))
12809 if (sub_strict_overflow_p)
12810 *strict_overflow_p = true;
return true; /* Editorial fix: MIN yields one of its two operands, so the minimum of two nonzero values is itself nonzero. */
12812 break;
12814 case MAX_EXPR:
12815 sub_strict_overflow_p = false;
12816 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12817 &sub_strict_overflow_p))
12819 if (sub_strict_overflow_p)
12820 *strict_overflow_p = true;
12822 /* When both operands are nonzero, then MAX must be too. */
12823 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12824 strict_overflow_p))
12825 return true;
12827 /* MAX where operand 0 is positive is positive. */
12828 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12829 strict_overflow_p);
12831 /* MAX where operand 1 is positive is positive. */
12832 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12833 &sub_strict_overflow_p)
12834 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12835 &sub_strict_overflow_p))
12837 if (sub_strict_overflow_p)
12838 *strict_overflow_p = true;
12839 return true;
12841 break;
12843 case COMPOUND_EXPR:
12844 case MODIFY_EXPR:
12845 case BIND_EXPR:
12846 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12847 strict_overflow_p);
12849 case SAVE_EXPR:
12850 case NON_LVALUE_EXPR:
12851 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12852 strict_overflow_p);
12854 case BIT_IOR_EXPR:
12855 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12856 strict_overflow_p)
12857 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12858 strict_overflow_p));
12860 case CALL_EXPR:
12861 return alloca_call_p (t);
12863 default:
12864 break;
12866 return false;
12869 /* Return true when T is an address and is known to be nonzero.
12870 Handle warnings about undefined signed overflow. */
12872 bool
12873 tree_expr_nonzero_p (tree t)
12875 bool ret, strict_overflow_p;
12877 strict_overflow_p = false;
12878 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
12879 if (strict_overflow_p)
12880 fold_overflow_warning (("assuming signed overflow does not occur when "
12881 "determining that expression is always "
12882 "non-zero"),
12883 WARN_STRICT_OVERFLOW_MISC);
12884 return ret;
12887 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12888 attempt to fold the expression to a constant without modifying TYPE,
12889 OP0 or OP1.
12891 If the expression could be simplified to a constant, then return
12892 the constant. If the expression would not be simplified to a
12893 constant, then return NULL_TREE. */
12895 tree
12896 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12898 tree tem = fold_binary (code, type, op0, op1);
12899 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12902 /* Given the components of a unary expression CODE, TYPE and OP0,
12903 attempt to fold the expression to a constant without modifying
12904 TYPE or OP0.
12906 If the expression could be simplified to a constant, then return
12907 the constant. If the expression would not be simplified to a
12908 constant, then return NULL_TREE. */
12910 tree
12911 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12913 tree tem = fold_unary (code, type, op0);
12914 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12917 /* If EXP represents referencing an element in a constant string
12918 (either via pointer arithmetic or array indexing), return the
12919 tree representing the value accessed, otherwise return NULL. */
12921 tree
12922 fold_read_from_constant_string (tree exp)
12924 if ((TREE_CODE (exp) == INDIRECT_REF
12925 || TREE_CODE (exp) == ARRAY_REF)
12926 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
12928 tree exp1 = TREE_OPERAND (exp, 0);
12929 tree index;
12930 tree string;
12932 if (TREE_CODE (exp) == INDIRECT_REF)
12933 string = string_constant (exp1, &index);
12934 else
12936 tree low_bound = array_ref_low_bound (exp);
12937 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12939 /* Optimize the special case of a zero lower bound.
12941 We convert the low_bound to sizetype to avoid some problems
12942 with constant folding. (E.g. suppose the lower bound is 1,
12943 and its mode is QI. Without the conversion, (ARRAY
12944 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12945 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
12946 if (! integer_zerop (low_bound))
12947 index = size_diffop (index, fold_convert (sizetype, low_bound));
12949 string = exp1;
12952 if (string
12953 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12954 && TREE_CODE (string) == STRING_CST
12955 && TREE_CODE (index) == INTEGER_CST
12956 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12957 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12958 == MODE_INT)
12959 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12960 return fold_convert (TREE_TYPE (exp),
12961 build_int_cst (NULL_TREE,
12962 (TREE_STRING_POINTER (string)
12963 [TREE_INT_CST_LOW (index)])));
12965 return NULL;
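/* Editorial note: for example, for the C expression "abc"[1], EXP is an
   ARRAY_REF of a STRING_CST with index 1, and the function returns the
   character constant 'b' converted to TREE_TYPE (exp). */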
12968 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12969 an integer constant or real constant.
12971 TYPE is the type of the result. */
12973 static tree
12974 fold_negate_const (tree arg0, tree type)
12976 tree t = NULL_TREE;
12978 switch (TREE_CODE (arg0))
12980 case INTEGER_CST:
12982 unsigned HOST_WIDE_INT low;
12983 HOST_WIDE_INT high;
12984 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12985 TREE_INT_CST_HIGH (arg0),
12986 &low, &high);
12987 t = build_int_cst_wide (type, low, high);
12988 t = force_fit_type (t, 1,
12989 (overflow | TREE_OVERFLOW (arg0))
12990 && !TYPE_UNSIGNED (type),
12991 TREE_CONSTANT_OVERFLOW (arg0));
12992 break;
12995 case REAL_CST:
12996 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12997 break;
12999 default:
13000 gcc_unreachable ();
13003 return t;
13006 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13007 an integer constant or real constant.
13009 TYPE is the type of the result. */
13011 tree
13012 fold_abs_const (tree arg0, tree type)
13014 tree t = NULL_TREE;
13016 switch (TREE_CODE (arg0))
13018 case INTEGER_CST:
13019 /* If the value is unsigned, then the absolute value is
13020 the same as the ordinary value. */
13021 if (TYPE_UNSIGNED (type))
13022 t = arg0;
13023 /* Similarly, if the value is non-negative. */
13024 else if (INT_CST_LT (integer_minus_one_node, arg0))
13025 t = arg0;
13026 /* If the value is negative, then the absolute value is
13027 its negation. */
13028 else
13030 unsigned HOST_WIDE_INT low;
13031 HOST_WIDE_INT high;
13032 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13033 TREE_INT_CST_HIGH (arg0),
13034 &low, &high);
13035 t = build_int_cst_wide (type, low, high);
13036 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
13037 TREE_CONSTANT_OVERFLOW (arg0));
13039 break;
13041 case REAL_CST:
13042 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13043 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13044 else
13045 t = arg0;
13046 break;
13048 default:
13049 gcc_unreachable ();
13052 return t;
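/* Editorial note: the negation path above is why an overflow indicator
   is forced onto the result: in two's complement the absolute value of
   the most negative integer (e.g. abs (INT_MIN)) is not representable
   and wraps back to INT_MIN. */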
13055 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13056 constant. TYPE is the type of the result. */
13058 static tree
13059 fold_not_const (tree arg0, tree type)
13061 tree t = NULL_TREE;
13063 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13065 t = build_int_cst_wide (type,
13066 ~ TREE_INT_CST_LOW (arg0),
13067 ~ TREE_INT_CST_HIGH (arg0));
13068 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
13069 TREE_CONSTANT_OVERFLOW (arg0));
13071 return t;
13074 /* Given CODE, a relational operator, the target type, TYPE and two
13075 constant operands OP0 and OP1, return the result of the
13076 relational operation. If the result is not a compile time
13077 constant, then return NULL_TREE. */
13079 static tree
13080 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13082 int result, invert;
13084 /* From here on, the only cases we handle are when the result is
13085 known to be a constant. */
13087 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13089 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13090 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13092 /* Handle the cases where either operand is a NaN. */
13093 if (real_isnan (c0) || real_isnan (c1))
13095 switch (code)
13097 case EQ_EXPR:
13098 case ORDERED_EXPR:
13099 result = 0;
13100 break;
13102 case NE_EXPR:
13103 case UNORDERED_EXPR:
13104 case UNLT_EXPR:
13105 case UNLE_EXPR:
13106 case UNGT_EXPR:
13107 case UNGE_EXPR:
13108 case UNEQ_EXPR:
13109 result = 1;
13110 break;
13112 case LT_EXPR:
13113 case LE_EXPR:
13114 case GT_EXPR:
13115 case GE_EXPR:
13116 case LTGT_EXPR:
13117 if (flag_trapping_math)
13118 return NULL_TREE;
13119 result = 0;
13120 break;
13122 default:
13123 gcc_unreachable ();
13126 return constant_boolean_node (result, type);
13129 return constant_boolean_node (real_compare (code, c0, c1), type);
13132 /* Handle equality/inequality of complex constants. */
13133 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13135 tree rcond = fold_relational_const (code, type,
13136 TREE_REALPART (op0),
13137 TREE_REALPART (op1));
13138 tree icond = fold_relational_const (code, type,
13139 TREE_IMAGPART (op0),
13140 TREE_IMAGPART (op1));
13141 if (code == EQ_EXPR)
13142 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13143 else if (code == NE_EXPR)
13144 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13145 else
13146 return NULL_TREE;
13149 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13151 To compute GT, swap the arguments and do LT.
13152 To compute GE, do LT and invert the result.
13153 To compute LE, swap the arguments, do LT and invert the result.
13154 To compute NE, do EQ and invert the result.
13156 Therefore, the code below must handle only EQ and LT. */
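/* Editorial sketch, not part of the original sources: the reduction
   described above, written out for ordinary C integers (GT swaps and
   does LT; GE does LT and inverts; LE swaps, does LT and inverts; NE
   does EQ and inverts):

     #include <assert.h>
     static int lt (int a, int b) { return a < b; }
     static int eq (int a, int b) { return a == b; }
     int
     main (void)
     {
       int a = 3, b = 7;
       assert ((a > b) == lt (b, a));
       assert ((a >= b) == !lt (a, b));
       assert ((a <= b) == !lt (b, a));
       assert ((a != b) == !eq (a, b));
       return 0;
     }
*/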
13158 if (code == LE_EXPR || code == GT_EXPR)
13160 tree tem = op0;
13161 op0 = op1;
13162 op1 = tem;
13163 code = swap_tree_comparison (code);
13166 /* Note that it is safe to invert for real values here because we
13167 have already handled the one case where it matters. */
13169 invert = 0;
13170 if (code == NE_EXPR || code == GE_EXPR)
13172 invert = 1;
13173 code = invert_tree_comparison (code, false);
13176 /* Compute a result for LT or EQ if the arguments permit;
13177 otherwise return NULL_TREE. */
13178 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13180 if (code == EQ_EXPR)
13181 result = tree_int_cst_equal (op0, op1);
13182 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13183 result = INT_CST_LT_UNSIGNED (op0, op1);
13184 else
13185 result = INT_CST_LT (op0, op1);
13187 else
13188 return NULL_TREE;
13190 if (invert)
13191 result ^= 1;
13192 return constant_boolean_node (result, type);
13195 /* Build an expression for a cleanup point containing EXPR, with type
13196 TYPE. Don't build a cleanup point expression for an EXPR which doesn't
13197 have side effects. */
13199 tree
13200 fold_build_cleanup_point_expr (tree type, tree expr)
13202 /* If the expression does not have side effects then we don't have to wrap
13203 it with a cleanup point expression. */
13204 if (!TREE_SIDE_EFFECTS (expr))
13205 return expr;
13207 /* If the expression is a return, check whether the expression inside
13208 the return, or the right-hand side of the modify expression inside
13209 the return, has side effects. If neither does, we don't need to wrap
13210 the expression in a cleanup point expression. Note that we don't check
13211 the left-hand side of the modify because it should always be the return decl. */
13212 if (TREE_CODE (expr) == RETURN_EXPR)
13214 tree op = TREE_OPERAND (expr, 0);
13215 if (!op || !TREE_SIDE_EFFECTS (op))
13216 return expr;
13217 op = TREE_OPERAND (op, 1);
13218 if (!TREE_SIDE_EFFECTS (op))
13219 return expr;
13222 return build1 (CLEANUP_POINT_EXPR, type, expr);
13225 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13226 avoid confusing the gimplify process. */
13228 tree
13229 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13231 /* The size of the object is not relevant when talking about its address. */
13232 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13233 t = TREE_OPERAND (t, 0);
13235 /* Note: doesn't apply to ALIGN_INDIRECT_REF. */
13236 if (TREE_CODE (t) == INDIRECT_REF
13237 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13239 t = TREE_OPERAND (t, 0);
13240 if (TREE_TYPE (t) != ptrtype)
13241 t = build1 (NOP_EXPR, ptrtype, t);
13243 else
13245 tree base = t;
13247 while (handled_component_p (base))
13248 base = TREE_OPERAND (base, 0);
13249 if (DECL_P (base))
13250 TREE_ADDRESSABLE (base) = 1;
13252 t = build1 (ADDR_EXPR, ptrtype, t);
13255 return t;
13258 tree
13259 build_fold_addr_expr (tree t)
13261 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13264 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13265 of an indirection through OP0, or NULL_TREE if no simplification is
13266 possible. */
13268 tree
13269 fold_indirect_ref_1 (tree type, tree op0)
13271 tree sub = op0;
13272 tree subtype;
13274 STRIP_NOPS (sub);
13275 subtype = TREE_TYPE (sub);
13276 if (!POINTER_TYPE_P (subtype))
13277 return NULL_TREE;
13279 if (TREE_CODE (sub) == ADDR_EXPR)
13281 tree op = TREE_OPERAND (sub, 0);
13282 tree optype = TREE_TYPE (op);
13283 /* *&CONST_DECL -> to the value of the const decl. */
13284 if (TREE_CODE (op) == CONST_DECL)
13285 return DECL_INITIAL (op);
13286 /* *&p => p; make sure to handle *&"str"[cst] here. */
13287 if (type == optype)
13289 tree fop = fold_read_from_constant_string (op);
13290 if (fop)
13291 return fop;
13292 else
13293 return op;
13295 /* *(foo *)&fooarray => fooarray[0] */
13296 else if (TREE_CODE (optype) == ARRAY_TYPE
13297 && type == TREE_TYPE (optype))
13299 tree type_domain = TYPE_DOMAIN (optype);
13300 tree min_val = size_zero_node;
13301 if (type_domain && TYPE_MIN_VALUE (type_domain))
13302 min_val = TYPE_MIN_VALUE (type_domain);
13303 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13305 /* *(foo *)&complexfoo => __real__ complexfoo */
13306 else if (TREE_CODE (optype) == COMPLEX_TYPE
13307 && type == TREE_TYPE (optype))
13308 return fold_build1 (REALPART_EXPR, type, op);
13311 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13312 if (TREE_CODE (sub) == PLUS_EXPR
13313 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13315 tree op00 = TREE_OPERAND (sub, 0);
13316 tree op01 = TREE_OPERAND (sub, 1);
13317 tree op00type;
13319 STRIP_NOPS (op00);
13320 op00type = TREE_TYPE (op00);
13321 if (TREE_CODE (op00) == ADDR_EXPR
13322 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13323 && type == TREE_TYPE (TREE_TYPE (op00type)))
13325 tree size = TYPE_SIZE_UNIT (type);
13326 if (tree_int_cst_equal (size, op01))
13327 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13331 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13332 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13333 && type == TREE_TYPE (TREE_TYPE (subtype)))
13335 tree type_domain;
13336 tree min_val = size_zero_node;
13337 sub = build_fold_indirect_ref (sub);
13338 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13339 if (type_domain && TYPE_MIN_VALUE (type_domain))
13340 min_val = TYPE_MIN_VALUE (type_domain);
13341 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13344 return NULL_TREE;
13347 /* Builds an expression for an indirection through T, simplifying some
13348 cases. */
13350 tree
13351 build_fold_indirect_ref (tree t)
13353 tree type = TREE_TYPE (TREE_TYPE (t));
13354 tree sub = fold_indirect_ref_1 (type, t);
13356 if (sub)
13357 return sub;
13358 else
13359 return build1 (INDIRECT_REF, type, t);
13362 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13364 tree
13365 fold_indirect_ref (tree t)
13367 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13369 if (sub)
13370 return sub;
13371 else
13372 return t;
13375 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13376 whose result is ignored. The type of the returned tree need not be
13377 the same as the original expression. */
13379 tree
13380 fold_ignored_result (tree t)
13382 if (!TREE_SIDE_EFFECTS (t))
13383 return integer_zero_node;
13385 for (;;)
13386 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13388 case tcc_unary:
13389 t = TREE_OPERAND (t, 0);
13390 break;
13392 case tcc_binary:
13393 case tcc_comparison:
13394 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13395 t = TREE_OPERAND (t, 0);
13396 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13397 t = TREE_OPERAND (t, 1);
13398 else
13399 return t;
13400 break;
13402 case tcc_expression:
13403 switch (TREE_CODE (t))
13405 case COMPOUND_EXPR:
13406 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13407 return t;
13408 t = TREE_OPERAND (t, 0);
13409 break;
13411 case COND_EXPR:
13412 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13413 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13414 return t;
13415 t = TREE_OPERAND (t, 0);
13416 break;
13418 default:
13419 return t;
13421 break;
13423 default:
13424 return t;
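/* Editorial note: for example, for a COMPOUND_EXPR (f (), x) whose
   result is ignored, the loop above drops the side-effect-free second
   operand and continues stripping from f (). */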
13428 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13429 This can only be applied to objects of a sizetype. */
13431 tree
13432 round_up (tree value, int divisor)
13434 tree div = NULL_TREE;
13436 gcc_assert (divisor > 0);
13437 if (divisor == 1)
13438 return value;
13440 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13441 have to do anything. Only do this when we are not given a constant,
13442 because for a constant this check is more expensive than simply
13443 doing the rounding. */
13444 if (TREE_CODE (value) != INTEGER_CST)
13446 div = build_int_cst (TREE_TYPE (value), divisor);
13448 if (multiple_of_p (TREE_TYPE (value), value, div))
13449 return value;
13452 /* If divisor is a power of two, simplify this to bit manipulation. */
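/* Editorial note: e.g. with VALUE 37 and DIVISOR 8, the bit trick below
   computes (37 + 7) & -8 == 40. */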
13453 if (divisor == (divisor & -divisor))
13455 tree t;
13457 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13458 value = size_binop (PLUS_EXPR, value, t);
13459 t = build_int_cst (TREE_TYPE (value), -divisor);
13460 value = size_binop (BIT_AND_EXPR, value, t);
13462 else
13464 if (!div)
13465 div = build_int_cst (TREE_TYPE (value), divisor);
13466 value = size_binop (CEIL_DIV_EXPR, value, div);
13467 value = size_binop (MULT_EXPR, value, div);
13470 return value;
13473 /* Likewise, but round down. */
13475 tree
13476 round_down (tree value, int divisor)
13478 tree div = NULL_TREE;
13480 gcc_assert (divisor > 0);
13481 if (divisor == 1)
13482 return value;
13484 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13485 have to do anything. Only do this when we are not given a constant,
13486 because for a constant this check is more expensive than simply
13487 doing the rounding. */
13488 if (TREE_CODE (value) != INTEGER_CST)
13490 div = build_int_cst (TREE_TYPE (value), divisor);
13492 if (multiple_of_p (TREE_TYPE (value), value, div))
13493 return value;
13496 /* If divisor is a power of two, simplify this to bit manipulation. */
13497 if (divisor == (divisor & -divisor))
13499 tree t;
13501 t = build_int_cst (TREE_TYPE (value), -divisor);
13502 value = size_binop (BIT_AND_EXPR, value, t);
13504 else
13506 if (!div)
13507 div = build_int_cst (TREE_TYPE (value), divisor);
13508 value = size_binop (FLOOR_DIV_EXPR, value, div);
13509 value = size_binop (MULT_EXPR, value, div);
13512 return value;
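/* Editorial sketch, not part of the original sources: the power-of-two
   paths of round_up and round_down above amount to the following plain
   C, where the first assertion mirrors round_up and the second mirrors
   round_down, and D must be a power of two:

     #include <assert.h>
     int
     main (void)
     {
       unsigned int v = 37, d = 8;
       assert (((v + d - 1) & -d) == 40);
       assert ((v & -d) == 32);
       return 0;
     }
*/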
13515 /* Returns the pointer to the base of the object addressed by EXP and
13516 extracts the information about the offset of the access, storing it
13517 in *PBITPOS and *POFFSET. */
13519 static tree
13520 split_address_to_core_and_offset (tree exp,
13521 HOST_WIDE_INT *pbitpos, tree *poffset)
13523 tree core;
13524 enum machine_mode mode;
13525 int unsignedp, volatilep;
13526 HOST_WIDE_INT bitsize;
13528 if (TREE_CODE (exp) == ADDR_EXPR)
13530 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13531 poffset, &mode, &unsignedp, &volatilep,
13532 false);
13533 core = build_fold_addr_expr (core);
13535 else
13537 core = exp;
13538 *pbitpos = 0;
13539 *poffset = NULL_TREE;
13542 return core;
13545 /* Returns true if addresses of E1 and E2 differ by a constant, false
13546 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13548 bool
13549 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13551 tree core1, core2;
13552 HOST_WIDE_INT bitpos1, bitpos2;
13553 tree toffset1, toffset2, tdiff, type;
13555 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13556 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13558 if (bitpos1 % BITS_PER_UNIT != 0
13559 || bitpos2 % BITS_PER_UNIT != 0
13560 || !operand_equal_p (core1, core2, 0))
13561 return false;
13563 if (toffset1 && toffset2)
13565 type = TREE_TYPE (toffset1);
13566 if (type != TREE_TYPE (toffset2))
13567 toffset2 = fold_convert (type, toffset2);
13569 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13570 if (!cst_and_fits_in_hwi (tdiff))
13571 return false;
13573 *diff = int_cst_value (tdiff);
13575 else if (toffset1 || toffset2)
13577 /* If only one of the offsets is non-constant, the difference cannot
13578 be a constant. */
13579 return false;
13581 else
13582 *diff = 0;
13584 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13585 return true;
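/* Editorial note: for example, for E1 == &a[10] and E2 == &a[4] with
   4-byte elements, both addresses share the core &a, the offsets fold
   to a constant, and *DIFF is set to 24. */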
13588 /* Simplify the floating point expression EXP when the sign of the
13589 result is not significant. Return NULL_TREE if no simplification
13590 is possible. */
13592 tree
13593 fold_strip_sign_ops (tree exp)
13595 tree arg0, arg1;
13597 switch (TREE_CODE (exp))
13599 case ABS_EXPR:
13600 case NEGATE_EXPR:
13601 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13602 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13604 case MULT_EXPR:
13605 case RDIV_EXPR:
13606 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13607 return NULL_TREE;
13608 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13609 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13610 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13611 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13612 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13613 arg1 ? arg1 : TREE_OPERAND (exp, 1));
13614 break;
13616 default:
13617 break;
13619 return NULL_TREE;
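/* Editorial note: for example, when only the magnitude of the result
   matters (say, underneath a call to fabs), NEGATE_EXPR <x> * y can be
   rewritten as x * y; the recursion above performs exactly that
   stripping. */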