Daily bump.
[official-gcc.git] / gcc / fold-const.c
blob93dee15ce287955caec97e1964f1cc34a8e74f50
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
46 #include "config.h"
47 #include "system.h"
48 #include "coretypes.h"
49 #include "tm.h"
50 #include "flags.h"
51 #include "tree.h"
52 #include "real.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "toplev.h"
57 #include "ggc.h"
58 #include "hashtab.h"
59 #include "langhooks.h"
60 #include "md5.h"
62 /* Non-zero if we are folding constants inside an initializer; zero
63 otherwise. */
/* NOTE(review): presumably set/cleared by front ends around initializer
   folding; confirm against callers before depending on it.  */
64 int folding_initializer = 0;
66 /* The following constants represent a bit based encoding of GCC's
67 comparison operators. This encoding simplifies transformations
68 on relational comparison operators, such as AND and OR. */
/* Encoding: bit 0 = "less", bit 1 = "equal", bit 2 = "greater",
   bit 3 = "unordered".  Compound codes are unions of these bits,
   e.g. COMPCODE_LE (3) == COMPCODE_LT | COMPCODE_EQ, and
   COMPCODE_NE (13) == LT | GT | UNORD.  */
69 enum comparison_code {
70 COMPCODE_FALSE = 0,
71 COMPCODE_LT = 1,
72 COMPCODE_EQ = 2,
73 COMPCODE_LE = 3,
74 COMPCODE_GT = 4,
75 COMPCODE_LTGT = 5,
76 COMPCODE_GE = 6,
77 COMPCODE_ORD = 7,
78 COMPCODE_UNORD = 8,
79 COMPCODE_UNLT = 9,
80 COMPCODE_UNEQ = 10,
81 COMPCODE_UNLE = 11,
82 COMPCODE_UNGT = 12,
83 COMPCODE_NE = 13,
84 COMPCODE_UNGE = 14,
85 COMPCODE_TRUE = 15
/* Forward declarations for the file-local helpers defined later in
   this file.  */
88 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
89 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
90 static bool negate_mathfn_p (enum built_in_function);
91 static bool negate_expr_p (tree);
92 static tree negate_expr (tree);
93 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
94 static tree associate_trees (tree, tree, enum tree_code, tree);
95 static tree const_binop (enum tree_code, tree, tree, int);
96 static enum comparison_code comparison_to_compcode (enum tree_code);
97 static enum tree_code compcode_to_comparison (enum comparison_code);
98 static tree combine_comparisons (enum tree_code, enum tree_code,
99 enum tree_code, tree, tree, tree);
100 static int truth_value_p (enum tree_code);
101 static int operand_equal_for_comparison_p (tree, tree, tree);
102 static int twoval_comparison_p (tree, tree *, tree *, int *);
103 static tree eval_subst (tree, tree, tree, tree, tree);
104 static tree pedantic_omit_one_operand (tree, tree, tree);
105 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
106 static tree make_bit_field_ref (tree, tree, int, int, int);
107 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
108 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
109 enum machine_mode *, int *, int *,
110 tree *, tree *);
111 static int all_ones_mask_p (tree, int);
112 static tree sign_bit_p (tree, tree);
113 static int simple_operand_p (tree);
114 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
115 static tree range_predecessor (tree);
116 static tree range_successor (tree);
117 static tree make_range (tree, int *, tree *, tree *);
118 static tree build_range_check (tree, tree, int, tree, tree);
119 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
120 tree);
121 static tree fold_range_test (enum tree_code, tree, tree, tree);
122 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
123 static tree unextend (tree, int, int, tree);
124 static tree fold_truthop (enum tree_code, tree, tree, tree);
125 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
126 static tree extract_muldiv (tree, tree, enum tree_code, tree);
127 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
128 static int multiple_of_p (tree, tree, tree);
129 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
130 tree, tree,
131 tree, tree, int);
132 static bool fold_real_zero_addition_p (tree, tree, int);
133 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
134 tree, tree, tree);
135 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
136 static tree fold_div_compare (enum tree_code, tree, tree, tree);
137 static bool reorder_operands_p (tree, tree);
138 static tree fold_negate_const (tree, tree);
139 static tree fold_not_const (tree, tree);
140 static tree fold_relational_const (enum tree_code, tree, tree, tree);
141 static int native_encode_expr (tree, unsigned char *, int);
142 static tree native_interpret_expr (tree, unsigned char *, int);
145 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
146 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
147 and SUM1. Then this yields nonzero if overflow occurred during the
148 addition.
150 Overflow occurs if A and B have the same sign, but A and SUM differ in
151 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
152 sign. */
/* ~((a)^(b)) has the sign bit set when A and B agree in sign;
   (a)^(sum) has it set when A and SUM disagree — the AND of the two
   is negative exactly in the overflow case described above.  */
153 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
155 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
156 We do that by representing the two-word integer in 4 words, with only
157 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
158 number. The value of the word is LOWPART + HIGHPART * BASE. */
/* BASE == 2**(HOST_BITS_PER_WIDE_INT / 2); each array "digit" holds a
   half-word, so digit products below never overflow a full word.  */
160 #define LOWPART(x) \
161 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
162 #define HIGHPART(x) \
163 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
164 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
166 /* Unpack a two-word integer into 4 words.
167 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
168 WORDS points to the array of HOST_WIDE_INTs. */
170 static void
171 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
173 words[0] = LOWPART (low);
174 words[1] = HIGHPART (low);
175 words[2] = LOWPART (hi);
176 words[3] = HIGHPART (hi);
179 /* Pack an array of 4 words into a two-word integer.
180 WORDS points to the array of words.
181 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
183 static void
184 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
185 HOST_WIDE_INT *hi)
187 *low = words[0] + words[1] * BASE;
188 *hi = words[2] + words[3] * BASE;
191 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
192 in overflow of the value, when >0 we are only interested in signed
193 overflow, for <0 we are interested in any overflow. OVERFLOWED
194 indicates whether overflow has already occurred. CONST_OVERFLOWED
195 indicates whether constant overflow has already occurred. We force
196 T's value to be within range of T's type (by setting to 0 or 1 all
197 the bits outside the type's range). We set TREE_OVERFLOWED if,
198 OVERFLOWED is nonzero,
199 or OVERFLOWABLE is >0 and signed overflow occurs
200 or OVERFLOWABLE is <0 and any overflow occurs
201 We set TREE_CONSTANT_OVERFLOWED if,
202 CONST_OVERFLOWED is nonzero
203 or we set TREE_OVERFLOWED.
204 We return either the original T, or a copy. */
206 tree
207 force_fit_type (tree t, int overflowable,
208 bool overflowed, bool overflowed_const)
210 unsigned HOST_WIDE_INT low;
211 HOST_WIDE_INT high;
212 unsigned int prec;
213 int sign_extended_type;
215 gcc_assert (TREE_CODE (t) == INTEGER_CST);
217 low = TREE_INT_CST_LOW (t);
218 high = TREE_INT_CST_HIGH (t);
/* Pointers and offsets are represented with POINTER_SIZE bits of
   precision regardless of what TYPE_PRECISION would say.  */
220 if (POINTER_TYPE_P (TREE_TYPE (t))
221 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE
222 prec = POINTER_SIZE;
223 else
224 prec = TYPE_PRECISION (TREE_TYPE (t));
225 /* Size types *are* sign extended. */
226 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
227 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
228 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
230 /* First clear all bits that are beyond the type's precision. */
232 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
234 else if (prec > HOST_BITS_PER_WIDE_INT)
235 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
236 else
238 high = 0;
239 if (prec < HOST_BITS_PER_WIDE_INT)
240 low &= ~((HOST_WIDE_INT) (-1) << prec);
/* Then, for sign-extended types, replicate the value's top bit into
   all bits above the precision.  */
243 if (!sign_extended_type)
244 /* No sign extension */;
245 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
246 /* Correct width already. */;
247 else if (prec > HOST_BITS_PER_WIDE_INT)
249 /* Sign extend top half? */
250 if (high & ((unsigned HOST_WIDE_INT)1
251 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
252 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
254 else if (prec == HOST_BITS_PER_WIDE_INT)
256 if ((HOST_WIDE_INT)low < 0)
257 high = -1;
259 else
261 /* Sign extend bottom half? */
262 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
264 high = -1;
265 low |= (HOST_WIDE_INT)(-1) << prec;
269 /* If the value changed, return a new node. */
270 if (overflowed || overflowed_const
271 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
273 t = build_int_cst_wide (TREE_TYPE (t), low, high);
/* Set the overflow flags per the rules in the header comment; the
   node is copied first so shared constants are not clobbered.  */
275 if (overflowed
276 || overflowable < 0
277 || (overflowable > 0 && sign_extended_type))
279 t = copy_node (t);
280 TREE_OVERFLOW (t) = 1;
281 TREE_CONSTANT_OVERFLOW (t) = 1;
283 else if (overflowed_const)
285 t = copy_node (t);
286 TREE_CONSTANT_OVERFLOW (t) = 1;
290 return t;
293 /* Add two doubleword integers with doubleword result.
294 Return nonzero if the operation overflows according to UNSIGNED_P.
295 Each argument is given as two `HOST_WIDE_INT' pieces.
296 One argument is L1 and H1; the other, L2 and H2.
297 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
300 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
301 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
302 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
303 bool unsigned_p)
305 unsigned HOST_WIDE_INT l;
306 HOST_WIDE_INT h;
308 l = l1 + l2;
309 h = h1 + h2 + (l < l1);
311 *lv = l;
312 *hv = h;
314 if (unsigned_p)
315 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
316 else
317 return OVERFLOW_SUM_SIGN (h1, h2, h);
320 /* Negate a doubleword integer with doubleword result.
321 Return nonzero if the operation overflows, assuming it's signed.
322 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
323 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
326 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
327 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
329 if (l1 == 0)
331 *lv = 0;
332 *hv = - h1;
333 return (*hv & h1) < 0;
335 else
337 *lv = -l1;
338 *hv = ~h1;
339 return 0;
343 /* Multiply two doubleword integers with doubleword result.
344 Return nonzero if the operation overflows according to UNSIGNED_P.
345 Each argument is given as two `HOST_WIDE_INT' pieces.
346 One argument is L1 and H1; the other, L2 and H2.
347 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
350 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
351 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
352 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
353 bool unsigned_p)
355 HOST_WIDE_INT arg1[4];
356 HOST_WIDE_INT arg2[4];
357 HOST_WIDE_INT prod[4 * 2];
358 unsigned HOST_WIDE_INT carry;
359 int i, j, k;
360 unsigned HOST_WIDE_INT toplow, neglow;
361 HOST_WIDE_INT tophigh, neghigh;
363 encode (arg1, l1, h1);
364 encode (arg2, l2, h2);
366 memset (prod, 0, sizeof prod);
368 for (i = 0; i < 4; i++)
370 carry = 0;
371 for (j = 0; j < 4; j++)
373 k = i + j;
374 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
375 carry += arg1[i] * arg2[j];
376 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
377 carry += prod[k];
378 prod[k] = LOWPART (carry);
379 carry = HIGHPART (carry);
381 prod[i + 4] = carry;
384 decode (prod, lv, hv);
385 decode (prod + 4, &toplow, &tophigh);
387 /* Unsigned overflow is immediate. */
388 if (unsigned_p)
389 return (toplow | tophigh) != 0;
391 /* Check for signed overflow by calculating the signed representation of the
392 top half of the result; it should agree with the low half's sign bit. */
393 if (h1 < 0)
395 neg_double (l2, h2, &neglow, &neghigh);
396 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
398 if (h2 < 0)
400 neg_double (l1, h1, &neglow, &neghigh);
401 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
403 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
406 /* Shift the doubleword integer in L1, H1 left by COUNT places
407 keeping only PREC bits of result.
408 Shift right if COUNT is negative.
409 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
410 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
412 void
413 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
414 HOST_WIDE_INT count, unsigned int prec,
415 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
417 unsigned HOST_WIDE_INT signmask;
419 if (count < 0)
421 rshift_double (l1, h1, -count, prec, lv, hv, arith);
422 return;
425 if (SHIFT_COUNT_TRUNCATED)
426 count %= prec;
428 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
430 /* Shifting by the host word size is undefined according to the
431 ANSI standard, so we must handle this as a special case. */
432 *hv = 0;
433 *lv = 0;
435 else if (count >= HOST_BITS_PER_WIDE_INT)
437 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
438 *lv = 0;
440 else
442 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
443 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
444 *lv = l1 << count;
447 /* Sign extend all bits that are beyond the precision. */
449 signmask = -((prec > HOST_BITS_PER_WIDE_INT
450 ? ((unsigned HOST_WIDE_INT) *hv
451 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
452 : (*lv >> (prec - 1))) & 1);
454 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
456 else if (prec >= HOST_BITS_PER_WIDE_INT)
458 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
459 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
461 else
463 *hv = signmask;
464 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
465 *lv |= signmask << prec;
469 /* Shift the doubleword integer in L1, H1 right by COUNT places
470 keeping only PREC bits of result. COUNT must be positive.
471 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
472 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
474 void
475 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
476 HOST_WIDE_INT count, unsigned int prec,
477 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
478 int arith)
480 unsigned HOST_WIDE_INT signmask;
482 signmask = (arith
483 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
484 : 0);
486 if (SHIFT_COUNT_TRUNCATED)
487 count %= prec;
489 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
491 /* Shifting by the host word size is undefined according to the
492 ANSI standard, so we must handle this as a special case. */
493 *hv = 0;
494 *lv = 0;
496 else if (count >= HOST_BITS_PER_WIDE_INT)
498 *hv = 0;
499 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
501 else
503 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
504 *lv = ((l1 >> count)
505 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
508 /* Zero / sign extend all bits that are beyond the precision. */
510 if (count >= (HOST_WIDE_INT)prec)
512 *hv = signmask;
513 *lv = signmask;
515 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
517 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
519 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
520 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
522 else
524 *hv = signmask;
525 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
526 *lv |= signmask << (prec - count);
530 /* Rotate the doubleword integer in L1, H1 left by COUNT places
531 keeping only PREC bits of result.
532 Rotate right if COUNT is negative.
533 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
535 void
536 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
537 HOST_WIDE_INT count, unsigned int prec,
538 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
540 unsigned HOST_WIDE_INT s1l, s2l;
541 HOST_WIDE_INT s1h, s2h;
543 count %= prec;
544 if (count < 0)
545 count += prec;
547 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
548 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
549 *lv = s1l | s2l;
550 *hv = s1h | s2h;
553 /* Rotate the doubleword integer in L1, H1 left by COUNT places
554 keeping only PREC bits of result. COUNT must be positive.
555 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
557 void
558 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
559 HOST_WIDE_INT count, unsigned int prec,
560 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
562 unsigned HOST_WIDE_INT s1l, s2l;
563 HOST_WIDE_INT s1h, s2h;
565 count %= prec;
566 if (count < 0)
567 count += prec;
569 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
570 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
571 *lv = s1l | s2l;
572 *hv = s1h | s2h;
575 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
576 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
577 CODE is a tree code for a kind of division, one of
578 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
579 or EXACT_DIV_EXPR
580 It controls how the quotient is rounded to an integer.
581 Return nonzero if the operation overflows.
582 UNS nonzero says do unsigned division. */
585 div_and_round_double (enum tree_code code, int uns,
586 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
587 HOST_WIDE_INT hnum_orig,
588 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
589 HOST_WIDE_INT hden_orig,
590 unsigned HOST_WIDE_INT *lquo,
591 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
592 HOST_WIDE_INT *hrem)
594 int quo_neg = 0;
595 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
596 HOST_WIDE_INT den[4], quo[4];
597 int i, j;
598 unsigned HOST_WIDE_INT work;
599 unsigned HOST_WIDE_INT carry = 0;
600 unsigned HOST_WIDE_INT lnum = lnum_orig;
601 HOST_WIDE_INT hnum = hnum_orig;
602 unsigned HOST_WIDE_INT lden = lden_orig;
603 HOST_WIDE_INT hden = hden_orig;
604 int overflow = 0;
/* Division by zero: flag overflow and divide by 1 so the code below
   still produces well-defined values.  */
606 if (hden == 0 && lden == 0)
607 overflow = 1, lden = 1;
609 /* Calculate quotient sign and convert operands to unsigned. */
610 if (!uns)
612 if (hnum < 0)
614 quo_neg = ~ quo_neg;
615 /* (minimum integer) / (-1) is the only overflow case. */
616 if (neg_double (lnum, hnum, &lnum, &hnum)
617 && ((HOST_WIDE_INT) lden & hden) == -1)
618 overflow = 1;
620 if (hden < 0)
622 quo_neg = ~ quo_neg;
623 neg_double (lden, hden, &lden, &hden);
627 if (hnum == 0 && hden == 0)
628 { /* single precision */
629 *hquo = *hrem = 0;
630 /* This unsigned division rounds toward zero. */
631 *lquo = lnum / lden;
632 goto finish_up;
635 if (hnum == 0)
636 { /* trivial case: dividend < divisor */
637 /* hden != 0 already checked. */
638 *hquo = *lquo = 0;
639 *hrem = hnum;
640 *lrem = lnum;
641 goto finish_up;
644 memset (quo, 0, sizeof quo);
646 memset (num, 0, sizeof num); /* to zero 9th element */
647 memset (den, 0, sizeof den);
649 encode (num, lnum, hnum);
650 encode (den, lden, hden);
652 /* Special code for when the divisor < BASE. */
653 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
655 /* hnum != 0 already checked. */
656 for (i = 4 - 1; i >= 0; i--)
658 work = num[i] + carry * BASE;
659 quo[i] = work / lden;
660 carry = work % lden;
663 else
665 /* Full double precision division,
666 with thanks to Don Knuth's "Seminumerical Algorithms". */
667 int num_hi_sig, den_hi_sig;
668 unsigned HOST_WIDE_INT quo_est, scale;
670 /* Find the highest nonzero divisor digit. */
671 for (i = 4 - 1;; i--)
672 if (den[i] != 0)
674 den_hi_sig = i;
675 break;
678 /* Insure that the first digit of the divisor is at least BASE/2.
679 This is required by the quotient digit estimation algorithm. */
681 scale = BASE / (den[den_hi_sig] + 1);
682 if (scale > 1)
683 { /* scale divisor and dividend */
684 carry = 0;
685 for (i = 0; i <= 4 - 1; i++)
687 work = (num[i] * scale) + carry;
688 num[i] = LOWPART (work);
689 carry = HIGHPART (work);
692 num[4] = carry;
693 carry = 0;
694 for (i = 0; i <= 4 - 1; i++)
696 work = (den[i] * scale) + carry;
697 den[i] = LOWPART (work);
698 carry = HIGHPART (work);
699 if (den[i] != 0) den_hi_sig = i;
703 num_hi_sig = 4;
705 /* Main loop */
706 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
708 /* Guess the next quotient digit, quo_est, by dividing the first
709 two remaining dividend digits by the high order quotient digit.
710 quo_est is never low and is at most 2 high. */
711 unsigned HOST_WIDE_INT tmp;
713 num_hi_sig = i + den_hi_sig + 1;
714 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
715 if (num[num_hi_sig] != den[den_hi_sig])
716 quo_est = work / den[den_hi_sig];
717 else
718 quo_est = BASE - 1;
720 /* Refine quo_est so it's usually correct, and at most one high. */
721 tmp = work - quo_est * den[den_hi_sig];
722 if (tmp < BASE
723 && (den[den_hi_sig - 1] * quo_est
724 > (tmp * BASE + num[num_hi_sig - 2])))
725 quo_est--;
727 /* Try QUO_EST as the quotient digit, by multiplying the
728 divisor by QUO_EST and subtracting from the remaining dividend.
729 Keep in mind that QUO_EST is the I - 1st digit. */
731 carry = 0;
732 for (j = 0; j <= den_hi_sig; j++)
734 work = quo_est * den[j] + carry;
735 carry = HIGHPART (work);
736 work = num[i + j] - LOWPART (work);
737 num[i + j] = LOWPART (work);
738 carry += HIGHPART (work) != 0;
741 /* If quo_est was high by one, then num[i] went negative and
742 we need to correct things. */
743 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
745 quo_est--;
746 carry = 0; /* add divisor back in */
747 for (j = 0; j <= den_hi_sig; j++)
749 work = num[i + j] + den[j] + carry;
750 carry = HIGHPART (work);
751 num[i + j] = LOWPART (work);
754 num [num_hi_sig] += carry;
757 /* Store the quotient digit. */
758 quo[i] = quo_est;
762 decode (quo, lquo, hquo);
764 finish_up:
765 /* If result is negative, make it so. */
766 if (quo_neg)
767 neg_double (*lquo, *hquo, lquo, hquo);
769 /* Compute trial remainder: rem = num - (quo * den) */
770 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
771 neg_double (*lrem, *hrem, lrem, hrem);
772 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* Adjust the truncated quotient per CODE's rounding mode, then
   recompute the remainder from the adjusted quotient.  */
774 switch (code)
776 case TRUNC_DIV_EXPR:
777 case TRUNC_MOD_EXPR: /* round toward zero */
778 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
779 return overflow;
781 case FLOOR_DIV_EXPR:
782 case FLOOR_MOD_EXPR: /* round toward negative infinity */
783 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
785 /* quo = quo - 1; */
786 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
787 lquo, hquo);
789 else
790 return overflow;
791 break;
793 case CEIL_DIV_EXPR:
794 case CEIL_MOD_EXPR: /* round toward positive infinity */
795 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
797 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
798 lquo, hquo);
800 else
801 return overflow;
802 break;
804 case ROUND_DIV_EXPR:
805 case ROUND_MOD_EXPR: /* round to closest integer */
807 unsigned HOST_WIDE_INT labs_rem = *lrem;
808 HOST_WIDE_INT habs_rem = *hrem;
809 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
810 HOST_WIDE_INT habs_den = hden, htwice;
812 /* Get absolute values. */
813 if (*hrem < 0)
814 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
815 if (hden < 0)
816 neg_double (lden, hden, &labs_den, &habs_den);
818 /* If (2 * abs (lrem) >= abs (lden)) */
819 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
820 labs_rem, habs_rem, &ltwice, &htwice);
822 if (((unsigned HOST_WIDE_INT) habs_den
823 < (unsigned HOST_WIDE_INT) htwice)
824 || (((unsigned HOST_WIDE_INT) habs_den
825 == (unsigned HOST_WIDE_INT) htwice)
826 && (labs_den < ltwice)))
828 if (*hquo < 0)
829 /* quo = quo - 1; */
830 add_double (*lquo, *hquo,
831 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
832 else
833 /* quo = quo + 1; */
834 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
835 lquo, hquo);
837 else
838 return overflow;
840 break;
842 default:
843 gcc_unreachable ();
846 /* Compute true remainder: rem = num - (quo * den) */
847 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
848 neg_double (*lrem, *hrem, lrem, hrem);
849 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
850 return overflow;
853 /* If ARG2 divides ARG1 with zero remainder, carries out the division
854 of type CODE and returns the quotient.
855 Otherwise returns NULL_TREE. */
857 static tree
858 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
860 unsigned HOST_WIDE_INT int1l, int2l;
861 HOST_WIDE_INT int1h, int2h;
862 unsigned HOST_WIDE_INT quol, reml;
863 HOST_WIDE_INT quoh, remh;
864 tree type = TREE_TYPE (arg1);
865 int uns = TYPE_UNSIGNED (type);
867 int1l = TREE_INT_CST_LOW (arg1);
868 int1h = TREE_INT_CST_HIGH (arg1);
869 int2l = TREE_INT_CST_LOW (arg2);
870 int2h = TREE_INT_CST_HIGH (arg2);
872 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
873 &quol, &quoh, &reml, &remh);
874 if (remh != 0 || reml != 0)
875 return NULL_TREE;
877 return build_int_cst_wide (type, quol, quoh);
880 /* Return true if the built-in mathematical function specified by CODE
881 is odd, i.e. -f(x) == f(-x). */
883 static bool
884 negate_mathfn_p (enum built_in_function code)
886 switch (code)
888 CASE_FLT_FN (BUILT_IN_ASIN):
889 CASE_FLT_FN (BUILT_IN_ASINH):
890 CASE_FLT_FN (BUILT_IN_ATAN):
891 CASE_FLT_FN (BUILT_IN_ATANH):
892 CASE_FLT_FN (BUILT_IN_CBRT):
893 CASE_FLT_FN (BUILT_IN_ERF):
894 CASE_FLT_FN (BUILT_IN_LLROUND):
895 CASE_FLT_FN (BUILT_IN_LROUND):
896 CASE_FLT_FN (BUILT_IN_ROUND):
897 CASE_FLT_FN (BUILT_IN_SIN):
898 CASE_FLT_FN (BUILT_IN_SINH):
899 CASE_FLT_FN (BUILT_IN_TAN):
900 CASE_FLT_FN (BUILT_IN_TANH):
901 CASE_FLT_FN (BUILT_IN_TRUNC):
902 return true;
/* The rint family rounds to the current rounding mode, which need not
   be symmetric about zero; treat them as odd only when
   -frounding-math is off.  */
904 CASE_FLT_FN (BUILT_IN_LLRINT):
905 CASE_FLT_FN (BUILT_IN_LRINT):
906 CASE_FLT_FN (BUILT_IN_NEARBYINT):
907 CASE_FLT_FN (BUILT_IN_RINT):
908 return !flag_rounding_math;
910 default:
911 break;
913 return false;
916 /* Check whether we may negate an integer constant T without causing
917 overflow. */
919 bool
920 may_negate_without_overflow_p (tree t)
922 unsigned HOST_WIDE_INT val;
923 unsigned int prec;
924 tree type;
926 gcc_assert (TREE_CODE (t) == INTEGER_CST);
928 type = TREE_TYPE (t);
929 if (TYPE_UNSIGNED (type))
930 return false;
932 prec = TYPE_PRECISION (type);
933 if (prec > HOST_BITS_PER_WIDE_INT)
935 if (TREE_INT_CST_LOW (t) != 0)
936 return true;
937 prec -= HOST_BITS_PER_WIDE_INT;
938 val = TREE_INT_CST_HIGH (t);
940 else
941 val = TREE_INT_CST_LOW (t);
942 if (prec < HOST_BITS_PER_WIDE_INT)
943 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
944 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
947 /* Determine whether an expression T can be cheaply negated using
948 the function negate_expr without introducing undefined overflow. */
950 static bool
951 negate_expr_p (tree t)
953 tree type;
955 if (t == 0)
956 return false;
958 type = TREE_TYPE (t);
960 STRIP_SIGN_NOPS (t);
961 switch (TREE_CODE (t))
/* Unsigned arithmetic wraps, and -fwrapv defines signed wrapping, so
   negation is always safe in those cases (unless -ftrapv traps).  */
963 case INTEGER_CST:
964 if (TYPE_UNSIGNED (type)
965 || (flag_wrapv && ! flag_trapv))
966 return true;
968 /* Check that -CST will not overflow type. */
969 return may_negate_without_overflow_p (t);
970 case BIT_NOT_EXPR:
971 return INTEGRAL_TYPE_P (type)
972 && (TYPE_UNSIGNED (type)
973 || (flag_wrapv && !flag_trapv));
975 case REAL_CST:
976 case NEGATE_EXPR:
977 return true;
979 case COMPLEX_CST:
980 return negate_expr_p (TREE_REALPART (t))
981 && negate_expr_p (TREE_IMAGPART (t));
983 case PLUS_EXPR:
984 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
985 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
986 return false;
987 /* -(A + B) -> (-B) - A. */
988 if (negate_expr_p (TREE_OPERAND (t, 1))
989 && reorder_operands_p (TREE_OPERAND (t, 0),
990 TREE_OPERAND (t, 1)))
991 return true;
992 /* -(A + B) -> (-A) - B. */
993 return negate_expr_p (TREE_OPERAND (t, 0));
995 case MINUS_EXPR:
996 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
997 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
998 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
999 && reorder_operands_p (TREE_OPERAND (t, 0),
1000 TREE_OPERAND (t, 1));
1002 case MULT_EXPR:
1003 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1004 break;
1006 /* Fall through. */
/* For products and quotients it suffices to negate one operand.  */
1008 case RDIV_EXPR:
1009 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1010 return negate_expr_p (TREE_OPERAND (t, 1))
1011 || negate_expr_p (TREE_OPERAND (t, 0));
1012 break;
1014 case TRUNC_DIV_EXPR:
1015 case ROUND_DIV_EXPR:
1016 case FLOOR_DIV_EXPR:
1017 case CEIL_DIV_EXPR:
1018 case EXACT_DIV_EXPR:
1019 if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
1020 break;
1021 return negate_expr_p (TREE_OPERAND (t, 1))
1022 || negate_expr_p (TREE_OPERAND (t, 0));
1024 case NOP_EXPR:
1025 /* Negate -((double)float) as (double)(-float). */
1026 if (TREE_CODE (type) == REAL_TYPE)
1028 tree tem = strip_float_extensions (t);
1029 if (tem != t)
1030 return negate_expr_p (tem);
1032 break;
1034 case CALL_EXPR:
1035 /* Negate -f(x) as f(-x). */
1036 if (negate_mathfn_p (builtin_mathfn_code (t)))
1037 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1038 break;
1040 case RSHIFT_EXPR:
1041 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1042 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1044 tree op1 = TREE_OPERAND (t, 1);
1045 if (TREE_INT_CST_HIGH (op1) == 0
1046 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1047 == TREE_INT_CST_LOW (op1))
1048 return true;
1050 break;
1052 default:
1053 break;
1055 return false;
1058 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1059 simplification is possible.
1060 If negate_expr_p would return true for T, NULL_TREE will never be
1061 returned. */
1063 static tree
1064 fold_negate_expr (tree t)
1066 tree type = TREE_TYPE (t);
1067 tree tem;
1069 switch (TREE_CODE (t))
1071 /* Convert - (~A) to A + 1. */
1072 case BIT_NOT_EXPR:
1073 if (INTEGRAL_TYPE_P (type))
1074 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1075 build_int_cst (type, 1));
1076 break;
/* Negate a constant at compile time; keep the runtime NEGATE_EXPR only
   when -ftrapv must observe the signed overflow of the negation. */
1078 case INTEGER_CST:
1079 tem = fold_negate_const (t, type);
1080 if (! TREE_OVERFLOW (tem)
1081 || TYPE_UNSIGNED (type)
1082 || ! flag_trapv)
1083 return tem;
1084 break;
1086 case REAL_CST:
1087 tem = fold_negate_const (t, type);
1088 /* Two's complement FP formats, such as c4x, may overflow. */
1089 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1090 return tem;
1091 break;
/* Negate a complex constant part-wise; only succeed if both parts
   folded to constants again. */
1093 case COMPLEX_CST:
1095 tree rpart = negate_expr (TREE_REALPART (t));
1096 tree ipart = negate_expr (TREE_IMAGPART (t));
1098 if ((TREE_CODE (rpart) == REAL_CST
1099 && TREE_CODE (ipart) == REAL_CST)
1100 || (TREE_CODE (rpart) == INTEGER_CST
1101 && TREE_CODE (ipart) == INTEGER_CST))
1102 return build_complex (type, rpart, ipart);
1104 break;
/* -(-A) -> A. */
1106 case NEGATE_EXPR:
1107 return TREE_OPERAND (t, 0);
1109 case PLUS_EXPR:
1110 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1111 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1113 /* -(A + B) -> (-B) - A. */
1114 if (negate_expr_p (TREE_OPERAND (t, 1))
1115 && reorder_operands_p (TREE_OPERAND (t, 0),
1116 TREE_OPERAND (t, 1)))
1118 tem = negate_expr (TREE_OPERAND (t, 1));
1119 return fold_build2 (MINUS_EXPR, type,
1120 tem, TREE_OPERAND (t, 0));
1123 /* -(A + B) -> (-A) - B. */
1124 if (negate_expr_p (TREE_OPERAND (t, 0)))
1126 tem = negate_expr (TREE_OPERAND (t, 0));
1127 return fold_build2 (MINUS_EXPR, type,
1128 tem, TREE_OPERAND (t, 1));
1131 break;
1133 case MINUS_EXPR:
1134 /* - (A - B) -> B - A */
1135 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1136 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1137 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1138 return fold_build2 (MINUS_EXPR, type,
1139 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1140 break;
/* For unsigned types, do not push the negation into the operands. */
1142 case MULT_EXPR:
1143 if (TYPE_UNSIGNED (type))
1144 break;
1146 /* Fall through. */
/* -(A * B) -> A * -B or -A * B, preferring the operand that negates. */
1148 case RDIV_EXPR:
1149 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1151 tem = TREE_OPERAND (t, 1);
1152 if (negate_expr_p (tem))
1153 return fold_build2 (TREE_CODE (t), type,
1154 TREE_OPERAND (t, 0), negate_expr (tem));
1155 tem = TREE_OPERAND (t, 0);
1156 if (negate_expr_p (tem))
1157 return fold_build2 (TREE_CODE (t), type,
1158 negate_expr (tem), TREE_OPERAND (t, 1));
1160 break;
/* Integer divisions: same operand-negation trick, but only for signed
   non-wrapping types. */
1162 case TRUNC_DIV_EXPR:
1163 case ROUND_DIV_EXPR:
1164 case FLOOR_DIV_EXPR:
1165 case CEIL_DIV_EXPR:
1166 case EXACT_DIV_EXPR:
1167 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
1169 tem = TREE_OPERAND (t, 1);
1170 if (negate_expr_p (tem))
1171 return fold_build2 (TREE_CODE (t), type,
1172 TREE_OPERAND (t, 0), negate_expr (tem));
1173 tem = TREE_OPERAND (t, 0);
1174 if (negate_expr_p (tem))
1175 return fold_build2 (TREE_CODE (t), type,
1176 negate_expr (tem), TREE_OPERAND (t, 1));
1178 break;
1180 case NOP_EXPR:
1181 /* Convert -((double)float) into (double)(-float). */
1182 if (TREE_CODE (type) == REAL_TYPE)
1184 tem = strip_float_extensions (t);
1185 if (tem != t && negate_expr_p (tem))
1186 return negate_expr (tem);
1188 break;
1190 case CALL_EXPR:
1191 /* Negate -f(x) as f(-x). */
1192 if (negate_mathfn_p (builtin_mathfn_code (t))
1193 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1195 tree fndecl, arg, arglist;
1197 fndecl = get_callee_fndecl (t);
1198 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1199 arglist = build_tree_list (NULL_TREE, arg);
1200 return build_function_call_expr (fndecl, arglist);
1202 break;
1204 case RSHIFT_EXPR:
1205 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
/* Only when the shift count equals precision - 1, i.e. the shift
   extracts the sign bit; flip the signedness of the shifted type. */
1206 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1208 tree op1 = TREE_OPERAND (t, 1);
1209 if (TREE_INT_CST_HIGH (op1) == 0
1210 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1211 == TREE_INT_CST_LOW (op1))
1213 tree ntype = TYPE_UNSIGNED (type)
1214 ? lang_hooks.types.signed_type (type)
1215 : lang_hooks.types.unsigned_type (type);
1216 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1217 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1218 return fold_convert (type, temp);
1221 break;
1223 default:
1224 break;
1227 return NULL_TREE;
1230 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1231 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1232 return NULL_TREE. */
1234 static tree
1235 negate_expr (tree t)
1237 tree type, tem;
1239 if (t == NULL_TREE)
1240 return NULL_TREE;
/* TYPE is captured before stripping sign-preserving conversions so the
   result can be converted back to the caller's type at the end. */
1242 type = TREE_TYPE (t);
1243 STRIP_SIGN_NOPS (t);
1245 tem = fold_negate_expr (t);
/* If no simplification applied, fall back to an explicit NEGATE_EXPR. */
1246 if (!tem)
1247 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1248 return fold_convert (type, tem);
1251 /* Split a tree IN into a constant, literal and variable parts that could be
1252 combined with CODE to make IN. "constant" means an expression with
1253 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1254 commutative arithmetic operation. Store the constant part into *CONP,
1255 the literal in *LITP and return the variable part. If a part isn't
1256 present, set it to null. If the tree does not decompose in this way,
1257 return the entire tree as the variable part and the other parts as null.
1259 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1260 case, we negate an operand that was subtracted. Except if it is a
1261 literal for which we use *MINUS_LITP instead.
1263 If NEGATE_P is true, we are negating all of IN, again except a literal
1264 for which we use *MINUS_LITP instead.
1266 If IN is itself a literal or constant, return it as appropriate.
1268 Note that we do not guarantee that any of the three values will be the
1269 same type as IN, but they will have the same signedness and mode. */
1271 static tree
1272 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1273 tree *minus_litp, int negate_p)
1275 tree var = 0;
1277 *conp = 0;
1278 *litp = 0;
1279 *minus_litp = 0;
1281 /* Strip any conversions that don't change the machine mode or signedness. */
1282 STRIP_SIGN_NOPS (in);
1284 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1285 *litp = in;
1286 else if (TREE_CODE (in) == code
1287 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1288 /* We can associate addition and subtraction together (even
1289 though the C standard doesn't say so) for integers because
1290 the value is not affected. For reals, the value might be
1291 affected, so we can't. */
1292 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1293 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1295 tree op0 = TREE_OPERAND (in, 0);
1296 tree op1 = TREE_OPERAND (in, 1);
/* neg1_p records whether op1 was subtracted; the neg_*_p flags track
   which extracted part needs that negation applied. */
1297 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1298 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1300 /* First see if either of the operands is a literal, then a constant. */
1301 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1302 *litp = op0, op0 = 0;
1303 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1304 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1306 if (op0 != 0 && TREE_CONSTANT (op0))
1307 *conp = op0, op0 = 0;
1308 else if (op1 != 0 && TREE_CONSTANT (op1))
1309 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1311 /* If we haven't dealt with either operand, this is not a case we can
1312 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1313 if (op0 != 0 && op1 != 0)
1314 var = in;
1315 else if (op0 != 0)
1316 var = op0;
1317 else
1318 var = op1, neg_var_p = neg1_p;
1320 /* Now do any needed negations. */
/* A subtracted literal is reported through *MINUS_LITP rather than
   being negated in place. */
1321 if (neg_litp_p)
1322 *minus_litp = *litp, *litp = 0;
1323 if (neg_conp_p)
1324 *conp = negate_expr (*conp);
1325 if (neg_var_p)
1326 var = negate_expr (var);
1328 else if (TREE_CONSTANT (in))
1329 *conp = in;
1330 else
1331 var = in;
/* If the caller asked to negate all of IN, negate each extracted part,
   swapping the literal between *LITP and *MINUS_LITP. */
1333 if (negate_p)
1335 if (*litp)
1336 *minus_litp = *litp, *litp = 0;
1337 else if (*minus_litp)
1338 *litp = *minus_litp, *minus_litp = 0;
1339 *conp = negate_expr (*conp);
1340 var = negate_expr (var);
1343 return var;
1346 /* Re-associate trees split by the above function. T1 and T2 are either
1347 expressions to associate or null. Return the new expression, if any. If
1348 we build an operation, do it in TYPE and with CODE. */
1350 static tree
1351 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1353 if (t1 == 0)
1354 return t2;
1355 else if (t2 == 0)
1356 return t1;
1358 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1359 try to fold this since we will have infinite recursion. But do
1360 deal with any NEGATE_EXPRs. */
1361 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1362 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1364 if (code == PLUS_EXPR)
/* A + (-B) and (-A) + B become subtractions so the NEGATE_EXPR
   disappears without invoking fold. */
1366 if (TREE_CODE (t1) == NEGATE_EXPR)
1367 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1368 fold_convert (type, TREE_OPERAND (t1, 0)));
1369 else if (TREE_CODE (t2) == NEGATE_EXPR)
1370 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1371 fold_convert (type, TREE_OPERAND (t2, 0)));
1372 else if (integer_zerop (t2))
1373 return fold_convert (type, t1);
1375 else if (code == MINUS_EXPR)
1377 if (integer_zerop (t2))
1378 return fold_convert (type, t1);
/* Build without folding to avoid the recursion noted above. */
1381 return build2 (code, type, fold_convert (type, t1),
1382 fold_convert (type, t2));
/* Neither operand can recurse, so full folding is safe here. */
1385 return fold_build2 (code, type, fold_convert (type, t1),
1386 fold_convert (type, t2));
1389 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1390 for use in int_const_binop, size_binop and size_diffop. */
1392 static bool
1393 int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
1395 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1396 return false;
1397 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1398 return false;
/* For shifts and rotates the count operand's type need not match the
   shifted type, so any integer/pointer pair is acceptable. */
1400 switch (code)
1402 case LSHIFT_EXPR:
1403 case RSHIFT_EXPR:
1404 case LROTATE_EXPR:
1405 case RROTATE_EXPR:
1406 return true;
1408 default:
1409 break;
/* Otherwise require matching signedness, precision and machine mode. */
1412 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1413 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1414 && TYPE_MODE (type1) == TYPE_MODE (type2);
1418 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1419 to produce a new constant. Return NULL_TREE if we don't know how
1420 to evaluate CODE at compile-time.
1422 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1424 tree
1425 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Double-word arithmetic: each value is a (low, high) HOST_WIDE_INT pair. */
1427 unsigned HOST_WIDE_INT int1l, int2l;
1428 HOST_WIDE_INT int1h, int2h;
1429 unsigned HOST_WIDE_INT low;
1430 HOST_WIDE_INT hi;
1431 unsigned HOST_WIDE_INT garbagel;
1432 HOST_WIDE_INT garbageh;
1433 tree t;
1434 tree type = TREE_TYPE (arg1);
1435 int uns = TYPE_UNSIGNED (type);
1436 int is_sizetype
1437 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1438 int overflow = 0;
1440 int1l = TREE_INT_CST_LOW (arg1);
1441 int1h = TREE_INT_CST_HIGH (arg1);
1442 int2l = TREE_INT_CST_LOW (arg2);
1443 int2h = TREE_INT_CST_HIGH (arg2);
1445 switch (code)
1447 case BIT_IOR_EXPR:
1448 low = int1l | int2l, hi = int1h | int2h;
1449 break;
1451 case BIT_XOR_EXPR:
1452 low = int1l ^ int2l, hi = int1h ^ int2h;
1453 break;
1455 case BIT_AND_EXPR:
1456 low = int1l & int2l, hi = int1h & int2h;
1457 break;
/* A right shift is a left shift by the negated count; fall through. */
1459 case RSHIFT_EXPR:
1460 int2l = -int2l;
1461 case LSHIFT_EXPR:
1462 /* It's unclear from the C standard whether shifts can overflow.
1463 The following code ignores overflow; perhaps a C standard
1464 interpretation ruling is needed. */
1465 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1466 &low, &hi, !uns);
1467 break;
/* Likewise, a right rotate is a left rotate by the negated count. */
1469 case RROTATE_EXPR:
1470 int2l = - int2l;
1471 case LROTATE_EXPR:
1472 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1473 &low, &hi);
1474 break;
1476 case PLUS_EXPR:
1477 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1478 break;
/* Subtraction is addition of the negated second operand. */
1480 case MINUS_EXPR:
1481 neg_double (int2l, int2h, &low, &hi);
1482 add_double (int1l, int1h, low, hi, &low, &hi);
1483 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1484 break;
1486 case MULT_EXPR:
1487 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1488 break;
1490 case TRUNC_DIV_EXPR:
1491 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1492 case EXACT_DIV_EXPR:
1493 /* This is a shortcut for a common special case. */
/* Both operands fit in a single positive HOST_WIDE_INT: use host
   division directly. */
1494 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1495 && ! TREE_CONSTANT_OVERFLOW (arg1)
1496 && ! TREE_CONSTANT_OVERFLOW (arg2)
1497 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1499 if (code == CEIL_DIV_EXPR)
1500 int1l += int2l - 1;
1502 low = int1l / int2l, hi = 0;
1503 break;
1506 /* ... fall through ... */
1508 case ROUND_DIV_EXPR:
/* Division by zero is not evaluated at compile time. */
1509 if (int2h == 0 && int2l == 0)
1510 return NULL_TREE;
1511 if (int2h == 0 && int2l == 1)
1513 low = int1l, hi = int1h;
1514 break;
/* X / X == 1 for any nonzero X. */
1516 if (int1l == int2l && int1h == int2h
1517 && ! (int1l == 0 && int1h == 0))
1519 low = 1, hi = 0;
1520 break;
1522 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1523 &low, &hi, &garbagel, &garbageh);
1524 break;
1526 case TRUNC_MOD_EXPR:
1527 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1528 /* This is a shortcut for a common special case. */
1529 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1530 && ! TREE_CONSTANT_OVERFLOW (arg1)
1531 && ! TREE_CONSTANT_OVERFLOW (arg2)
1532 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1534 if (code == CEIL_MOD_EXPR)
1535 int1l += int2l - 1;
1536 low = int1l % int2l, hi = 0;
1537 break;
1540 /* ... fall through ... */
1542 case ROUND_MOD_EXPR:
1543 if (int2h == 0 && int2l == 0)
1544 return NULL_TREE;
/* For modulus, keep the remainder and discard the quotient. */
1545 overflow = div_and_round_double (code, uns,
1546 int1l, int1h, int2l, int2h,
1547 &garbagel, &garbageh, &low, &hi);
1548 break;
1550 case MIN_EXPR:
1551 case MAX_EXPR:
/* LOW first holds the boolean "arg1 < arg2", then is reused below for
   the result's low word. */
1552 if (uns)
1553 low = (((unsigned HOST_WIDE_INT) int1h
1554 < (unsigned HOST_WIDE_INT) int2h)
1555 || (((unsigned HOST_WIDE_INT) int1h
1556 == (unsigned HOST_WIDE_INT) int2h)
1557 && int1l < int2l));
1558 else
1559 low = (int1h < int2h
1560 || (int1h == int2h && int1l < int2l));
1562 if (low == (code == MIN_EXPR))
1563 low = int1l, hi = int1h;
1564 else
1565 low = int2l, hi = int2h;
1566 break;
1568 default:
1569 return NULL_TREE;
1572 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1574 if (notrunc)
1576 /* Propagate overflow flags ourselves. */
/* Signed (or sizetype) arithmetic overflow, or overflow already present
   on an operand, marks the result. */
1577 if (((!uns || is_sizetype) && overflow)
1578 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1580 t = copy_node (t);
1581 TREE_OVERFLOW (t) = 1;
1582 TREE_CONSTANT_OVERFLOW (t) = 1;
1584 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1586 t = copy_node (t);
1587 TREE_CONSTANT_OVERFLOW (t) = 1;
1590 else
/* Truncate to the type and let force_fit_type set the overflow bits. */
1591 t = force_fit_type (t, 1,
1592 ((!uns || is_sizetype) && overflow)
1593 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1594 TREE_CONSTANT_OVERFLOW (arg1)
1595 | TREE_CONSTANT_OVERFLOW (arg2));
1597 return t;
1600 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1601 constant. We assume ARG1 and ARG2 have the same data type, or at least
1602 are the same kind of constant and the same machine mode. Return zero if
1603 combining the constants is not allowed in the current operating mode.
1605 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1607 static tree
1608 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1610 /* Sanity check for the recursive cases. */
1611 if (!arg1 || !arg2)
1612 return NULL_TREE;
1614 STRIP_NOPS (arg1);
1615 STRIP_NOPS (arg2);
1617 if (TREE_CODE (arg1) == INTEGER_CST)
1618 return int_const_binop (code, arg1, arg2, notrunc);
1620 if (TREE_CODE (arg1) == REAL_CST)
1622 enum machine_mode mode;
1623 REAL_VALUE_TYPE d1;
1624 REAL_VALUE_TYPE d2;
1625 REAL_VALUE_TYPE value;
1626 REAL_VALUE_TYPE result;
1627 bool inexact;
1628 tree t, type;
1630 /* The following codes are handled by real_arithmetic. */
1631 switch (code)
1633 case PLUS_EXPR:
1634 case MINUS_EXPR:
1635 case MULT_EXPR:
1636 case RDIV_EXPR:
1637 case MIN_EXPR:
1638 case MAX_EXPR:
1639 break;
1641 default:
1642 return NULL_TREE;
1645 d1 = TREE_REAL_CST (arg1);
1646 d2 = TREE_REAL_CST (arg2);
1648 type = TREE_TYPE (arg1);
1649 mode = TYPE_MODE (type);
1651 /* Don't perform operation if we honor signaling NaNs and
1652 either operand is a NaN. */
1653 if (HONOR_SNANS (mode)
1654 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1655 return NULL_TREE;
1657 /* Don't perform operation if it would raise a division
1658 by zero exception. */
1659 if (code == RDIV_EXPR
1660 && REAL_VALUES_EQUAL (d2, dconst0)
1661 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1662 return NULL_TREE;
1664 /* If either operand is a NaN, just return it. Otherwise, set up
1665 for floating-point trap; we return an overflow. */
1666 if (REAL_VALUE_ISNAN (d1))
1667 return arg1;
1668 else if (REAL_VALUE_ISNAN (d2))
1669 return arg2;
/* Compute in internal precision, then round to the type's format. */
1671 inexact = real_arithmetic (&value, code, &d1, &d2);
1672 real_convert (&result, mode, &value);
1674 /* Don't constant fold this floating point operation if
1675 the result has overflowed and flag_trapping_math. */
1676 if (flag_trapping_math
1677 && MODE_HAS_INFINITIES (mode)
1678 && REAL_VALUE_ISINF (result)
1679 && !REAL_VALUE_ISINF (d1)
1680 && !REAL_VALUE_ISINF (d2))
1681 return NULL_TREE;
1683 /* Don't constant fold this floating point operation if the
1684 result may dependent upon the run-time rounding mode and
1685 flag_rounding_math is set, or if GCC's software emulation
1686 is unable to accurately represent the result. */
1687 if ((flag_rounding_math
1688 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1689 && !flag_unsafe_math_optimizations))
1690 && (inexact || !real_identical (&result, &value)))
1691 return NULL_TREE;
1693 t = build_real (type, result);
1695 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1696 TREE_CONSTANT_OVERFLOW (t)
1697 = TREE_OVERFLOW (t)
1698 | TREE_CONSTANT_OVERFLOW (arg1)
1699 | TREE_CONSTANT_OVERFLOW (arg2);
1700 return t;
/* Complex constants: recurse part-wise via const_binop. */
1703 if (TREE_CODE (arg1) == COMPLEX_CST)
1705 tree type = TREE_TYPE (arg1);
1706 tree r1 = TREE_REALPART (arg1);
1707 tree i1 = TREE_IMAGPART (arg1);
1708 tree r2 = TREE_REALPART (arg2);
1709 tree i2 = TREE_IMAGPART (arg2);
1710 tree real, imag;
1712 switch (code)
1714 case PLUS_EXPR:
1715 case MINUS_EXPR:
1716 real = const_binop (code, r1, r2, notrunc);
1717 imag = const_binop (code, i1, i2, notrunc);
1718 break;
/* (a+bi)*(c+di) = (ac - bd) + (ad + bc)i. */
1720 case MULT_EXPR:
1721 real = const_binop (MINUS_EXPR,
1722 const_binop (MULT_EXPR, r1, r2, notrunc),
1723 const_binop (MULT_EXPR, i1, i2, notrunc),
1724 notrunc);
1725 imag = const_binop (PLUS_EXPR,
1726 const_binop (MULT_EXPR, r1, i2, notrunc),
1727 const_binop (MULT_EXPR, i1, r2, notrunc),
1728 notrunc);
1729 break;
/* (a+bi)/(c+di) = ((ac+bd) + (bc-ad)i) / (c*c + d*d). */
1731 case RDIV_EXPR:
1733 tree magsquared
1734 = const_binop (PLUS_EXPR,
1735 const_binop (MULT_EXPR, r2, r2, notrunc),
1736 const_binop (MULT_EXPR, i2, i2, notrunc),
1737 notrunc);
1738 tree t1
1739 = const_binop (PLUS_EXPR,
1740 const_binop (MULT_EXPR, r1, r2, notrunc),
1741 const_binop (MULT_EXPR, i1, i2, notrunc),
1742 notrunc);
1743 tree t2
1744 = const_binop (MINUS_EXPR,
1745 const_binop (MULT_EXPR, i1, r2, notrunc),
1746 const_binop (MULT_EXPR, r1, i2, notrunc),
1747 notrunc);
1749 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1750 code = TRUNC_DIV_EXPR;
1752 real = const_binop (code, t1, magsquared, notrunc);
1753 imag = const_binop (code, t2, magsquared, notrunc);
1755 break;
1757 default:
1758 return NULL_TREE;
/* Only succeed if both parts folded to constants. */
1761 if (real && imag)
1762 return build_complex (type, real, imag);
1765 return NULL_TREE;
1768 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1769 indicates which particular sizetype to create. */
1771 tree
1772 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
/* Look up the requested sizetype variant in sizetype_tab and build the
   constant in it. */
1774 return build_int_cst (sizetype_tab[(int) kind], number);
1777 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1778 is a tree code. The type of the result is taken from the operands.
1779 Both must be equivalent integer types, ala int_binop_types_match_p.
1780 If the operands are constant, so is the result. */
1782 tree
1783 size_binop (enum tree_code code, tree arg0, tree arg1)
1785 tree type = TREE_TYPE (arg0);
1787 if (arg0 == error_mark_node || arg1 == error_mark_node)
1788 return error_mark_node;
1790 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1791 TREE_TYPE (arg1)));
1793 /* Handle the special case of two integer constants faster. */
1794 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1796 /* And some specific cases even faster than that. */
/* 0 + X, X +- 0 and 1 * X need no arithmetic at all. */
1797 if (code == PLUS_EXPR && integer_zerop (arg0))
1798 return arg1;
1799 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1800 && integer_zerop (arg1))
1801 return arg0;
1802 else if (code == MULT_EXPR && integer_onep (arg0))
1803 return arg1;
1805 /* Handle general case of two integer constants. */
1806 return int_const_binop (code, arg0, arg1, 0);
/* Non-constant operands: build and fold the expression normally. */
1809 return fold_build2 (code, type, arg0, arg1);
1812 /* Given two values, either both of sizetype or both of bitsizetype,
1813 compute the difference between the two values. Return the value
1814 in signed type corresponding to the type of the operands. */
1816 tree
1817 size_diffop (tree arg0, tree arg1)
1819 tree type = TREE_TYPE (arg0);
1820 tree ctype;
1822 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1823 TREE_TYPE (arg1)));
1825 /* If the type is already signed, just do the simple thing. */
1826 if (!TYPE_UNSIGNED (type))
1827 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart of the (unsigned) operand type. */
1829 if (type == sizetype)
1830 ctype = ssizetype;
1831 else if (type == bitsizetype)
1832 ctype = sbitsizetype;
1833 else
1834 ctype = lang_hooks.types.signed_type (type);
1836 /* If either operand is not a constant, do the conversions to the signed
1837 type and subtract. The hardware will do the right thing with any
1838 overflow in the subtraction. */
1839 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1840 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1841 fold_convert (ctype, arg1));
1843 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1844 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1845 overflow) and negate (which can't either). Special-case a result
1846 of zero while we're here. */
1847 if (tree_int_cst_equal (arg0, arg1))
1848 return build_int_cst (ctype, 0);
1849 else if (tree_int_cst_lt (arg1, arg0))
1850 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1851 else
1852 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1853 fold_convert (ctype, size_binop (MINUS_EXPR,
1854 arg1, arg0)));
1857 /* A subroutine of fold_convert_const handling conversions of an
1858 INTEGER_CST to another integer type. */
1860 static tree
1861 fold_convert_const_int_from_int (tree type, tree arg1)
1863 tree t;
1865 /* Given an integer constant, make new constant with new type,
1866 appropriately sign-extended or truncated. */
1867 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1868 TREE_INT_CST_HIGH (arg1));
/* Overflow when a value with the high bit set is converted from an
   unsigned type to a signed one; also inherit ARG1's overflow bits. */
1870 t = force_fit_type (t,
1871 /* Don't set the overflow when
1872 converting a pointer */
1873 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1874 (TREE_INT_CST_HIGH (arg1) < 0
1875 && (TYPE_UNSIGNED (type)
1876 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1877 | TREE_OVERFLOW (arg1),
1878 TREE_CONSTANT_OVERFLOW (arg1));
1880 return t;
1883 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1884 to an integer type. */
1886 static tree
1887 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1889 int overflow = 0;
1890 tree t;
1892 /* The following code implements the floating point to integer
1893 conversion rules required by the Java Language Specification,
1894 that IEEE NaNs are mapped to zero and values that overflow
1895 the target precision saturate, i.e. values greater than
1896 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1897 are mapped to INT_MIN. These semantics are allowed by the
1898 C and C++ standards that simply state that the behavior of
1899 FP-to-integer conversion is unspecified upon overflow. */
1901 HOST_WIDE_INT high, low;
1902 REAL_VALUE_TYPE r;
1903 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1905 switch (code)
1907 case FIX_TRUNC_EXPR:
1908 real_trunc (&r, VOIDmode, &x);
1909 break;
1911 default:
1912 gcc_unreachable ();
1915 /* If R is NaN, return zero and show we have an overflow. */
1916 if (REAL_VALUE_ISNAN (r))
1918 overflow = 1;
1919 high = 0;
1920 low = 0;
1923 /* See if R is less than the lower bound or greater than the
1924 upper bound. */
/* Saturate at TYPE's minimum when below range. */
1926 if (! overflow)
1928 tree lt = TYPE_MIN_VALUE (type);
1929 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1930 if (REAL_VALUES_LESS (r, l))
1932 overflow = 1;
1933 high = TREE_INT_CST_HIGH (lt);
1934 low = TREE_INT_CST_LOW (lt);
/* Saturate at TYPE's maximum when above range (if a maximum exists). */
1938 if (! overflow)
1940 tree ut = TYPE_MAX_VALUE (type);
1941 if (ut)
1943 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1944 if (REAL_VALUES_LESS (u, r))
1946 overflow = 1;
1947 high = TREE_INT_CST_HIGH (ut);
1948 low = TREE_INT_CST_LOW (ut);
1953 if (! overflow)
1954 REAL_VALUE_TO_INT (&low, &high, r);
1956 t = build_int_cst_wide (type, low, high);
1958 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1959 TREE_CONSTANT_OVERFLOW (arg1));
1960 return t;
1963 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1964 to another floating point type. */
1966 static tree
1967 fold_convert_const_real_from_real (tree type, tree arg1)
1969 REAL_VALUE_TYPE value;
1970 tree t;
/* Round ARG1's value to TYPE's machine mode, then propagate ARG1's
   overflow flags to the new constant. */
1972 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1973 t = build_real (type, value);
1975 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1976 TREE_CONSTANT_OVERFLOW (t)
1977 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1978 return t;
1981 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1982 type TYPE. If no simplification can be done return NULL_TREE. */
1984 static tree
1985 fold_convert_const (enum tree_code code, tree type, tree arg1)
1987 if (TREE_TYPE (arg1) == type)
1988 return arg1;
/* Dispatch on the target type and the constant's kind. */
1990 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1992 if (TREE_CODE (arg1) == INTEGER_CST)
1993 return fold_convert_const_int_from_int (type, arg1);
1994 else if (TREE_CODE (arg1) == REAL_CST)
1995 return fold_convert_const_int_from_real (code, type, arg1);
1997 else if (TREE_CODE (type) == REAL_TYPE)
1999 if (TREE_CODE (arg1) == INTEGER_CST)
2000 return build_real_from_int_cst (type, arg1);
2001 if (TREE_CODE (arg1) == REAL_CST)
2002 return fold_convert_const_real_from_real (type, arg1);
2004 return NULL_TREE;
2007 /* Construct a vector of zero elements of vector type TYPE. */
2009 static tree
2010 build_zero_vector (tree type)
2012 tree elem, list;
2013 int i, units;
/* Fold the scalar zero to the vector's element type once, then build a
   TREE_LIST of UNITS copies of it. */
2015 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2016 units = TYPE_VECTOR_SUBPARTS (type);
2018 list = NULL_TREE;
2019 for (i = 0; i < units; i++)
2020 list = tree_cons (NULL_TREE, elem, list);
2021 return build_vector (type, list);
2024 /* Convert expression ARG to type TYPE. Used by the middle-end for
2025 simple conversions in preference to calling the front-end's convert. */
2027 tree
2028 fold_convert (tree type, tree arg)
2030 tree orig = TREE_TYPE (arg);
2031 tree tem;
2033 if (type == orig)
2034 return arg;
2036 if (TREE_CODE (arg) == ERROR_MARK
2037 || TREE_CODE (type) == ERROR_MARK
2038 || TREE_CODE (orig) == ERROR_MARK)
2039 return error_mark_node;
/* Compatible main variants need only a NOP_EXPR wrapper. */
2041 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2042 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2043 TYPE_MAIN_VARIANT (orig)))
2044 return fold_build1 (NOP_EXPR, type, arg);
2046 switch (TREE_CODE (type))
2048 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2049 case POINTER_TYPE: case REFERENCE_TYPE:
2050 case OFFSET_TYPE:
2051 if (TREE_CODE (arg) == INTEGER_CST)
2053 tem = fold_convert_const (NOP_EXPR, type, arg);
2054 if (tem != NULL_TREE)
2055 return tem;
2057 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2058 || TREE_CODE (orig) == OFFSET_TYPE)
2059 return fold_build1 (NOP_EXPR, type, arg);
/* Complex -> integral: convert the real part only. */
2060 if (TREE_CODE (orig) == COMPLEX_TYPE)
2062 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2063 return fold_convert (type, tem);
2065 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2066 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2067 return fold_build1 (NOP_EXPR, type, arg);
2069 case REAL_TYPE:
2070 if (TREE_CODE (arg) == INTEGER_CST)
2072 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2073 if (tem != NULL_TREE)
2074 return tem;
2076 else if (TREE_CODE (arg) == REAL_CST)
2078 tem = fold_convert_const (NOP_EXPR, type, arg);
2079 if (tem != NULL_TREE)
2080 return tem;
2083 switch (TREE_CODE (orig))
2085 case INTEGER_TYPE:
2086 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2087 case POINTER_TYPE: case REFERENCE_TYPE:
2088 return fold_build1 (FLOAT_EXPR, type, arg);
2090 case REAL_TYPE:
2091 return fold_build1 (NOP_EXPR, type, arg);
2093 case COMPLEX_TYPE:
2094 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2095 return fold_convert (type, tem);
2097 default:
2098 gcc_unreachable ();
/* Scalar -> complex: pair the converted value with a zero imaginary
   part; complex -> complex: convert each part. */
2101 case COMPLEX_TYPE:
2102 switch (TREE_CODE (orig))
2104 case INTEGER_TYPE:
2105 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2106 case POINTER_TYPE: case REFERENCE_TYPE:
2107 case REAL_TYPE:
2108 return build2 (COMPLEX_EXPR, type,
2109 fold_convert (TREE_TYPE (type), arg),
2110 fold_convert (TREE_TYPE (type), integer_zero_node));
2111 case COMPLEX_TYPE:
2113 tree rpart, ipart;
2115 if (TREE_CODE (arg) == COMPLEX_EXPR)
2117 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2118 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2119 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* ARG is used twice (real and imaginary extraction), so wrap it
   in a SAVE_EXPR to evaluate it only once. */
2122 arg = save_expr (arg);
2123 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2124 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2125 rpart = fold_convert (TREE_TYPE (type), rpart);
2126 ipart = fold_convert (TREE_TYPE (type), ipart);
2127 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2130 default:
2131 gcc_unreachable ();
2134 case VECTOR_TYPE:
2135 if (integer_zerop (arg))
2136 return build_zero_vector (type);
2137 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2138 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2139 || TREE_CODE (orig) == VECTOR_TYPE);
2140 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2142 case VOID_TYPE:
2143 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2145 default:
2146 gcc_unreachable ();
2150 /* Return false if expr can be assumed not to be an lvalue, true
2151 otherwise. */
2153 static bool
2154 maybe_lvalue_p (tree x)
2156 /* We only need to wrap lvalue tree codes. */
2157 switch (TREE_CODE (x))
2159 case VAR_DECL:
2160 case PARM_DECL:
2161 case RESULT_DECL:
2162 case LABEL_DECL:
2163 case FUNCTION_DECL:
2164 case SSA_NAME:
2166 case COMPONENT_REF:
2167 case INDIRECT_REF:
2168 case ALIGN_INDIRECT_REF:
2169 case MISALIGNED_INDIRECT_REF:
2170 case ARRAY_REF:
2171 case ARRAY_RANGE_REF:
2172 case BIT_FIELD_REF:
2173 case OBJ_TYPE_REF:
2175 case REALPART_EXPR:
2176 case IMAGPART_EXPR:
2177 case PREINCREMENT_EXPR:
2178 case PREDECREMENT_EXPR:
2179 case SAVE_EXPR:
2180 case TRY_CATCH_EXPR:
2181 case WITH_CLEANUP_EXPR:
2182 case COMPOUND_EXPR:
2183 case MODIFY_EXPR:
2184 case TARGET_EXPR:
2185 case COND_EXPR:
2186 case BIND_EXPR:
2187 case MIN_EXPR:
2188 case MAX_EXPR:
/* All codes listed above break out to the final "return true". */
2189 break;
2191 default:
2192 /* Assume the worst for front-end tree codes. */
2193 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2194 break;
2195 return false;
2198 return true;
2201 /* Return an expr equal to X but certainly not valid as an lvalue. */
2203 tree
2204 non_lvalue (tree x)
2206 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2207 us. */
2208 if (in_gimple_form)
2209 return x;
/* Only wrap codes that might actually denote an lvalue. */
2211 if (! maybe_lvalue_p (x))
2212 return x;
2213 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2216 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2217 Zero means allow extended lvalues. */
2219 int pedantic_lvalues;
2221 /* When pedantic, return an expr equal to X but certainly not valid as a
2222 pedantic lvalue. Otherwise, return X. */
2224 static tree
2225 pedantic_non_lvalue (tree x)
2227 if (pedantic_lvalues)
2228 return non_lvalue (x);
2229 else
2230 return x;
2233 /* Given a tree comparison code, return the code that is the logical inverse
2234 of the given code. It is not safe to do this for floating-point
2235 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2236 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2238 enum tree_code
2239 invert_tree_comparison (enum tree_code code, bool honor_nans)
2241 if (honor_nans && flag_trapping_math)
2242 return ERROR_MARK;
2244 switch (code)
2246 case EQ_EXPR:
2247 return NE_EXPR;
2248 case NE_EXPR:
2249 return EQ_EXPR;
2250 case GT_EXPR:
2251 return honor_nans ? UNLE_EXPR : LE_EXPR;
2252 case GE_EXPR:
2253 return honor_nans ? UNLT_EXPR : LT_EXPR;
2254 case LT_EXPR:
2255 return honor_nans ? UNGE_EXPR : GE_EXPR;
2256 case LE_EXPR:
2257 return honor_nans ? UNGT_EXPR : GT_EXPR;
2258 case LTGT_EXPR:
2259 return UNEQ_EXPR;
2260 case UNEQ_EXPR:
2261 return LTGT_EXPR;
2262 case UNGT_EXPR:
2263 return LE_EXPR;
2264 case UNGE_EXPR:
2265 return LT_EXPR;
2266 case UNLT_EXPR:
2267 return GE_EXPR;
2268 case UNLE_EXPR:
2269 return GT_EXPR;
2270 case ORDERED_EXPR:
2271 return UNORDERED_EXPR;
2272 case UNORDERED_EXPR:
2273 return ORDERED_EXPR;
2274 default:
2275 gcc_unreachable ();
2279 /* Similar, but return the comparison that results if the operands are
2280 swapped. This is safe for floating-point. */
2282 enum tree_code
2283 swap_tree_comparison (enum tree_code code)
2285 switch (code)
2287 case EQ_EXPR:
2288 case NE_EXPR:
2289 case ORDERED_EXPR:
2290 case UNORDERED_EXPR:
2291 case LTGT_EXPR:
2292 case UNEQ_EXPR:
2293 return code;
2294 case GT_EXPR:
2295 return LT_EXPR;
2296 case GE_EXPR:
2297 return LE_EXPR;
2298 case LT_EXPR:
2299 return GT_EXPR;
2300 case LE_EXPR:
2301 return GE_EXPR;
2302 case UNGT_EXPR:
2303 return UNLT_EXPR;
2304 case UNGE_EXPR:
2305 return UNLE_EXPR;
2306 case UNLT_EXPR:
2307 return UNGT_EXPR;
2308 case UNLE_EXPR:
2309 return UNGE_EXPR;
2310 default:
2311 gcc_unreachable ();
2316 /* Convert a comparison tree code from an enum tree_code representation
2317 into a compcode bit-based encoding. This function is the inverse of
2318 compcode_to_comparison. */
2320 static enum comparison_code
2321 comparison_to_compcode (enum tree_code code)
2323 switch (code)
2325 case LT_EXPR:
2326 return COMPCODE_LT;
2327 case EQ_EXPR:
2328 return COMPCODE_EQ;
2329 case LE_EXPR:
2330 return COMPCODE_LE;
2331 case GT_EXPR:
2332 return COMPCODE_GT;
2333 case NE_EXPR:
2334 return COMPCODE_NE;
2335 case GE_EXPR:
2336 return COMPCODE_GE;
2337 case ORDERED_EXPR:
2338 return COMPCODE_ORD;
2339 case UNORDERED_EXPR:
2340 return COMPCODE_UNORD;
2341 case UNLT_EXPR:
2342 return COMPCODE_UNLT;
2343 case UNEQ_EXPR:
2344 return COMPCODE_UNEQ;
2345 case UNLE_EXPR:
2346 return COMPCODE_UNLE;
2347 case UNGT_EXPR:
2348 return COMPCODE_UNGT;
2349 case LTGT_EXPR:
2350 return COMPCODE_LTGT;
2351 case UNGE_EXPR:
2352 return COMPCODE_UNGE;
2353 default:
2354 gcc_unreachable ();
2358 /* Convert a compcode bit-based encoding of a comparison operator back
2359 to GCC's enum tree_code representation. This function is the
2360 inverse of comparison_to_compcode. */
2362 static enum tree_code
2363 compcode_to_comparison (enum comparison_code code)
2365 switch (code)
2367 case COMPCODE_LT:
2368 return LT_EXPR;
2369 case COMPCODE_EQ:
2370 return EQ_EXPR;
2371 case COMPCODE_LE:
2372 return LE_EXPR;
2373 case COMPCODE_GT:
2374 return GT_EXPR;
2375 case COMPCODE_NE:
2376 return NE_EXPR;
2377 case COMPCODE_GE:
2378 return GE_EXPR;
2379 case COMPCODE_ORD:
2380 return ORDERED_EXPR;
2381 case COMPCODE_UNORD:
2382 return UNORDERED_EXPR;
2383 case COMPCODE_UNLT:
2384 return UNLT_EXPR;
2385 case COMPCODE_UNEQ:
2386 return UNEQ_EXPR;
2387 case COMPCODE_UNLE:
2388 return UNLE_EXPR;
2389 case COMPCODE_UNGT:
2390 return UNGT_EXPR;
2391 case COMPCODE_LTGT:
2392 return LTGT_EXPR;
2393 case COMPCODE_UNGE:
2394 return UNGE_EXPR;
2395 default:
2396 gcc_unreachable ();
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  /* In the compcode encoding, AND/OR of two comparisons on the same
     operands is just the bitwise AND/OR of their encodings.  */
  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  An ordered comparison (other than EQ
	 and ORD) traps on unordered operands.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  /* The always-true and always-false encodings fold to constants;
     anything else folds back to a single comparison.  */
  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
			truth_type, ll_arg, lr_arg);
}
2484 /* Return nonzero if CODE is a tree code that represents a truth value. */
2486 static int
2487 truth_value_p (enum tree_code code)
2489 return (TREE_CODE_CLASS (code) == tcc_comparison
2490 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2491 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2492 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
			        TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us. In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case REAL_CST:
	/* Bit-identical reals are equal regardless of mode quirks.  */
	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
				   TREE_REAL_CST (arg1)))
	  return 1;

	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  /* Compare the element lists pairwise; equal only if both
	     lists ended together (v1 == v2 == NULL).  */
	  tree v1, v2;

	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    flags))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return v1 == v2;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, the both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);


    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case MISALIGNED_INDIRECT_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.  */
	  return (OP_SAME (0)
		  && OP_SAME (1)
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  return OP_SAME_WITH_NULL (0)
		 && OP_SAME (1)
		 && OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (!OP_SAME (0))
	    return 0;

	  {
	    /* Only const (or, with OEP_PURE_SAME, pure) calls can be
	       assumed to return the same value for the same arguments.  */
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  operand_equal_p
	     does not handle TREE_LIST, so we walk the operands here
	     feeding them to operand_equal_p.  */
	  arg0 = TREE_OPERAND (arg0, 1);
	  arg1 = TREE_OPERAND (arg1, 1);
	  while (arg0 && arg1)
	    {
	      if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
				     flags))
		return 0;

	      arg0 = TREE_CHAIN (arg0);
	      arg1 = TREE_CHAIN (arg1);
	    }

	  /* If we get here and both argument lists are exhausted
	     then the CALL_EXPRs are equal.  */
	  return ! (arg0 || arg1);

	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      /* Record operand 0 in *CVAL1 or *CVAL2, or accept it if it
	 matches one already recorded (the empty else-if arms).  */
      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      /* Likewise for operand 1; *CVAL1 is known nonzero here.  */
      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold_build1 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  /* Only the value operand matters for substitution.  */
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold_build3 (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2 (code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
3034 /* Return a tree for the case when the result of an expression is RESULT
3035 converted to TYPE and OMITTED was previously an operand of the expression
3036 but is now not needed (e.g., we folded OMITTED * 0).
3038 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3039 the conversion of RESULT to TYPE. */
3041 tree
3042 omit_one_operand (tree type, tree result, tree omitted)
3044 tree t = fold_convert (type, result);
3046 if (TREE_SIDE_EFFECTS (omitted))
3047 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3049 return non_lvalue (t);
3052 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3054 static tree
3055 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3057 tree t = fold_convert (type, result);
3059 if (TREE_SIDE_EFFECTS (omitted))
3060 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3062 return pedantic_non_lvalue (t);
3065 /* Return a tree for the case when the result of an expression is RESULT
3066 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3067 of the expression but are now not needed.
3069 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3070 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3071 evaluated before OMITTED2. Otherwise, if neither has side effects,
3072 just do the conversion of RESULT to TYPE. */
3074 tree
3075 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3077 tree t = fold_convert (type, result);
3079 if (TREE_SIDE_EFFECTS (omitted2))
3080 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3081 if (TREE_SIDE_EFFECTS (omitted1))
3082 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3084 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).  Returns NULL_TREE if no
   simplification is possible.

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
fold_truth_not_expr (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;
      else
	{
	  code = invert_tree_comparison (code,
					 HONOR_NANS (TYPE_MODE (op_type)));
	  if (code == ERROR_MARK)
	    return NULL_TREE;
	  else
	    return build2 (code, type,
			   TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
	}
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      /* De Morgan: !(a && b) == !a || !b.  */
      return build2 (TRUTH_OR_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      /* De Morgan: !(a || b) == !a && !b.  */
      return build2 (TRUTH_AND_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2 (TRUTH_XOR_EXPR, type,
		       invert_truthvalue (TREE_OPERAND (arg, 0)),
		       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation cancels.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);
	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
		       VOID_TYPE_P (TREE_TYPE (arg1))
		       ? arg1 : invert_truthvalue (arg1),
		       VOID_TYPE_P (TREE_TYPE (arg2))
		       ? arg2 : invert_truthvalue (arg2));
      }

    case COMPOUND_EXPR:
      /* Negate only the value operand of a comma expression.  */
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1 (TRUTH_NOT_EXPR, type, arg);
      /* FALLTHRU -- non-boolean NOPs are handled like conversions.  */

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      /* !(x & 1) becomes (x & 1) == 0.  */
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      return build2 (EQ_EXPR, type, arg,
		     build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }

  return NULL_TREE;
}
3217 /* Return a simplified tree node for the truth-negation of ARG. This
3218 never alters ARG itself. We assume that ARG is an operation that
3219 returns a truth value (0 or 1).
3221 FIXME: one would think we would fold the result, but it causes
3222 problems with the dominator optimizer. */
3224 tree
3225 invert_truthvalue (tree arg)
3227 tree tem;
3229 if (TREE_CODE (arg) == ERROR_MARK)
3230 return arg;
3232 tem = fold_truth_not_expr (arg);
3233 if (!tem)
3234 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3236 return tem;
3239 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3240 operands are another bit-wise operation with a common input. If so,
3241 distribute the bit operations to save an operation and possibly two if
3242 constants are involved. For example, convert
3243 (A | B) & (A | C) into A | (B & C)
3244 Further simplification will occur if B and C are constants.
3246 If this optimization cannot be done, 0 will be returned. */
3248 static tree
3249 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3251 tree common;
3252 tree left, right;
3254 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3255 || TREE_CODE (arg0) == code
3256 || (TREE_CODE (arg0) != BIT_AND_EXPR
3257 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3258 return 0;
3260 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3262 common = TREE_OPERAND (arg0, 0);
3263 left = TREE_OPERAND (arg0, 1);
3264 right = TREE_OPERAND (arg1, 1);
3266 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3268 common = TREE_OPERAND (arg0, 0);
3269 left = TREE_OPERAND (arg0, 1);
3270 right = TREE_OPERAND (arg1, 0);
3272 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3274 common = TREE_OPERAND (arg0, 1);
3275 left = TREE_OPERAND (arg0, 0);
3276 right = TREE_OPERAND (arg1, 1);
3278 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3280 common = TREE_OPERAND (arg0, 1);
3281 left = TREE_OPERAND (arg0, 0);
3282 right = TREE_OPERAND (arg1, 0);
3284 else
3285 return 0;
3287 return fold_build2 (TREE_CODE (arg0), type, common,
3288 fold_build2 (code, type, left, right));
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe (it can change rounding and
   exception behavior, so callers must guard it appropriately).  */
static tree
distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
			fold_build2 (code, type,
				     TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0)),
			TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      /* A MULT_EXPR operand is already a multiplier; a division's
	 constant must be inverted first.  */
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2 (MULT_EXPR, type,
			  TREE_OPERAND (arg0, 0),
			  build_real (type, r0));
    }

  return NULL_TREE;
}
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
		    int unsignedp)
{
  tree result;

  if (bitpos == 0)
    {
      /* If the reference covers the whole of an integral or pointer
	 object starting at bit 0, no extraction is needed: a plain
	 conversion of INNER suffices.  */
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && host_integerp (size, 0)
	  && tree_low_cst (size, 0) == bitsize)
	return fold_convert (type, inner);
    }

  result = build3 (BIT_FIELD_REF, type, inner,
		   size_int (bitsize), bitsize_int (bitpos));

  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
			    tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

 if (!const_p)
   {
     /* If this is not a constant, we can only do something if bit positions,
	sizes, and signedness are the same.  */
     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				   &runsignedp, &rvolatilep, false);

     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	 || lunsignedp != runsignedp || offset != 0
	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
       return 0;
   }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it. If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  /* Bit positions are counted from the other end on big-endian targets.  */
  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field: all-ones,
     shifted so that exactly the LBITSIZE bits at LBITPOS are set.  */
  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  Both sides become (word & mask) with an unsigned
       extraction of the containing chunk.  */
    return fold_build2 (code, compare_type,
			fold_build2 (BIT_AND_EXPR, unsigned_type,
				     make_bit_field_ref (linner,
							 unsigned_type,
							 nbitsize, nbitpos,
							 1),
				     mask),
			fold_build2 (BIT_AND_EXPR, unsigned_type,
				     make_bit_field_ref (rinner,
							 unsigned_type,
							 nbitsize, nbitpos,
							 1),
				     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      /* Preserve the volatile semantics of the original access.  */
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert (unsigned_type, rhs),
				  size_int (lbitpos), 0),
		     mask, 0);

  return build2 (code, compare_type,
		 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
		 rhs);
}
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  /* Peel off an explicit AND mask, remembering it for the caller.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  /* Fail for a non-reference (unless an AND mask made it interesting),
     a negative size, a variable offset, or a PLACEHOLDER_EXPR we could
     no longer substitute.  */
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness. Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield: *PBITSIZE low-order ones,
     built by shifting an all-ones constant left then right.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
			fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  /* Build an all-ones constant of the signed variant of MASK's type.  */
  tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
  tmask = force_fit_type (tmask, 0, false, false);

  /* Shift the all-ones value left then right by (precision - size) so
     that only the SIZE low-order bits remain set, and compare with MASK.  */
  return
    tree_int_cst_equal (mask,
			const_binop (RSHIFT_EXPR,
				     const_binop (LSHIFT_EXPR, tmask,
						  size_int (precision - size),
						  0),
				     size_int (precision - size), 0));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  /* Compute the sign-bit value (HI:LO) and a mask (MASK_HI:MASK_LO) of
     the bits that belong to the type, in the double-word representation
     used by INTEGER_CSTs.  */
  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
3701 /* Subroutine for fold_truthop: determine if an operand is simple enough
3702 to be evaluated unconditionally. */
3704 static int
3705 simple_operand_p (tree exp)
3707 /* Strip any conversions that don't change the machine mode. */
3708 STRIP_NOPS (exp);
3710 return (CONSTANT_CLASS_P (exp)
3711 || TREE_CODE (exp) == SSA_NAME
3712 || (DECL_P (exp)
3713 && ! TREE_ADDRESSABLE (exp)
3714 && ! TREE_THIS_VOLATILE (exp)
3715 && ! DECL_NONLOCAL (exp)
3716 /* Don't regard global variables as simple. They may be
3717 allocated in ways unknown to the compiler (shared memory,
3718 #pragma weak, etc). */
3719 && ! TREE_PUBLIC (exp)
3720 && ! DECL_EXTERNAL (exp)
3721 /* Loading a static variable is unduly expensive, but global
3722 registers aren't expensive. */
3723 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3726 /* The following functions are subroutines to fold_range_test and allow it to
3727 try to change a logical combination of comparisons into a range test.
   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3
3736 We describe each set of comparisons as being either inside or outside
3737 a range, using a variable named like IN_P, and then describe the
3738 range with a lower and upper bound. If one of the bounds is omitted,
3739 it represents either the highest or lowest value of the type.
3741 In the comments below, we represent a range by two numbers in brackets
3742 preceded by a "+" to designate being inside that range, or a "-" to
3743 designate being outside that range, so the condition can be inverted by
3744 flipping the prefix. An omitted bound is represented by a "-". For
3745 example, "- [-, 10]" means being outside the range starting at the lowest
3746 possible value and ending at 10, in other words, being greater than 10.
3747 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3748 always false.
3750 We set up things so that the missing bounds are handled in a consistent
3751 manner so neither a missing bound nor "true" and "false" need to be
3752 handled using a special case. */
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules. Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      /* Only a constant fold is useful here; a residual expression
	 means the comparison could not be decided.  */
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same. But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number. This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      /* Pick up the operands relevant to this code class.  */
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_CODE_LENGTH (code) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_unary
	      || TREE_CODE_CLASS (code) == tcc_binary)
	    arg0_type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_CODE_LENGTH (code) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  /* Negation just flips whether we are in or out of the range.  */
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  We test arg0_type since often the return type
	     of, e.g. EQ_EXPR, is boolean.  */
	  if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high,
				  in_p, low, high, 1,
				  build_int_cst (arg0_type, 0),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we have a nonzero low
		 bound, reverse the range so it goes from zero to the low bound
		 minus 1.  */
	      if (high == 0 && low && ! integer_zerop (low))
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = build_int_cst (arg0_type, 0);
		}
	    }

	  exp = arg0;
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, exp_type,
			       build_int_cst (exp_type, 0),
			       0, high, 1);
	  n_high = range_binop (MINUS_EXPR, exp_type,
				build_int_cst (exp_type, 0),
				0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1, then handled by the NEGATE/MINUS cases.  */
	  exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
			build_int_cst (exp_type, 1));
	  continue;

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	     move a constant to the other side.  */
	  if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       arg0_type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				arg0_type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  /* A widening conversion cannot be looked through here.  */
	  if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	    break;

	  if (! INTEGRAL_TYPE_P (arg0_type)
	      || (low != 0 && ! int_fits_type_p (low, arg0_type))
	      || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = fold_convert (arg0_type, n_low);

	  if (n_high != 0)
	    n_high = fold_convert (arg0_type, n_high);


	  /* If we're converting arg0 from an unsigned type, to exp,
	     a signed type,  we will be doing the comparison as unsigned.
	     The tests above have already verified that LOW and HIGH
	     are both positive.

	     So we have to ensure that we will handle large unsigned
	     values the same way that the current signed bounds treat
	     negative values.  */

	  if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	    {
	      tree high_positive;
	      tree equiv_type = lang_hooks.types.type_for_mode
		(TYPE_MODE (arg0_type), 1);

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (arg0_type);

	      if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
		high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
					     fold_convert (arg0_type,
							   high_positive),
					     build_int_cst (arg0_type, 1));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  /* Build the "inside" test, then invert it for an "outside" range.  */
  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue (value);

      return 0;
    }

  /* No bounds at all: the test is trivially true.  */
  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2 (LE_EXPR, type, exp,
			fold_convert (etype, high));

  if (high == 0)
    return fold_build2 (GE_EXPR, type, exp,
			fold_convert (etype, low));

  /* A degenerate range [c, c] is a simple equality test.  */
  if (operand_equal_p (low, high, 0))
    return fold_build2 (EQ_EXPR, type, exp,
			fold_convert (etype, low));

  /* [0, high]: do the comparison unsigned so one LE_EXPR suffices.  */
  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = lang_hooks.types.unsigned_type (etype);
	  high = fold_convert (etype, high);
	  exp = fold_convert (etype, exp);
	}
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      /* Compute the signed maximum of the precision (HI:LO) to see
	 whether HIGH is exactly that value.  */
      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      etype = lang_hooks.types.signed_type (etype);
	      exp = fold_convert (etype, exp);
	    }
	  return fold_build2 (GT_EXPR, type, exp,
			      build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.  */
  switch (TREE_CODE (etype))
    {
    case INTEGER_TYPE:
      /* There is no requirement that LOW be within the range of ETYPE
	 if the latter is a subtype.  It must, however, be within the base
	 type of ETYPE.  So be sure we do the subtraction in that type.  */
      if (TREE_TYPE (etype))
	etype = TREE_TYPE (etype);
      break;

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					      TYPE_UNSIGNED (etype));
      break;

    default:
      break;
    }

  /* If we don't have wrap-around arithmetics upfront, try to force it.  */
  if (TREE_CODE (etype) == INTEGER_TYPE
      && !TYPE_UNSIGNED (etype) && !flag_wrapv)
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = lang_hooks.types.unsigned_type (etype);
      maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert (utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert (etype, high);
  low = fold_convert (etype, low);
  exp = fold_convert (etype, exp);

  value = const_binop (MINUS_EXPR, high, low, 0);

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (type,
			      fold_build2 (MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
4243 /* Return the predecessor of VAL in its type, handling the infinite case. */
4245 static tree
4246 range_predecessor (tree val)
4248 tree type = TREE_TYPE (val);
4250 if (INTEGRAL_TYPE_P (type)
4251 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4252 return 0;
4253 else
4254 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4257 /* Return the successor of VAL in its type, handling the infinite case. */
4259 static tree
4260 range_successor (tree val)
4262 tree type = TREE_TYPE (val);
4264 if (INTEGRAL_TYPE_P (type)
4265 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4266 return 0;
4267 else
4268 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.

   Range 0 is (IN0_P, LOW0, HIGH0) and range 1 is (IN1_P, LOW1, HIGH1);
   the merged result is stored through *PIN_P, *PLOW and *PHIGH.  An IN_P
   flag of 1 means the test is "value inside [low, high]", 0 means
   "value outside [low, high]".  A NULL low or high bound stands for an
   unbounded end of the range (see the canonicalization of [min, x] into
   [-, x] below).  Bound comparisons are delegated to range_binop, whose
   trailing 0/1 arguments say which infinity a NULL bound represents.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  /* Whether the two lower bounds (resp. upper bounds) are equal,
     treating two NULL bounds as equal.  */
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  /* range_successor returning 0 means high1 was the type's
	     maximum, so the remaining range is empty.  */
	  in_p = (low != 0);
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = (high != 0);
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = (low != 0);
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    /* Only canonicalize enums whose precision fills the
		       whole mode; otherwise min/max don't coincide with
		       the mode's extremes.  */
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    /* For unsigned pointers, max is the value whose
		       successor wraps to zero.  */
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
	         minimum values of the given type.  For
	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
	         return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);	/* A in "A op B".  */
  tree arg01 = TREE_OPERAND (arg0, 1);	/* B in "A op B".  */
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
       ? real_zerop (arg01)
       : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
	        have already been folded to Y-X, check for that. */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert (arg1_type, arg1);
	return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	/* The unordered forms may not be folded to abs when such
	   folding could elide a trap on unordered operands.  */
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	/* ABS_EXPR wants a signed operand; convert first if needed.  */
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (lang_hooks.types.signed_type
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (lang_hooks.types.signed_type
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert (type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert (type, arg01);
	return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2).  The guard against
	   TYPE_MAX_VALUE avoids forming C2 + 1 when it would wrap.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type, arg1, arg2));
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type, arg1, arg2));
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type, arg1, arg2));
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type, arg1, arg2));
	break;
      case NE_EXPR:
	break;
      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
/* Nonzero when it is worth rewriting short-circuit TRUTH_ANDIF/ORIF
   operations into unconditional TRUTH_AND/OR, i.e. when branches are
   expensive.  Targets may override this default.  */
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.

   CODE is the logical operation (one of the TRUTH_AND/OR variants),
   TYPE its result type, and OP0/OP1 its two operands.  Returns 0 when
   no merge is possible.  */

static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  /* Decompose each operand into an (in_p, low, high) range test over
     a common expression; make_range returns 0 when it cannot.  */
  tree lhs = make_range (op0, &in0_p, &low0, &high0);
  tree rhs = make_range (op1, &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2 (code == TRUTH_ANDIF_EXPR
		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		       type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  /* Rebuild both sides as range checks on the shared SAVE_EXPR,
	     re-inverting for the OR case since we will not invert at
	     the end on this path.  */
	  if (0 != (lhs = build_range_check (type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    return build2 (code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, lhs, rhs);
	}
    }

  return 0;
}
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.

   Returns a constant of C's type; returns C unchanged when the field
   already fills the mode or is unsigned.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  /* Nothing to do: no extra bits, or zero-extension is already right.  */
  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (lang_hooks.types.signed_type (type), temp);

  /* Move the sign bit to the top, then arithmetic-shift it back down so
     it replicates through all bits above position P - 1.  */
  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
4857 /* Find ways of folding logical expressions of LHS and RHS:
4858 Try to merge two comparisons to the same innermost item.
4859 Look for range tests like "ch >= '0' && ch <= '9'".
4860 Look for combinations of simple terms on machines with expensive branches
4861 and evaluate the RHS unconditionally.
4863 For example, if we have p->a == 2 && p->b == 4 and we can make an
4864 object large enough to span both A and B, we can do this with a comparison
4865 against the object ANDed with the a mask.
4867 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4868 operations to do this with one comparison.
4870 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4871 function and the one above.
4873 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4874 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4876 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4877 two operands.
4879 We return the simplified tree or 0 if no optimization is possible. */
4881 static tree
4882 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4884 /* If this is the "or" of two comparisons, we can do something if
4885 the comparisons are NE_EXPR. If this is the "and", we can do something
4886 if the comparisons are EQ_EXPR. I.e.,
4887 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4889 WANTED_CODE is this operation code. For single bit fields, we can
4890 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4891 comparison for one-bit fields. */
4893 enum tree_code wanted_code;
4894 enum tree_code lcode, rcode;
4895 tree ll_arg, lr_arg, rl_arg, rr_arg;
4896 tree ll_inner, lr_inner, rl_inner, rr_inner;
4897 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4898 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4899 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4900 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4901 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4902 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4903 enum machine_mode lnmode, rnmode;
4904 tree ll_mask, lr_mask, rl_mask, rr_mask;
4905 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4906 tree l_const, r_const;
4907 tree lntype, rntype, result;
4908 int first_bit, end_bit;
4909 int volatilep;
4910 tree orig_lhs = lhs, orig_rhs = rhs;
4911 enum tree_code orig_code = code;
4913 /* Start by getting the comparison codes. Fail if anything is volatile.
4914 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4915 it were surrounded with a NE_EXPR. */
4917 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4918 return 0;
4920 lcode = TREE_CODE (lhs);
4921 rcode = TREE_CODE (rhs);
4923 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4925 lhs = build2 (NE_EXPR, truth_type, lhs,
4926 build_int_cst (TREE_TYPE (lhs), 0));
4927 lcode = NE_EXPR;
4930 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4932 rhs = build2 (NE_EXPR, truth_type, rhs,
4933 build_int_cst (TREE_TYPE (rhs), 0));
4934 rcode = NE_EXPR;
4937 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4938 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4939 return 0;
4941 ll_arg = TREE_OPERAND (lhs, 0);
4942 lr_arg = TREE_OPERAND (lhs, 1);
4943 rl_arg = TREE_OPERAND (rhs, 0);
4944 rr_arg = TREE_OPERAND (rhs, 1);
4946 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4947 if (simple_operand_p (ll_arg)
4948 && simple_operand_p (lr_arg))
4950 tree result;
4951 if (operand_equal_p (ll_arg, rl_arg, 0)
4952 && operand_equal_p (lr_arg, rr_arg, 0))
4954 result = combine_comparisons (code, lcode, rcode,
4955 truth_type, ll_arg, lr_arg);
4956 if (result)
4957 return result;
4959 else if (operand_equal_p (ll_arg, rr_arg, 0)
4960 && operand_equal_p (lr_arg, rl_arg, 0))
4962 result = combine_comparisons (code, lcode,
4963 swap_tree_comparison (rcode),
4964 truth_type, ll_arg, lr_arg);
4965 if (result)
4966 return result;
4970 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4971 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4973 /* If the RHS can be evaluated unconditionally and its operands are
4974 simple, it wins to evaluate the RHS unconditionally on machines
4975 with expensive branches. In this case, this isn't a comparison
4976 that can be merged. Avoid doing this if the RHS is a floating-point
4977 comparison since those can trap. */
4979 if (BRANCH_COST >= 2
4980 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4981 && simple_operand_p (rl_arg)
4982 && simple_operand_p (rr_arg))
4984 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4985 if (code == TRUTH_OR_EXPR
4986 && lcode == NE_EXPR && integer_zerop (lr_arg)
4987 && rcode == NE_EXPR && integer_zerop (rr_arg)
4988 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4989 return build2 (NE_EXPR, truth_type,
4990 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4991 ll_arg, rl_arg),
4992 build_int_cst (TREE_TYPE (ll_arg), 0));
4994 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4995 if (code == TRUTH_AND_EXPR
4996 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4997 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4998 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4999 return build2 (EQ_EXPR, truth_type,
5000 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5001 ll_arg, rl_arg),
5002 build_int_cst (TREE_TYPE (ll_arg), 0));
5004 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5006 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5007 return build2 (code, truth_type, lhs, rhs);
5008 return NULL_TREE;
5012 /* See if the comparisons can be merged. Then get all the parameters for
5013 each side. */
5015 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5016 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5017 return 0;
5019 volatilep = 0;
5020 ll_inner = decode_field_reference (ll_arg,
5021 &ll_bitsize, &ll_bitpos, &ll_mode,
5022 &ll_unsignedp, &volatilep, &ll_mask,
5023 &ll_and_mask);
5024 lr_inner = decode_field_reference (lr_arg,
5025 &lr_bitsize, &lr_bitpos, &lr_mode,
5026 &lr_unsignedp, &volatilep, &lr_mask,
5027 &lr_and_mask);
5028 rl_inner = decode_field_reference (rl_arg,
5029 &rl_bitsize, &rl_bitpos, &rl_mode,
5030 &rl_unsignedp, &volatilep, &rl_mask,
5031 &rl_and_mask);
5032 rr_inner = decode_field_reference (rr_arg,
5033 &rr_bitsize, &rr_bitpos, &rr_mode,
5034 &rr_unsignedp, &volatilep, &rr_mask,
5035 &rr_and_mask);
5037 /* It must be true that the inner operation on the lhs of each
5038 comparison must be the same if we are to be able to do anything.
5039 Then see if we have constants. If not, the same must be true for
5040 the rhs's. */
5041 if (volatilep || ll_inner == 0 || rl_inner == 0
5042 || ! operand_equal_p (ll_inner, rl_inner, 0))
5043 return 0;
5045 if (TREE_CODE (lr_arg) == INTEGER_CST
5046 && TREE_CODE (rr_arg) == INTEGER_CST)
5047 l_const = lr_arg, r_const = rr_arg;
5048 else if (lr_inner == 0 || rr_inner == 0
5049 || ! operand_equal_p (lr_inner, rr_inner, 0))
5050 return 0;
5051 else
5052 l_const = r_const = 0;
5054 /* If either comparison code is not correct for our logical operation,
5055 fail. However, we can convert a one-bit comparison against zero into
5056 the opposite comparison against that bit being set in the field. */
5058 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5059 if (lcode != wanted_code)
5061 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5063 /* Make the left operand unsigned, since we are only interested
5064 in the value of one bit. Otherwise we are doing the wrong
5065 thing below. */
5066 ll_unsignedp = 1;
5067 l_const = ll_mask;
5069 else
5070 return 0;
5073 /* This is analogous to the code for l_const above. */
5074 if (rcode != wanted_code)
5076 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5078 rl_unsignedp = 1;
5079 r_const = rl_mask;
5081 else
5082 return 0;
5085 /* After this point all optimizations will generate bit-field
5086 references, which we might not want. */
5087 if (! lang_hooks.can_use_bit_fields_p ())
5088 return 0;
5090 /* See if we can find a mode that contains both fields being compared on
5091 the left. If we can't, fail. Otherwise, update all constants and masks
5092 to be relative to a field of that size. */
5093 first_bit = MIN (ll_bitpos, rl_bitpos);
5094 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5095 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5096 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5097 volatilep);
5098 if (lnmode == VOIDmode)
5099 return 0;
5101 lnbitsize = GET_MODE_BITSIZE (lnmode);
5102 lnbitpos = first_bit & ~ (lnbitsize - 1);
5103 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5104 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5106 if (BYTES_BIG_ENDIAN)
5108 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5109 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5112 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5113 size_int (xll_bitpos), 0);
5114 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5115 size_int (xrl_bitpos), 0);
5117 if (l_const)
5119 l_const = fold_convert (lntype, l_const);
5120 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5121 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5122 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5123 fold_build1 (BIT_NOT_EXPR,
5124 lntype, ll_mask),
5125 0)))
5127 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5129 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5132 if (r_const)
5134 r_const = fold_convert (lntype, r_const);
5135 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5136 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5137 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5138 fold_build1 (BIT_NOT_EXPR,
5139 lntype, rl_mask),
5140 0)))
5142 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5144 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5148 /* If the right sides are not constant, do the same for it. Also,
5149 disallow this optimization if a size or signedness mismatch occurs
5150 between the left and right sides. */
5151 if (l_const == 0)
5153 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5154 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5155 /* Make sure the two fields on the right
5156 correspond to the left without being swapped. */
5157 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5158 return 0;
5160 first_bit = MIN (lr_bitpos, rr_bitpos);
5161 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5162 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5163 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5164 volatilep);
5165 if (rnmode == VOIDmode)
5166 return 0;
5168 rnbitsize = GET_MODE_BITSIZE (rnmode);
5169 rnbitpos = first_bit & ~ (rnbitsize - 1);
5170 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5171 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5173 if (BYTES_BIG_ENDIAN)
5175 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5176 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5179 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5180 size_int (xlr_bitpos), 0);
5181 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5182 size_int (xrr_bitpos), 0);
5184 /* Make a mask that corresponds to both fields being compared.
5185 Do this for both items being compared. If the operands are the
5186 same size and the bits being compared are in the same position
5187 then we can do this by masking both and comparing the masked
5188 results. */
5189 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5190 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5191 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5193 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5194 ll_unsignedp || rl_unsignedp);
5195 if (! all_ones_mask_p (ll_mask, lnbitsize))
5196 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5198 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5199 lr_unsignedp || rr_unsignedp);
5200 if (! all_ones_mask_p (lr_mask, rnbitsize))
5201 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5203 return build2 (wanted_code, truth_type, lhs, rhs);
5206 /* There is still another way we can do something: If both pairs of
5207 fields being compared are adjacent, we may be able to make a wider
5208 field containing them both.
5210 Note that we still must mask the lhs/rhs expressions. Furthermore,
5211 the mask must be shifted to account for the shift done by
5212 make_bit_field_ref. */
5213 if ((ll_bitsize + ll_bitpos == rl_bitpos
5214 && lr_bitsize + lr_bitpos == rr_bitpos)
5215 || (ll_bitpos == rl_bitpos + rl_bitsize
5216 && lr_bitpos == rr_bitpos + rr_bitsize))
5218 tree type;
5220 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5221 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5222 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5223 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5225 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5226 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5227 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5228 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5230 /* Convert to the smaller type before masking out unwanted bits. */
5231 type = lntype;
5232 if (lntype != rntype)
5234 if (lnbitsize > rnbitsize)
5236 lhs = fold_convert (rntype, lhs);
5237 ll_mask = fold_convert (rntype, ll_mask);
5238 type = rntype;
5240 else if (lnbitsize < rnbitsize)
5242 rhs = fold_convert (lntype, rhs);
5243 lr_mask = fold_convert (lntype, lr_mask);
5244 type = lntype;
5248 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5249 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5251 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5252 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5254 return build2 (wanted_code, truth_type, lhs, rhs);
5257 return 0;
5260 /* Handle the case of comparisons with constants. If there is something in
5261 common between the masks, those bits of the constants must be the same.
5262 If not, the condition is always false. Test for this to avoid generating
5263 incorrect code below. */
5264 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5265 if (! integer_zerop (result)
5266 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5267 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5269 if (wanted_code == NE_EXPR)
5271 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5272 return constant_boolean_node (true, truth_type);
5274 else
5276 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5277 return constant_boolean_node (false, truth_type);
5281 /* Construct the expression we will return. First get the component
5282 reference we will make. Unless the mask is all ones the width of
5283 that field, perform the mask operation. Then compare with the
5284 merged constant. */
5285 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5286 ll_unsignedp || rl_unsignedp);
5288 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5289 if (! all_ones_mask_p (ll_mask, lnbitsize))
5290 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5292 return build2 (wanted_code, truth_type, result,
5293 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5296 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5297 constant. */
5299 static tree
5300 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5302 tree arg0 = op0;
5303 enum tree_code op_code;
5304 tree comp_const = op1;
5305 tree minmax_const;
5306 int consts_equal, consts_lt;
5307 tree inner;
5309 STRIP_SIGN_NOPS (arg0);
5311 op_code = TREE_CODE (arg0);
5312 minmax_const = TREE_OPERAND (arg0, 1);
5313 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5314 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5315 inner = TREE_OPERAND (arg0, 0);
5317 /* If something does not permit us to optimize, return the original tree. */
5318 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5319 || TREE_CODE (comp_const) != INTEGER_CST
5320 || TREE_CONSTANT_OVERFLOW (comp_const)
5321 || TREE_CODE (minmax_const) != INTEGER_CST
5322 || TREE_CONSTANT_OVERFLOW (minmax_const))
5323 return NULL_TREE;
5325 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5326 and GT_EXPR, doing the rest with recursive calls using logical
5327 simplifications. */
5328 switch (code)
5330 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5332 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5333 type, op0, op1);
5334 if (tem)
5335 return invert_truthvalue (tem);
5336 return NULL_TREE;
5339 case GE_EXPR:
5340 return
5341 fold_build2 (TRUTH_ORIF_EXPR, type,
5342 optimize_minmax_comparison
5343 (EQ_EXPR, type, arg0, comp_const),
5344 optimize_minmax_comparison
5345 (GT_EXPR, type, arg0, comp_const));
5347 case EQ_EXPR:
5348 if (op_code == MAX_EXPR && consts_equal)
5349 /* MAX (X, 0) == 0 -> X <= 0 */
5350 return fold_build2 (LE_EXPR, type, inner, comp_const);
5352 else if (op_code == MAX_EXPR && consts_lt)
5353 /* MAX (X, 0) == 5 -> X == 5 */
5354 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5356 else if (op_code == MAX_EXPR)
5357 /* MAX (X, 0) == -1 -> false */
5358 return omit_one_operand (type, integer_zero_node, inner);
5360 else if (consts_equal)
5361 /* MIN (X, 0) == 0 -> X >= 0 */
5362 return fold_build2 (GE_EXPR, type, inner, comp_const);
5364 else if (consts_lt)
5365 /* MIN (X, 0) == 5 -> false */
5366 return omit_one_operand (type, integer_zero_node, inner);
5368 else
5369 /* MIN (X, 0) == -1 -> X == -1 */
5370 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5372 case GT_EXPR:
5373 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5374 /* MAX (X, 0) > 0 -> X > 0
5375 MAX (X, 0) > 5 -> X > 5 */
5376 return fold_build2 (GT_EXPR, type, inner, comp_const);
5378 else if (op_code == MAX_EXPR)
5379 /* MAX (X, 0) > -1 -> true */
5380 return omit_one_operand (type, integer_one_node, inner);
5382 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5383 /* MIN (X, 0) > 0 -> false
5384 MIN (X, 0) > 5 -> false */
5385 return omit_one_operand (type, integer_zero_node, inner);
5387 else
5388 /* MIN (X, 0) > -1 -> X > -1 */
5389 return fold_build2 (GT_EXPR, type, inner, comp_const);
5391 default:
5392 return NULL_TREE;
5396 /* T is an integer expression that is being multiplied, divided, or taken a
5397 modulus (CODE says which and what kind of divide or modulus) by a
5398 constant C. See if we can eliminate that operation by folding it with
5399 other operations already in T. WIDE_TYPE, if non-null, is a type that
5400 should be used for the computation if wider than our type.
5402 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5403 (X * 2) + (Y * 4). We must, however, be assured that either the original
5404 expression would not overflow or that overflow is undefined for the type
5405 in the language in question.
5407 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5408 the machine has a multiply-accumulate insn or that this is part of an
5409 addressing calculation.
5411 If we return a non-null expression, it is an equivalent form of the
5412 original computation, but need not be in the original type. */
5414 static tree
5415 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5417 /* To avoid exponential search depth, refuse to allow recursion past
5418 three levels. Beyond that (1) it's highly unlikely that we'll find
5419 something interesting and (2) we've probably processed it before
5420 when we built the inner expression. */
5422 static int depth;
5423 tree ret;
5425 if (depth > 3)
5426 return NULL;
5428 depth++;
5429 ret = extract_muldiv_1 (t, c, code, wide_type);
5430 depth--;
5432 return ret;
/* Worker routine for extract_muldiv.  Arguments and return value are as
   for extract_muldiv (see the comment before that function); this does
   one level of the rewrite and recurses through extract_muldiv into
   sub-expressions.  Returns 0 when no simplification is possible.  */
5435 static tree
5436 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
/* CTYPE is the type to compute in: WIDE_TYPE when it is supplied and
   strictly wider than T's own type, otherwise T's type.  */
5438 tree type = TREE_TYPE (t);
5439 enum tree_code tcode = TREE_CODE (t);
5440 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5441 > GET_MODE_SIZE (TYPE_MODE (type)))
5442 ? wide_type : type);
5443 tree t1, t2;
/* Nonzero when T's outermost operation is the same as CODE.  */
5444 int same_p = tcode == code;
5445 tree op0 = NULL_TREE, op1 = NULL_TREE;
5447 /* Don't deal with constants of zero here; they confuse the code below. */
5448 if (integer_zerop (c))
5449 return NULL_TREE;
/* Pick up T's operands for the unary and binary cases handled below.  */
5451 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5452 op0 = TREE_OPERAND (t, 0);
5454 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5455 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5457 /* Note that we need not handle conditional operations here since fold
5458 already handles those cases. So just do arithmetic here. */
5459 switch (tcode)
5461 case INTEGER_CST:
5462 /* For a constant, we can always simplify if we are a multiply
5463 or (for divide and modulus) if it is a multiple of our constant. */
5464 if (code == MULT_EXPR
5465 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5466 return const_binop (code, fold_convert (ctype, t),
5467 fold_convert (ctype, c), 0);
5468 break;
5470 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5471 /* If op0 is an expression ... */
5472 if ((COMPARISON_CLASS_P (op0)
5473 || UNARY_CLASS_P (op0)
5474 || BINARY_CLASS_P (op0)
5475 || EXPRESSION_CLASS_P (op0))
5476 /* ... and is unsigned, and its type is smaller than ctype,
5477 then we cannot pass through as widening. */
5478 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5479 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5480 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5481 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5482 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5483 /* ... or this is a truncation (t is narrower than op0),
5484 then we cannot pass through this narrowing. */
5485 || (GET_MODE_SIZE (TYPE_MODE (type))
5486 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5487 /* ... or signedness changes for division or modulus,
5488 then we cannot pass through this conversion. */
5489 || (code != MULT_EXPR
5490 && (TYPE_UNSIGNED (ctype)
5491 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5492 break;
5494 /* Pass the constant down and see if we can make a simplification. If
5495 we can, replace this expression with the inner simplification for
5496 possible later conversion to our or some other type. */
5497 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5498 && TREE_CODE (t2) == INTEGER_CST
5499 && ! TREE_CONSTANT_OVERFLOW (t2)
5500 && (0 != (t1 = extract_muldiv (op0, t2, code,
5501 code == MULT_EXPR
5502 ? ctype : NULL_TREE))))
5503 return t1;
5504 break;
5506 case ABS_EXPR:
5507 /* If widening the type changes it from signed to unsigned, then we
5508 must avoid building ABS_EXPR itself as unsigned. */
5509 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5511 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5512 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5514 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5515 return fold_convert (ctype, t1);
5517 break;
5519 /* FALLTHROUGH */
5520 case NEGATE_EXPR:
5521 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5522 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5523 break;
5525 case MIN_EXPR: case MAX_EXPR:
5526 /* If widening the type changes the signedness, then we can't perform
5527 this optimization as that changes the result. */
5528 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5529 break;
5531 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5532 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5533 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
/* Dividing by a negative constant reverses the ordering, so MIN and
   MAX swap roles.  */
5535 if (tree_int_cst_sgn (c) < 0)
5536 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5538 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5539 fold_convert (ctype, t2));
5541 break;
5543 case LSHIFT_EXPR: case RSHIFT_EXPR:
5544 /* If the second operand is constant, this is a multiplication
5545 or floor division, by a power of two, so we can treat it that
5546 way unless the multiplier or divisor overflows. Signed
5547 left-shift overflow is implementation-defined rather than
5548 undefined in C90, so do not convert signed left shift into
5549 multiplication. */
5550 if (TREE_CODE (op1) == INTEGER_CST
5551 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5552 /* const_binop may not detect overflow correctly,
5553 so check for it explicitly here. */
5554 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5555 && TREE_INT_CST_HIGH (op1) == 0
5556 && 0 != (t1 = fold_convert (ctype,
5557 const_binop (LSHIFT_EXPR,
5558 size_one_node,
5559 op1, 0)))
5560 && ! TREE_OVERFLOW (t1))
5561 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5562 ? MULT_EXPR : FLOOR_DIV_EXPR,
5563 ctype, fold_convert (ctype, op0), t1),
5564 c, code, wide_type);
5565 break;
5567 case PLUS_EXPR: case MINUS_EXPR:
5568 /* See if we can eliminate the operation on both sides. If we can, we
5569 can return a new PLUS or MINUS. If we can't, the only remaining
5570 cases where we can do anything are if the second operand is a
5571 constant. */
5572 t1 = extract_muldiv (op0, c, code, wide_type);
5573 t2 = extract_muldiv (op1, c, code, wide_type);
5574 if (t1 != 0 && t2 != 0
5575 && (code == MULT_EXPR
5576 /* If not multiplication, we can only do this if both operands
5577 are divisible by c. */
5578 || (multiple_of_p (ctype, op0, c)
5579 && multiple_of_p (ctype, op1, c))))
5580 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5581 fold_convert (ctype, t2));
5583 /* If this was a subtraction, negate OP1 and set it to be an addition.
5584 This simplifies the logic below. */
5585 if (tcode == MINUS_EXPR)
5586 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5588 if (TREE_CODE (op1) != INTEGER_CST)
5589 break;
5591 /* If either OP1 or C are negative, this optimization is not safe for
5592 some of the division and remainder types while for others we need
5593 to change the code. */
5594 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5596 if (code == CEIL_DIV_EXPR)
5597 code = FLOOR_DIV_EXPR;
5598 else if (code == FLOOR_DIV_EXPR)
5599 code = CEIL_DIV_EXPR;
5600 else if (code != MULT_EXPR
5601 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5602 break;
5605 /* If it's a multiply or a division/modulus operation of a multiple
5606 of our constant, do the operation and verify it doesn't overflow. */
5607 if (code == MULT_EXPR
5608 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5610 op1 = const_binop (code, fold_convert (ctype, op1),
5611 fold_convert (ctype, c), 0);
5612 /* We allow the constant to overflow with wrapping semantics. */
5613 if (op1 == 0
5614 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5615 break;
5617 else
5618 break;
5620 /* If we have an unsigned type is not a sizetype, we cannot widen
5621 the operation since it will change the result if the original
5622 computation overflowed. */
5623 if (TYPE_UNSIGNED (ctype)
5624 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5625 && ctype != type)
5626 break;
5628 /* If we were able to eliminate our operation from the first side,
5629 apply our operation to the second side and reform the PLUS. */
5630 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5631 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5633 /* The last case is if we are a multiply. In that case, we can
5634 apply the distributive law to commute the multiply and addition
5635 if the multiplication of the constants doesn't overflow. */
5636 if (code == MULT_EXPR)
5637 return fold_build2 (tcode, ctype,
5638 fold_build2 (code, ctype,
5639 fold_convert (ctype, op0),
5640 fold_convert (ctype, c)),
5641 op1);
5643 break;
5645 case MULT_EXPR:
5646 /* We have a special case here if we are doing something like
5647 (C * 8) % 4 since we know that's zero. */
5648 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5649 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5650 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5651 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5652 return omit_one_operand (type, integer_zero_node, op0);
5654 /* ... fall through ... */
5656 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5657 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5658 /* If we can extract our operation from the LHS, do so and return a
5659 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5660 do something only if the second operand is a constant. */
5661 if (same_p
5662 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5663 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5664 fold_convert (ctype, op1));
5665 else if (tcode == MULT_EXPR && code == MULT_EXPR
5666 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5667 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5668 fold_convert (ctype, t1));
5669 else if (TREE_CODE (op1) != INTEGER_CST)
5670 return 0;
5672 /* If these are the same operation types, we can associate them
5673 assuming no overflow. */
5674 if (tcode == code
5675 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5676 fold_convert (ctype, c), 0))
5677 && ! TREE_OVERFLOW (t1))
5678 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5680 /* If these operations "cancel" each other, we have the main
5681 optimizations of this pass, which occur when either constant is a
5682 multiple of the other, in which case we replace this with either an
5683 operation or CODE or TCODE.
5685 If we have an unsigned type that is not a sizetype, we cannot do
5686 this since it will change the result if the original computation
5687 overflowed. */
5688 if ((! TYPE_UNSIGNED (ctype)
5689 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5690 && ! flag_wrapv
5691 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5692 || (tcode == MULT_EXPR
5693 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5694 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5696 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5697 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5698 fold_convert (ctype,
5699 const_binop (TRUNC_DIV_EXPR,
5700 op1, c, 0)));
5701 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5702 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5703 fold_convert (ctype,
5704 const_binop (TRUNC_DIV_EXPR,
5705 c, op1, 0)));
5707 break;
5709 default:
5710 break;
/* No simplification was found.  */
5713 return 0;
5716 /* Return a node which has the indicated constant VALUE (either 0 or
5717 1), and is of the indicated TYPE. */
5719 tree
5720 constant_boolean_node (int value, tree type)
5722 if (type == integer_type_node)
5723 return value ? integer_one_node : integer_zero_node;
5724 else if (type == boolean_type_node)
5725 return value ? boolean_true_node : boolean_false_node;
5726 else
5727 return build_int_cst (type, value);
5731 /* Return true if expr looks like an ARRAY_REF and set base and
5732 offset to the appropriate trees. If there is no offset,
5733 offset is set to NULL_TREE. Base will be canonicalized to
5734 something you can get the element type from using
5735 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5736 in bytes to the base. */
5738 static bool
5739 extract_array_ref (tree expr, tree *base, tree *offset)
5741 /* One canonical form is a PLUS_EXPR with the first
5742 argument being an ADDR_EXPR with a possible NOP_EXPR
5743 attached. */
5744 if (TREE_CODE (expr) == PLUS_EXPR)
5746 tree op0 = TREE_OPERAND (expr, 0);
5747 tree inner_base, dummy1;
5748 /* Strip NOP_EXPRs here because the C frontends and/or
5749 folders present us (int *)&x.a + 4B possibly. */
5750 STRIP_NOPS (op0);
5751 if (extract_array_ref (op0, &inner_base, &dummy1))
5753 *base = inner_base;
5754 if (dummy1 == NULL_TREE)
5755 *offset = TREE_OPERAND (expr, 1);
5756 else
5757 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5758 dummy1, TREE_OPERAND (expr, 1));
5759 return true;
5762 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5763 which we transform into an ADDR_EXPR with appropriate
5764 offset. For other arguments to the ADDR_EXPR we assume
5765 zero offset and as such do not care about the ADDR_EXPR
5766 type and strip possible nops from it. */
5767 else if (TREE_CODE (expr) == ADDR_EXPR)
5769 tree op0 = TREE_OPERAND (expr, 0);
5770 if (TREE_CODE (op0) == ARRAY_REF)
5772 tree idx = TREE_OPERAND (op0, 1);
5773 *base = TREE_OPERAND (op0, 0);
5774 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5775 array_ref_element_size (op0));
5777 else
5779 /* Handle array-to-pointer decay as &a. */
5780 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5781 *base = TREE_OPERAND (expr, 0);
5782 else
5783 *base = expr;
5784 *offset = NULL_TREE;
5786 return true;
5788 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5789 else if (SSA_VAR_P (expr)
5790 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5792 *base = expr;
5793 *offset = NULL_TREE;
5794 return true;
5797 return false;
5801 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5802 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5803 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5804 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5805 COND is the first argument to CODE; otherwise (as in the example
5806 given here), it is the second argument. TYPE is the type of the
5807 original expression. Return NULL_TREE if no simplification is
5808 possible. */
5810 static tree
5811 fold_binary_op_with_conditional_arg (enum tree_code code,
5812 tree type, tree op0, tree op1,
5813 tree cond, tree arg, int cond_first_p)
5815 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5816 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5817 tree test, true_value, false_value;
5818 tree lhs = NULL_TREE;
5819 tree rhs = NULL_TREE;
5821 /* This transformation is only worthwhile if we don't have to wrap
5822 arg in a SAVE_EXPR, and the operation can be simplified on at least
5823 one of the branches once its pushed inside the COND_EXPR. */
5824 if (!TREE_CONSTANT (arg))
5825 return NULL_TREE;
5827 if (TREE_CODE (cond) == COND_EXPR)
5829 test = TREE_OPERAND (cond, 0);
5830 true_value = TREE_OPERAND (cond, 1);
5831 false_value = TREE_OPERAND (cond, 2);
5832 /* If this operand throws an expression, then it does not make
5833 sense to try to perform a logical or arithmetic operation
5834 involving it. */
5835 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5836 lhs = true_value;
5837 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5838 rhs = false_value;
5840 else
5842 tree testtype = TREE_TYPE (cond);
5843 test = cond;
5844 true_value = constant_boolean_node (true, testtype);
5845 false_value = constant_boolean_node (false, testtype);
5848 arg = fold_convert (arg_type, arg);
5849 if (lhs == 0)
5851 true_value = fold_convert (cond_type, true_value);
5852 if (cond_first_p)
5853 lhs = fold_build2 (code, type, true_value, arg);
5854 else
5855 lhs = fold_build2 (code, type, arg, true_value);
5857 if (rhs == 0)
5859 false_value = fold_convert (cond_type, false_value);
5860 if (cond_first_p)
5861 rhs = fold_build2 (code, type, false_value, arg);
5862 else
5863 rhs = fold_build2 (code, type, arg, false_value);
5866 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5867 return fold_convert (type, test);
5871 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5873 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5874 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5875 ADDEND is the same as X.
5877 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5878 and finite. The problematic cases are when X is zero, and its mode
5879 has signed zeros. In the case of rounding towards -infinity,
5880 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5881 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5883 static bool
5884 fold_real_zero_addition_p (tree type, tree addend, int negate)
5886 if (!real_zerop (addend))
5887 return false;
5889 /* Don't allow the fold with -fsignaling-nans. */
5890 if (HONOR_SNANS (TYPE_MODE (type)))
5891 return false;
5893 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5894 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5895 return true;
5897 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5898 if (TREE_CODE (addend) == REAL_CST
5899 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5900 negate = !negate;
5902 /* The mode has signed zeros, and we have to honor their sign.
5903 In this situation, there is only one case we can return true for.
5904 X - 0 is the same as X unless rounding towards -infinity is
5905 supported. */
5906 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5909 /* Subroutine of fold() that checks comparisons of built-in math
5910 functions against real constants.
5912 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5913 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5914 is the type of the result and ARG0 and ARG1 are the operands of the
5915 comparison. ARG1 must be a TREE_REAL_CST.
5917 The function returns the constant folded tree if a simplification
5918 can be made, and NULL_TREE otherwise. */
5920 static tree
5921 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5922 tree type, tree arg0, tree arg1)
5924 REAL_VALUE_TYPE c;
/* Only comparisons of sqrt(x) against a constant are handled here.  */
5926 if (BUILTIN_SQRT_P (fcode))
/* ARG is the (first) argument of the sqrt call; ARG0 is the CALL_EXPR
   itself.  */
5928 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5929 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5931 c = TREE_REAL_CST (arg1);
5932 if (REAL_VALUE_NEGATIVE (c))
/* sqrt(x) is never negative (it is either >= 0 or NaN), so comparisons
   against a negative constant fold readily.  */
5934 /* sqrt(x) < y is always false, if y is negative. */
5935 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5936 return omit_one_operand (type, integer_zero_node, arg);
5938 /* sqrt(x) > y is always true, if y is negative and we
5939 don't care about NaNs, i.e. negative values of x. */
5940 if (code == NE_EXPR || !HONOR_NANS (mode))
5941 return omit_one_operand (type, integer_one_node, arg);
5943 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5944 return fold_build2 (GE_EXPR, type, arg,
5945 build_real (TREE_TYPE (arg), dconst0));
5947 else if (code == GT_EXPR || code == GE_EXPR)
5949 REAL_VALUE_TYPE c2;
/* C2 = c*c rounded in the argument's mode; sqrt(x) > c <=> x > c*c.  */
5951 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5952 real_convert (&c2, mode, &c2);
5954 if (REAL_VALUE_ISINF (c2))
5956 /* sqrt(x) > y is x == +Inf, when y is very large. */
5957 if (HONOR_INFINITIES (mode))
5958 return fold_build2 (EQ_EXPR, type, arg,
5959 build_real (TREE_TYPE (arg), c2));
5961 /* sqrt(x) > y is always false, when y is very large
5962 and we don't care about infinities. */
5963 return omit_one_operand (type, integer_zero_node, arg);
5966 /* sqrt(x) > c is the same as x > c*c. */
5967 return fold_build2 (code, type, arg,
5968 build_real (TREE_TYPE (arg), c2));
5970 else if (code == LT_EXPR || code == LE_EXPR)
5972 REAL_VALUE_TYPE c2;
5974 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5975 real_convert (&c2, mode, &c2);
5977 if (REAL_VALUE_ISINF (c2))
5979 /* sqrt(x) < y is always true, when y is a very large
5980 value and we don't care about NaNs or Infinities. */
5981 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5982 return omit_one_operand (type, integer_one_node, arg);
5984 /* sqrt(x) < y is x != +Inf when y is very large and we
5985 don't care about NaNs. */
5986 if (! HONOR_NANS (mode))
5987 return fold_build2 (NE_EXPR, type, arg,
5988 build_real (TREE_TYPE (arg), c2));
5990 /* sqrt(x) < y is x >= 0 when y is very large and we
5991 don't care about Infinities. */
5992 if (! HONOR_INFINITIES (mode))
5993 return fold_build2 (GE_EXPR, type, arg,
5994 build_real (TREE_TYPE (arg), dconst0));
5996 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
/* ARG is used twice below, so it must be evaluable without side
   effects at global scope and contain no placeholders.  */
5997 if (lang_hooks.decls.global_bindings_p () != 0
5998 || CONTAINS_PLACEHOLDER_P (arg))
5999 return NULL_TREE;
6001 arg = save_expr (arg);
6002 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6003 fold_build2 (GE_EXPR, type, arg,
6004 build_real (TREE_TYPE (arg),
6005 dconst0)),
6006 fold_build2 (NE_EXPR, type, arg,
6007 build_real (TREE_TYPE (arg),
6008 c2)));
6011 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6012 if (! HONOR_NANS (mode))
6013 return fold_build2 (code, type, arg,
6014 build_real (TREE_TYPE (arg), c2));
6016 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6017 if (lang_hooks.decls.global_bindings_p () == 0
6018 && ! CONTAINS_PLACEHOLDER_P (arg))
6020 arg = save_expr (arg);
6021 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6022 fold_build2 (GE_EXPR, type, arg,
6023 build_real (TREE_TYPE (arg),
6024 dconst0)),
6025 fold_build2 (code, type, arg,
6026 build_real (TREE_TYPE (arg),
6027 c2)));
6032 return NULL_TREE;
6035 /* Subroutine of fold() that optimizes comparisons against Infinities,
6036 either +Inf or -Inf.
6038 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6039 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6040 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6042 The function returns the constant folded tree if a simplification
6043 can be made, and NULL_TREE otherwise. */
6045 static tree
6046 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6048 enum machine_mode mode;
6049 REAL_VALUE_TYPE max;
6050 tree temp;
6051 bool neg;
6053 mode = TYPE_MODE (TREE_TYPE (arg0));
6055 /* For negative infinity swap the sense of the comparison. */
6056 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6057 if (neg)
6058 code = swap_tree_comparison (code);
/* After the swap, CODE below compares against +Inf; NEG records that
   the bounds must be mirrored (DBL_MAX becomes -DBL_MAX etc.).  */
6060 switch (code)
6062 case GT_EXPR:
6063 /* x > +Inf is always false, if we ignore sNaNs. */
6064 if (HONOR_SNANS (mode))
6065 return NULL_TREE;
6066 return omit_one_operand (type, integer_zero_node, arg0);
6068 case LE_EXPR:
6069 /* x <= +Inf is always true, if we don't care about NaNs. */
6070 if (! HONOR_NANS (mode))
6071 return omit_one_operand (type, integer_one_node, arg0);
6073 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
/* ARG0 is used twice, so only rewrite when it can be wrapped in a
   SAVE_EXPR (not at global scope, no placeholders).  */
6074 if (lang_hooks.decls.global_bindings_p () == 0
6075 && ! CONTAINS_PLACEHOLDER_P (arg0))
6077 arg0 = save_expr (arg0);
6078 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6080 break;
6082 case EQ_EXPR:
6083 case GE_EXPR:
6084 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6085 real_maxval (&max, neg, mode);
6086 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6087 arg0, build_real (TREE_TYPE (arg0), max));
6089 case LT_EXPR:
6090 /* x < +Inf is always equal to x <= DBL_MAX. */
6091 real_maxval (&max, neg, mode);
6092 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6093 arg0, build_real (TREE_TYPE (arg0), max));
6095 case NE_EXPR:
6096 /* x != +Inf is always equal to !(x > DBL_MAX). */
6097 real_maxval (&max, neg, mode);
6098 if (! HONOR_NANS (mode))
6099 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6100 arg0, build_real (TREE_TYPE (arg0), max));
6102 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6103 arg0, build_real (TREE_TYPE (arg0), max));
6104 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6106 default:
6107 break;
6110 return NULL_TREE;
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST:
   its low and high words are read directly below.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
				   TREE_INT_CST_HIGH (arg01),
				   TREE_INT_CST_LOW (arg1),
				   TREE_INT_CST_HIGH (arg1),
				   &lpart, &hpart, unsigned_p);
  prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
  prod = force_fit_type (prod, -1, overflow, false);
  neg_overflow = false;

  if (unsigned_p)
    {
      /* Unsigned division: X/C1 == C2 iff X lies in
	 [C1*C2, C1*C2 + C1 - 1], so LO is the product and HI is the
	 product plus C1 - 1 (computed with explicit overflow
	 detection).  */
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
				       TREE_INT_CST_HIGH (prod),
				       TREE_INT_CST_LOW (tmp),
				       TREE_INT_CST_HIGH (tmp),
				       &lpart, &hpart, unsigned_p);
      hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
      hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
			   TREE_CONSTANT_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      /* Positive divisor: the [LO, HI] range matching X/C1 == C2
	 depends on the sign of C2, because signed division truncates
	 towards zero.  */
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case 0:
	  /* C2 == 0: any X with |X| < C1 matches.  */
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* A TREE_OVERFLOW on LO or HI means the true bound fell outside the
     type's range, so the corresponding half of the range check
     degenerates into a single comparison or a constant result.  */
  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign testing.  Otherwise return NULL.  RESULT_TYPE is the
   desired result type.  */

static tree
fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.
	 sign_bit_p returns the (possibly stripped) operand when C is
	 its sign bit, and NULL_TREE otherwise.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
	  return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
			      result_type, fold_convert (stype, arg00),
			      build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  RESULT_TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
		      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      /* Shift the tested bit down to bit 0.  */
      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      /* For EQ_EXPR the bit must be inverted: (A >> C2) ^ 1.  */
      if (code == EQ_EXPR)
	inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
6394 /* Check whether we are allowed to reorder operands arg0 and arg1,
6395 such that the evaluation of arg1 occurs before arg0. */
6397 static bool
6398 reorder_operands_p (tree arg0, tree arg1)
6400 if (! flag_evaluation_order)
6401 return true;
6402 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6403 return true;
6404 return ! TREE_SIDE_EFFECTS (arg0)
6405 && ! TREE_SIDE_EFFECTS (arg1);
6408 /* Test whether it is preferable two swap two operands, ARG0 and
6409 ARG1, for example because ARG0 is an integer constant and ARG1
6410 isn't. If REORDER is true, only recommend swapping if we can
6411 evaluate the operands in reverse order. */
6413 bool
6414 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6416 STRIP_SIGN_NOPS (arg0);
6417 STRIP_SIGN_NOPS (arg1);
6419 if (TREE_CODE (arg1) == INTEGER_CST)
6420 return 0;
6421 if (TREE_CODE (arg0) == INTEGER_CST)
6422 return 1;
6424 if (TREE_CODE (arg1) == REAL_CST)
6425 return 0;
6426 if (TREE_CODE (arg0) == REAL_CST)
6427 return 1;
6429 if (TREE_CODE (arg1) == COMPLEX_CST)
6430 return 0;
6431 if (TREE_CODE (arg0) == COMPLEX_CST)
6432 return 1;
6434 if (TREE_CONSTANT (arg1))
6435 return 0;
6436 if (TREE_CONSTANT (arg0))
6437 return 1;
6439 if (optimize_size)
6440 return 0;
6442 if (reorder && flag_evaluation_order
6443 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6444 return 0;
6446 if (DECL_P (arg1))
6447 return 0;
6448 if (DECL_P (arg0))
6449 return 1;
6451 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6452 for commutative and comparison operators. Ensuring a canonical
6453 form allows the optimizers to find additional redundancies without
6454 having to explicitly check for both orderings. */
6455 if (TREE_CODE (arg0) == SSA_NAME
6456 && TREE_CODE (arg1) == SSA_NAME
6457 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6458 return 1;
6460 return 0;
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  /* Nothing to do unless ARG0 really is a widening of something.  */
  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, shorter_type);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2 (code, type, arg0_unw,
			fold_convert (shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  /* ABOVE: ARG1 is larger than every value of the shorter type;
     BELOW: ARG1 is smaller than every such value.  */
  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand (type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand (type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand (type, integer_zero_node, arg0);
      /* Fall through: here neither ABOVE nor BELOW holds, so the
	 tests in the GT/GE case below are false as well and control
	 reaches the default break.  */

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand (type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand (type, integer_one_node, arg0);
      /* Fall through to the default break.  */

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner, tmp;
  tree inner_type, outer_type;

  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  /* The conversion must not change the precision, only the sign.  */
  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  /* ARG1 must be a constant or itself a conversion from the same
     inner type.  */
  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
	    || TREE_CODE (arg1) == CONVERT_EXPR)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  /* Dropping the sign change is only valid for equality comparisons
     when the signedness actually differs.  */
  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      /* Reinterpret the constant's bits in the inner type,
	 propagating the overflow flags.  */
      tmp = build_int_cst_wide (inner_type,
				TREE_INT_CST_LOW (arg1),
				TREE_INT_CST_HIGH (arg1));
      arg1 = force_fit_type (tmp, 0,
			     TREE_OVERFLOW (arg1),
			     TREE_CONSTANT_OVERFLOW (arg1));
    }
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
   step of the array.  Reconstructs s and delta in the case of s * delta
   being an integer constant (and thus already folded).
   ADDR is the address.  OP1 is the multiplicative expression.
   If the function succeeds, the new address expression is returned.  Otherwise
   NULL_TREE is returned.  */

static tree
try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      /* A plain constant: S is unknown and is recovered from the
	 array's step below.  */
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Walk down the component references looking for an ARRAY_REF whose
     element size matches S (or evenly divides DELTA when S is
     unknown).  */
  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! itype)
	    continue;

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  break;
	}

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  /* POS now is the copied ARRAY_REF; fold DELTA into its index.  */
  TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
				       fold_convert (itype,
						     TREE_OPERAND (pos, 1)),
				       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
6714 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6715 means A >= Y && A != MAX, but in this case we know that
6716 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6718 static tree
6719 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6721 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6723 if (TREE_CODE (bound) == LT_EXPR)
6724 a = TREE_OPERAND (bound, 0);
6725 else if (TREE_CODE (bound) == GT_EXPR)
6726 a = TREE_OPERAND (bound, 1);
6727 else
6728 return NULL_TREE;
6730 typea = TREE_TYPE (a);
6731 if (!INTEGRAL_TYPE_P (typea)
6732 && !POINTER_TYPE_P (typea))
6733 return NULL_TREE;
6735 if (TREE_CODE (ineq) == LT_EXPR)
6737 a1 = TREE_OPERAND (ineq, 1);
6738 y = TREE_OPERAND (ineq, 0);
6740 else if (TREE_CODE (ineq) == GT_EXPR)
6742 a1 = TREE_OPERAND (ineq, 0);
6743 y = TREE_OPERAND (ineq, 1);
6745 else
6746 return NULL_TREE;
6748 if (TREE_TYPE (a1) != typea)
6749 return NULL_TREE;
6751 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6752 if (!integer_onep (diff))
6753 return NULL_TREE;
6755 return fold_build2 (GE_EXPR, type, a, y);
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else
    {
      /* View a plain operand A as A * 1.  */
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else
    {
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  /* Look for a multiplicand shared by both products.  */
  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
	  < (int11 >= 0 ? int11 : -int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = arg01;
	  swap = true;
	}
      else
	maybe_same = arg11;

      /* If the smaller factor is a power of two dividing the larger,
	 fold the quotient into the other operand's product.  */
      if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
	{
	  alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
			      build_int_cst (TREE_TYPE (arg00),
					     int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  /* Restore the original operand order undone by the swap
	     above, so ALT0/ALT1 line up with ARG0/ARG1.  */
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2 (MULT_EXPR, type,
			fold_build2 (code, type,
				     fold_convert (type, alt0),
				     fold_convert (type, alt1)),
			fold_convert (type, same));

  return NULL_TREE;
}
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      /* Extract byte BYTE of the value, counting from the least
	 significant end; the value is split between the LOW and HIGH
	 words of the constant.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      /* Map the logical byte index to its position in target memory
	 order, honoring both word and byte endianness.  */
      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
6895 /* Subroutine of native_encode_expr. Encode the REAL_CST
6896 specified by EXPR into the buffer PTR of length LEN bytes.
6897 Return the number of bytes placed in the buffer, or zero
6898 upon failure. */
6900 static int
6901 native_encode_real (tree expr, unsigned char *ptr, int len)
6903 tree type = TREE_TYPE (expr);
6904 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6905 int byte, offset, word, words;
6906 unsigned char value;
6908 /* There are always 32 bits in each long, no matter the size of
6909 the hosts long. We handle floating point representations with
6910 up to 192 bits. */
6911 long tmp[6];
6913 if (total_bytes > len)
6914 return 0;
6915 words = total_bytes / UNITS_PER_WORD;
6917 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6919 for (byte = 0; byte < total_bytes; byte++)
6921 int bitpos = byte * BITS_PER_UNIT;
6922 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6924 if (total_bytes > UNITS_PER_WORD)
6926 word = byte / UNITS_PER_WORD;
6927 if (FLOAT_WORDS_BIG_ENDIAN)
6928 word = (words - 1) - word;
6929 offset = word * UNITS_PER_WORD;
6930 if (BYTES_BIG_ENDIAN)
6931 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6932 else
6933 offset += byte % UNITS_PER_WORD;
6935 else
6936 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6937 ptr[offset] = value;
6939 return total_bytes;
6942 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6943 specified by EXPR into the buffer PTR of length LEN bytes.
6944 Return the number of bytes placed in the buffer, or zero
6945 upon failure. */
6947 static int
6948 native_encode_complex (tree expr, unsigned char *ptr, int len)
6950 int rsize, isize;
6951 tree part;
6953 part = TREE_REALPART (expr);
6954 rsize = native_encode_expr (part, ptr, len);
6955 if (rsize == 0)
6956 return 0;
6957 part = TREE_IMAGPART (expr);
6958 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6959 if (isize != rsize)
6960 return 0;
6961 return rsize + isize;
6965 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6966 specified by EXPR into the buffer PTR of length LEN bytes.
6967 Return the number of bytes placed in the buffer, or zero
6968 upon failure. */
6970 static int
6971 native_encode_vector (tree expr, unsigned char *ptr, int len)
6973 int i, size, offset, count;
6974 tree itype, elem, elements;
6976 offset = 0;
6977 elements = TREE_VECTOR_CST_ELTS (expr);
6978 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6979 itype = TREE_TYPE (TREE_TYPE (expr));
6980 size = GET_MODE_SIZE (TYPE_MODE (itype));
6981 for (i = 0; i < count; i++)
6983 if (elements)
6985 elem = TREE_VALUE (elements);
6986 elements = TREE_CHAIN (elements);
6988 else
6989 elem = NULL_TREE;
6991 if (elem)
6993 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
6994 return 0;
6996 else
6998 if (offset + size > len)
6999 return 0;
7000 memset (ptr+offset, 0, size);
7002 offset += size;
7004 return offset;
7008 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7009 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7010 buffer PTR of length LEN bytes. Return the number of bytes
7011 placed in the buffer, or zero upon failure. */
7013 static int
7014 native_encode_expr (tree expr, unsigned char *ptr, int len)
7016 switch (TREE_CODE (expr))
7018 case INTEGER_CST:
7019 return native_encode_int (expr, ptr, len);
7021 case REAL_CST:
7022 return native_encode_real (expr, ptr, len);
7024 case COMPLEX_CST:
7025 return native_encode_complex (expr, ptr, len);
7027 case VECTOR_CST:
7028 return native_encode_vector (expr, ptr, len);
7030 default:
7031 return 0;
7036 /* Subroutine of native_interpret_expr. Interpret the contents of
7037 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7038 If the buffer cannot be interpreted, return NULL_TREE. */
7040 static tree
7041 native_interpret_int (tree type, unsigned char *ptr, int len)
7043 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7044 int byte, offset, word, words;
7045 unsigned char value;
7046 unsigned int HOST_WIDE_INT lo = 0;
7047 HOST_WIDE_INT hi = 0;
7049 if (total_bytes > len)
7050 return NULL_TREE;
7051 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7052 return NULL_TREE;
7053 words = total_bytes / UNITS_PER_WORD;
7055 for (byte = 0; byte < total_bytes; byte++)
7057 int bitpos = byte * BITS_PER_UNIT;
7058 if (total_bytes > UNITS_PER_WORD)
7060 word = byte / UNITS_PER_WORD;
7061 if (WORDS_BIG_ENDIAN)
7062 word = (words - 1) - word;
7063 offset = word * UNITS_PER_WORD;
7064 if (BYTES_BIG_ENDIAN)
7065 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7066 else
7067 offset += byte % UNITS_PER_WORD;
7069 else
7070 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7071 value = ptr[offset];
7073 if (bitpos < HOST_BITS_PER_WIDE_INT)
7074 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7075 else
7076 hi |= (unsigned HOST_WIDE_INT) value
7077 << (bitpos - HOST_BITS_PER_WIDE_INT);
7080 return force_fit_type (build_int_cst_wide (type, lo, hi),
7081 0, false, false);
7085 /* Subroutine of native_interpret_expr. Interpret the contents of
7086 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7087 If the buffer cannot be interpreted, return NULL_TREE. */
7089 static tree
7090 native_interpret_real (tree type, unsigned char *ptr, int len)
7092 enum machine_mode mode = TYPE_MODE (type);
7093 int total_bytes = GET_MODE_SIZE (mode);
7094 int byte, offset, word, words;
7095 unsigned char value;
7096 /* There are always 32 bits in each long, no matter the size of
7097 the hosts long. We handle floating point representations with
7098 up to 192 bits. */
7099 REAL_VALUE_TYPE r;
7100 long tmp[6];
7102 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7103 if (total_bytes > len || total_bytes > 24)
7104 return NULL_TREE;
7105 words = total_bytes / UNITS_PER_WORD;
7107 memset (tmp, 0, sizeof (tmp));
7108 for (byte = 0; byte < total_bytes; byte++)
7110 int bitpos = byte * BITS_PER_UNIT;
7111 if (total_bytes > UNITS_PER_WORD)
7113 word = byte / UNITS_PER_WORD;
7114 if (FLOAT_WORDS_BIG_ENDIAN)
7115 word = (words - 1) - word;
7116 offset = word * UNITS_PER_WORD;
7117 if (BYTES_BIG_ENDIAN)
7118 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7119 else
7120 offset += byte % UNITS_PER_WORD;
7122 else
7123 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7124 value = ptr[offset];
7126 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7129 real_from_target (&r, tmp, mode);
7130 return build_real (type, r);
7134 /* Subroutine of native_interpret_expr. Interpret the contents of
7135 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7136 If the buffer cannot be interpreted, return NULL_TREE. */
7138 static tree
7139 native_interpret_complex (tree type, unsigned char *ptr, int len)
7141 tree etype, rpart, ipart;
7142 int size;
7144 etype = TREE_TYPE (type);
7145 size = GET_MODE_SIZE (TYPE_MODE (etype));
7146 if (size * 2 > len)
7147 return NULL_TREE;
7148 rpart = native_interpret_expr (etype, ptr, size);
7149 if (!rpart)
7150 return NULL_TREE;
7151 ipart = native_interpret_expr (etype, ptr+size, size);
7152 if (!ipart)
7153 return NULL_TREE;
7154 return build_complex (type, rpart, ipart);
7158 /* Subroutine of native_interpret_expr. Interpret the contents of
7159 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7160 If the buffer cannot be interpreted, return NULL_TREE. */
7162 static tree
7163 native_interpret_vector (tree type, unsigned char *ptr, int len)
7165 tree etype, elem, elements;
7166 int i, size, count;
7168 etype = TREE_TYPE (type);
7169 size = GET_MODE_SIZE (TYPE_MODE (etype));
7170 count = TYPE_VECTOR_SUBPARTS (type);
7171 if (size * count > len)
7172 return NULL_TREE;
7174 elements = NULL_TREE;
7175 for (i = count - 1; i >= 0; i--)
7177 elem = native_interpret_expr (etype, ptr+(i*size), size);
7178 if (!elem)
7179 return NULL_TREE;
7180 elements = tree_cons (NULL_TREE, elem, elements);
7182 return build_vector (type, elements);
7186 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7187 the buffer PTR of length LEN as a constant of type TYPE. For
7188 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7189 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7190 return NULL_TREE. */
7192 static tree
7193 native_interpret_expr (tree type, unsigned char *ptr, int len)
7195 switch (TREE_CODE (type))
7197 case INTEGER_TYPE:
7198 case ENUMERAL_TYPE:
7199 case BOOLEAN_TYPE:
7200 return native_interpret_int (type, ptr, len);
7202 case REAL_TYPE:
7203 return native_interpret_real (type, ptr, len);
7205 case COMPLEX_TYPE:
7206 return native_interpret_complex (type, ptr, len);
7208 case VECTOR_TYPE:
7209 return native_interpret_vector (type, ptr, len);
7211 default:
7212 return NULL_TREE;
7217 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7218 TYPE at compile-time. If we're unable to perform the conversion
7219 return NULL_TREE. */
7221 static tree
7222 fold_view_convert_expr (tree type, tree expr)
7224 /* We support up to 512-bit values (for V8DFmode). */
7225 unsigned char buffer[64];
7226 int len;
7228 /* Check that the host and target are sane. */
7229 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7230 return NULL_TREE;
7232 len = native_encode_expr (expr, buffer, sizeof (buffer));
7233 if (len == 0)
7234 return NULL_TREE;
7236 return native_interpret_expr (type, buffer, len);
7240 /* Fold a unary expression of code CODE and type TYPE with operand
7241 OP0. Return the folded expression if folding is successful.
7242 Otherwise, return NULL_TREE. */
/* NOTE(review): this listing came through a blob viewer that dropped
   brace-only lines and prefixed each line with its original file line
   number; the token stream below is otherwise intact and the
   indentation still mirrors the original block structure.  */
7244 tree
7245 fold_unary (enum tree_code code, tree type, tree op0)
7247 tree tem;
7248 tree arg0;
7249 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* Only genuine single-operand expression codes are accepted here.  */
7251 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7252 && TREE_CODE_LENGTH (code) == 1);
7254 arg0 = op0;
7255 if (arg0)
7257 if (code == NOP_EXPR || code == CONVERT_EXPR
7258 || code == FLOAT_EXPR || code == ABS_EXPR)
7260 /* Don't use STRIP_NOPS, because signedness of argument type
7261 matters. */
7262 STRIP_SIGN_NOPS (arg0);
7264 else
7266 /* Strip any conversions that don't change the mode. This
7267 is safe for every expression, except for a comparison
7268 expression because its signedness is derived from its
7269 operands.
7271 Note that this is done as an internal manipulation within
7272 the constant folder, in order to find the simplest
7273 representation of the arguments so that their form can be
7274 studied. In any cases, the appropriate type conversions
7275 should be put back in the tree that will get out of the
7276 constant folder. */
7277 STRIP_NOPS (arg0);
/* For genuine unary operators, first distribute the operation into
   the arms of a COMPOUND_EXPR or COND_EXPR operand, or across a
   comparison when the result type allows it.  */
7281 if (TREE_CODE_CLASS (code) == tcc_unary)
7283 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7284 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7285 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7286 else if (TREE_CODE (arg0) == COND_EXPR)
7288 tree arg01 = TREE_OPERAND (arg0, 1);
7289 tree arg02 = TREE_OPERAND (arg0, 2);
7290 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7291 arg01 = fold_build1 (code, type, arg01);
7292 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7293 arg02 = fold_build1 (code, type, arg02);
7294 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7295 arg01, arg02);
7297 /* If this was a conversion, and all we did was to move into
7298 inside the COND_EXPR, bring it back out. But leave it if
7299 it is a conversion from integer to integer and the
7300 result precision is no wider than a word since such a
7301 conversion is cheap and may be optimized away by combine,
7302 while it couldn't if it were outside the COND_EXPR. Then return
7303 so we don't get into an infinite recursion loop taking the
7304 conversion out and then back in. */
7306 if ((code == NOP_EXPR || code == CONVERT_EXPR
7307 || code == NON_LVALUE_EXPR)
7308 && TREE_CODE (tem) == COND_EXPR
7309 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7310 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
/* NOTE(review): the next two VOID_TYPE_P tests are applied to the
   operands themselves rather than to their TREE_TYPE, unlike the
   similar tests above — looks suspicious; confirm against upstream
   fold-const.c history before changing.  */
7311 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7312 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7313 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7314 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7315 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7316 && (INTEGRAL_TYPE_P
7317 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7318 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7319 || flag_syntax_only))
7320 tem = build1 (code, type,
7321 build3 (COND_EXPR,
7322 TREE_TYPE (TREE_OPERAND
7323 (TREE_OPERAND (tem, 1), 0)),
7324 TREE_OPERAND (tem, 0),
7325 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7326 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7327 return tem;
7329 else if (COMPARISON_CLASS_P (arg0))
7331 if (TREE_CODE (type) == BOOLEAN_TYPE)
7333 arg0 = copy_node (arg0);
7334 TREE_TYPE (arg0) = type;
7335 return arg0;
7337 else if (TREE_CODE (type) != INTEGER_TYPE)
7338 return fold_build3 (COND_EXPR, type, arg0,
7339 fold_build1 (code, type,
7340 integer_one_node),
7341 fold_build1 (code, type,
7342 integer_zero_node));
/* Code-specific foldings; each case either returns a simplified
   tree or returns NULL_TREE to indicate no folding applied.  */
7346 switch (code)
7348 case NOP_EXPR:
7349 case FLOAT_EXPR:
7350 case CONVERT_EXPR:
7351 case FIX_TRUNC_EXPR:
7352 if (TREE_TYPE (op0) == type)
7353 return op0;
7355 /* If we have (type) (a CMP b) and type is an integral type, return
7356 new expression involving the new type. */
7357 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7358 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7359 TREE_OPERAND (op0, 1));
7361 /* Handle cases of two conversions in a row. */
7362 if (TREE_CODE (op0) == NOP_EXPR
7363 || TREE_CODE (op0) == CONVERT_EXPR)
7365 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7366 tree inter_type = TREE_TYPE (op0);
7367 int inside_int = INTEGRAL_TYPE_P (inside_type);
7368 int inside_ptr = POINTER_TYPE_P (inside_type);
7369 int inside_float = FLOAT_TYPE_P (inside_type);
7370 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7371 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7372 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7373 int inter_int = INTEGRAL_TYPE_P (inter_type);
7374 int inter_ptr = POINTER_TYPE_P (inter_type);
7375 int inter_float = FLOAT_TYPE_P (inter_type);
7376 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7377 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7378 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7379 int final_int = INTEGRAL_TYPE_P (type);
7380 int final_ptr = POINTER_TYPE_P (type);
7381 int final_float = FLOAT_TYPE_P (type);
7382 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7383 unsigned int final_prec = TYPE_PRECISION (type);
7384 int final_unsignedp = TYPE_UNSIGNED (type);
7386 /* In addition to the cases of two conversions in a row
7387 handled below, if we are converting something to its own
7388 type via an object of identical or wider precision, neither
7389 conversion is needed. */
7390 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7391 && (((inter_int || inter_ptr) && final_int)
7392 || (inter_float && final_float))
7393 && inter_prec >= final_prec)
7394 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7396 /* Likewise, if the intermediate and final types are either both
7397 float or both integer, we don't need the middle conversion if
7398 it is wider than the final type and doesn't change the signedness
7399 (for integers). Avoid this if the final type is a pointer
7400 since then we sometimes need the inner conversion. Likewise if
7401 the outer has a precision not equal to the size of its mode. */
7402 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7403 || (inter_float && inside_float)
7404 || (inter_vec && inside_vec))
7405 && inter_prec >= inside_prec
7406 && (inter_float || inter_vec
7407 || inter_unsignedp == inside_unsignedp)
7408 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7409 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7410 && ! final_ptr
7411 && (! final_vec || inter_prec == inside_prec))
7412 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7414 /* If we have a sign-extension of a zero-extended value, we can
7415 replace that by a single zero-extension. */
7416 if (inside_int && inter_int && final_int
7417 && inside_prec < inter_prec && inter_prec < final_prec
7418 && inside_unsignedp && !inter_unsignedp)
7419 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7421 /* Two conversions in a row are not needed unless:
7422 - some conversion is floating-point (overstrict for now), or
7423 - some conversion is a vector (overstrict for now), or
7424 - the intermediate type is narrower than both initial and
7425 final, or
7426 - the intermediate type and innermost type differ in signedness,
7427 and the outermost type is wider than the intermediate, or
7428 - the initial type is a pointer type and the precisions of the
7429 intermediate and final types differ, or
7430 - the final type is a pointer type and the precisions of the
7431 initial and intermediate types differ.
7432 - the final type is a pointer type and the initial type not
7433 - the initial type is a pointer to an array and the final type
7434 not. */
7435 if (! inside_float && ! inter_float && ! final_float
7436 && ! inside_vec && ! inter_vec && ! final_vec
7437 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7438 && ! (inside_int && inter_int
7439 && inter_unsignedp != inside_unsignedp
7440 && inter_prec < final_prec)
7441 && ((inter_unsignedp && inter_prec > inside_prec)
7442 == (final_unsignedp && final_prec > inter_prec))
7443 && ! (inside_ptr && inter_prec != final_prec)
7444 && ! (final_ptr && inside_prec != inter_prec)
7445 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7446 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7447 && final_ptr == inside_ptr
7448 && ! (inside_ptr
7449 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7450 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7451 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7454 /* Handle (T *)&A.B.C for A being of type T and B and C
7455 living at offset zero. This occurs frequently in
7456 C++ upcasting and then accessing the base. */
7457 if (TREE_CODE (op0) == ADDR_EXPR
7458 && POINTER_TYPE_P (type)
7459 && handled_component_p (TREE_OPERAND (op0, 0)))
7461 HOST_WIDE_INT bitsize, bitpos;
7462 tree offset;
7463 enum machine_mode mode;
7464 int unsignedp, volatilep;
7465 tree base = TREE_OPERAND (op0, 0);
7466 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7467 &mode, &unsignedp, &volatilep, false);
7468 /* If the reference was to a (constant) zero offset, we can use
7469 the address of the base if it has the same base type
7470 as the result type. */
7471 if (! offset && bitpos == 0
7472 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7473 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7474 return fold_convert (type, build_fold_addr_expr (base));
7477 if (TREE_CODE (op0) == MODIFY_EXPR
7478 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7479 /* Detect assigning a bitfield. */
7480 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7481 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7483 /* Don't leave an assignment inside a conversion
7484 unless assigning a bitfield. */
7485 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7486 /* First do the assignment, then return converted constant. */
7487 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7488 TREE_NO_WARNING (tem) = 1;
7489 TREE_USED (tem) = 1;
7490 return tem;
7493 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7494 constants (if x has signed type, the sign bit cannot be set
7495 in c). This folds extension into the BIT_AND_EXPR. */
7496 if (INTEGRAL_TYPE_P (type)
7497 && TREE_CODE (type) != BOOLEAN_TYPE
7498 && TREE_CODE (op0) == BIT_AND_EXPR
7499 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7501 tree and = op0;
7502 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7503 int change = 0;
7505 if (TYPE_UNSIGNED (TREE_TYPE (and))
7506 || (TYPE_PRECISION (type)
7507 <= TYPE_PRECISION (TREE_TYPE (and))))
7508 change = 1;
7509 else if (TYPE_PRECISION (TREE_TYPE (and1))
7510 <= HOST_BITS_PER_WIDE_INT
7511 && host_integerp (and1, 1))
7513 unsigned HOST_WIDE_INT cst;
7515 cst = tree_low_cst (and1, 1);
/* The transformation is accepted only when bit PREC-1 and above of
   AND1 are zero, i.e. the sign bit of the mask constant is clear.  */
7516 cst &= (HOST_WIDE_INT) -1
7517 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7518 change = (cst == 0);
7519 #ifdef LOAD_EXTEND_OP
7520 if (change
7521 && !flag_syntax_only
7522 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7523 == ZERO_EXTEND))
7525 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7526 and0 = fold_convert (uns, and0);
7527 and1 = fold_convert (uns, and1);
7529 #endif
7531 if (change)
7533 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7534 TREE_INT_CST_HIGH (and1));
7535 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7536 TREE_CONSTANT_OVERFLOW (and1));
7537 return fold_build2 (BIT_AND_EXPR, type,
7538 fold_convert (type, and0), tem);
7542 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7543 T2 being pointers to types of the same size. */
7544 if (POINTER_TYPE_P (type)
7545 && BINARY_CLASS_P (arg0)
7546 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7547 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7549 tree arg00 = TREE_OPERAND (arg0, 0);
7550 tree t0 = type;
7551 tree t1 = TREE_TYPE (arg00);
7552 tree tt0 = TREE_TYPE (t0);
7553 tree tt1 = TREE_TYPE (t1);
7554 tree s0 = TYPE_SIZE (tt0);
7555 tree s1 = TYPE_SIZE (tt1);
7557 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7558 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7559 TREE_OPERAND (arg0, 1));
7562 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7563 of the same precision, and X is a integer type not narrower than
7564 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7565 if (INTEGRAL_TYPE_P (type)
7566 && TREE_CODE (op0) == BIT_NOT_EXPR
7567 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7568 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7569 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7570 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7572 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7573 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7574 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7575 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
/* Fall back to constant folding of the conversion itself.  */
7578 tem = fold_convert_const (code, type, arg0);
7579 return tem ? tem : NULL_TREE;
7581 case VIEW_CONVERT_EXPR:
7582 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7583 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7584 return fold_view_convert_expr (type, op0);
7586 case NEGATE_EXPR:
7587 tem = fold_negate_expr (arg0);
7588 if (tem)
7589 return fold_convert (type, tem);
7590 return NULL_TREE;
7592 case ABS_EXPR:
7593 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7594 return fold_abs_const (arg0, type);
7595 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7596 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7597 /* Convert fabs((double)float) into (double)fabsf(float). */
7598 else if (TREE_CODE (arg0) == NOP_EXPR
7599 && TREE_CODE (type) == REAL_TYPE)
7601 tree targ0 = strip_float_extensions (arg0);
7602 if (targ0 != arg0)
7603 return fold_convert (type, fold_build1 (ABS_EXPR,
7604 TREE_TYPE (targ0),
7605 targ0));
7607 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7608 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7609 return arg0;
7611 /* Strip sign ops from argument. */
7612 if (TREE_CODE (type) == REAL_TYPE)
7614 tem = fold_strip_sign_ops (arg0);
7615 if (tem)
7616 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7618 return NULL_TREE;
7620 case CONJ_EXPR:
7621 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7622 return fold_convert (type, arg0);
7623 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7625 tree itype = TREE_TYPE (type);
7626 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7627 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7628 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7630 if (TREE_CODE (arg0) == COMPLEX_CST)
7632 tree itype = TREE_TYPE (type);
7633 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7634 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7635 return build_complex (type, rpart, negate_expr (ipart));
7637 if (TREE_CODE (arg0) == CONJ_EXPR)
7638 return fold_convert (type, TREE_OPERAND (arg0, 0));
7639 return NULL_TREE;
7641 case BIT_NOT_EXPR:
7642 if (TREE_CODE (arg0) == INTEGER_CST)
7643 return fold_not_const (arg0, type);
7644 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7645 return TREE_OPERAND (arg0, 0);
7646 /* Convert ~ (-A) to A - 1. */
7647 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7648 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7649 build_int_cst (type, 1));
7650 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7651 else if (INTEGRAL_TYPE_P (type)
7652 && ((TREE_CODE (arg0) == MINUS_EXPR
7653 && integer_onep (TREE_OPERAND (arg0, 1)))
7654 || (TREE_CODE (arg0) == PLUS_EXPR
7655 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7656 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7657 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7658 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7659 && (tem = fold_unary (BIT_NOT_EXPR, type,
7660 fold_convert (type,
7661 TREE_OPERAND (arg0, 0)))))
7662 return fold_build2 (BIT_XOR_EXPR, type, tem,
7663 fold_convert (type, TREE_OPERAND (arg0, 1)));
7664 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7665 && (tem = fold_unary (BIT_NOT_EXPR, type,
7666 fold_convert (type,
7667 TREE_OPERAND (arg0, 1)))))
7668 return fold_build2 (BIT_XOR_EXPR, type,
7669 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7671 return NULL_TREE;
7673 case TRUTH_NOT_EXPR:
7674 /* The argument to invert_truthvalue must have Boolean type. */
7675 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7676 arg0 = fold_convert (boolean_type_node, arg0);
7678 /* Note that the operand of this must be an int
7679 and its values must be 0 or 1.
7680 ("true" is a fixed value perhaps depending on the language,
7681 but we don't handle values other than 1 correctly yet.) */
7682 tem = fold_truth_not_expr (arg0);
7683 if (!tem)
7684 return NULL_TREE;
7685 return fold_convert (type, tem);
7687 case REALPART_EXPR:
7688 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7689 return fold_convert (type, arg0);
7690 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7691 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7692 TREE_OPERAND (arg0, 1));
7693 if (TREE_CODE (arg0) == COMPLEX_CST)
7694 return fold_convert (type, TREE_REALPART (arg0));
7695 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7697 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7698 tem = fold_build2 (TREE_CODE (arg0), itype,
7699 fold_build1 (REALPART_EXPR, itype,
7700 TREE_OPERAND (arg0, 0)),
7701 fold_build1 (REALPART_EXPR, itype,
7702 TREE_OPERAND (arg0, 1)));
7703 return fold_convert (type, tem);
7705 if (TREE_CODE (arg0) == CONJ_EXPR)
7707 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7708 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7709 return fold_convert (type, tem);
7711 return NULL_TREE;
7713 case IMAGPART_EXPR:
7714 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7715 return fold_convert (type, integer_zero_node);
7716 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7717 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7718 TREE_OPERAND (arg0, 0));
7719 if (TREE_CODE (arg0) == COMPLEX_CST)
7720 return fold_convert (type, TREE_IMAGPART (arg0));
7721 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7723 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7724 tem = fold_build2 (TREE_CODE (arg0), itype,
7725 fold_build1 (IMAGPART_EXPR, itype,
7726 TREE_OPERAND (arg0, 0)),
7727 fold_build1 (IMAGPART_EXPR, itype,
7728 TREE_OPERAND (arg0, 1)));
7729 return fold_convert (type, tem);
7731 if (TREE_CODE (arg0) == CONJ_EXPR)
7733 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7734 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7735 return fold_convert (type, negate_expr (tem));
7737 return NULL_TREE;
7739 default:
7740 return NULL_TREE;
7741 } /* switch (code) */
7744 /* Fold a binary expression of code CODE and type TYPE with operands
7745 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7746 Return the folded expression if folding is successful. Otherwise,
7747 return NULL_TREE. */
7749 static tree
7750 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7752 enum tree_code compl_code;
7754 if (code == MIN_EXPR)
7755 compl_code = MAX_EXPR;
7756 else if (code == MAX_EXPR)
7757 compl_code = MIN_EXPR;
7758 else
7759 gcc_unreachable ();
7761 /* MIN (MAX (a, b), b) == b. */
7762 if (TREE_CODE (op0) == compl_code
7763 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7764 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7766 /* MIN (MAX (b, a), b) == b. */
7767 if (TREE_CODE (op0) == compl_code
7768 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7769 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7770 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7772 /* MIN (a, MAX (a, b)) == a. */
7773 if (TREE_CODE (op1) == compl_code
7774 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7775 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7776 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7778 /* MIN (a, MAX (b, a)) == a. */
7779 if (TREE_CODE (op1) == compl_code
7780 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7781 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7782 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7784 return NULL_TREE;
7787 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7788 by changing CODE to reduce the magnitude of constants involved in
7789 ARG0 of the comparison.
7790 Returns a canonicalized comparison tree if a simplification was
7791 possible, otherwise returns NULL_TREE. */
/* NOTE(review): the only caller in this file is
   maybe_canonicalize_comparison, which filters out wrapping/trapping,
   unsigned and pointer operand types before calling here, so the
   +-1 adjustments below rely on undefined signed overflow.  */
7793 static tree
7794 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
7795 tree arg0, tree arg1)
7797 enum tree_code code0 = TREE_CODE (arg0);
7798 tree t, cst0 = NULL_TREE;
7799 int sgn0;
7800 bool swap = false;
7802 /* Match A +- CST code arg1 and CST code arg1. */
7803 if (!(((code0 == MINUS_EXPR
7804 || code0 == PLUS_EXPR)
7805 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7806 || code0 == INTEGER_CST))
7807 return NULL_TREE;
7809 /* Identify the constant in arg0 and its sign. */
7810 if (code0 == INTEGER_CST)
7811 cst0 = arg0;
7812 else
7813 cst0 = TREE_OPERAND (arg0, 1);
7814 sgn0 = tree_int_cst_sgn (cst0);
7816 /* Overflowed constants and zero will cause problems. */
7817 if (integer_zerop (cst0)
7818 || TREE_OVERFLOW (cst0))
7819 return NULL_TREE;
7821 /* See if we can reduce the magnitude of the constant in
7822 arg0 by changing the comparison code. */
7823 if (code0 == INTEGER_CST)
7825 /* CST <= arg1 -> CST-1 < arg1. */
7826 if (code == LE_EXPR && sgn0 == 1)
7827 code = LT_EXPR;
7828 /* -CST < arg1 -> -CST-1 <= arg1. */
7829 else if (code == LT_EXPR && sgn0 == -1)
7830 code = LE_EXPR;
7831 /* CST > arg1 -> CST-1 >= arg1. */
7832 else if (code == GT_EXPR && sgn0 == 1)
7833 code = GE_EXPR;
7834 /* -CST >= arg1 -> -CST-1 > arg1. */
7835 else if (code == GE_EXPR && sgn0 == -1)
7836 code = GT_EXPR;
7837 else
7838 return NULL_TREE;
7839 /* arg1 code' CST' might be more canonical. */
7840 swap = true;
7842 else
7844 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7845 if (code == LT_EXPR
7846 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7847 code = LE_EXPR;
7848 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7849 else if (code == GT_EXPR
7850 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7851 code = GE_EXPR;
7852 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7853 else if (code == LE_EXPR
7854 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7855 code = LT_EXPR;
7856 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7857 else if (code == GE_EXPR
7858 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7859 code = GT_EXPR;
7860 else
7861 return NULL_TREE;
7864 /* Now build the constant reduced in magnitude. */
/* Moving toward zero: subtract 1 from a positive constant, add 1
   to a negative one.  */
7865 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
7866 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
/* Re-attach the variable part when ARG0 was A +- CST.  */
7867 if (code0 != INTEGER_CST)
7868 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
7870 /* If swapping might yield to a more canonical form, do so. */
7871 if (swap)
7872 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
7873 else
7874 return fold_build2 (code, type, t, arg1);
7877 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
7878 overflow further. Try to decrease the magnitude of constants involved
7879 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7880 and put sole constants at the second argument position.
7881 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7883 static tree
7884 maybe_canonicalize_comparison (enum tree_code code, tree type,
7885 tree arg0, tree arg1)
7887 tree t;
7889 /* In principle pointers also have undefined overflow behavior,
7890 but that causes problems elsewhere. */
7891 if ((flag_wrapv || flag_trapv)
7892 || (TYPE_UNSIGNED (TREE_TYPE (arg0))
7893 || POINTER_TYPE_P (TREE_TYPE (arg0))))
7894 return NULL_TREE;
7896 /* Try canonicalization by simplifying arg0. */
7897 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
7898 if (t)
7899 return t;
7901 /* Try canonicalization by simplifying arg1 using the swapped
7902 comparison. */
7903 code = swap_tree_comparison (code);
7904 return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
7907 /* Subroutine of fold_binary. This routine performs all of the
7908 transformations that are common to the equality/inequality
7909 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7910 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7911 fold_binary should call fold_binary. Fold a comparison with
7912 tree code CODE and type TYPE with operands OP0 and OP1. Return
7913 the folded comparison or NULL_TREE. */
7915 static tree
7916 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7918 tree arg0, arg1, tem;
7920 arg0 = op0;
7921 arg1 = op1;
7923 STRIP_SIGN_NOPS (arg0);
7924 STRIP_SIGN_NOPS (arg1);
7926 tem = fold_relational_const (code, type, arg0, arg1);
7927 if (tem != NULL_TREE)
7928 return tem;
7930 /* If one arg is a real or integer constant, put it last. */
7931 if (tree_swap_operands_p (arg0, arg1, true))
7932 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7934 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7935 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7936 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7937 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7938 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7939 && !(flag_wrapv || flag_trapv))
7940 && (TREE_CODE (arg1) == INTEGER_CST
7941 && !TREE_OVERFLOW (arg1)))
7943 tree const1 = TREE_OPERAND (arg0, 1);
7944 tree const2 = arg1;
7945 tree variable = TREE_OPERAND (arg0, 0);
7946 tree lhs;
7947 int lhs_add;
7948 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7950 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7951 TREE_TYPE (arg1), const2, const1);
7952 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7953 && (TREE_CODE (lhs) != INTEGER_CST
7954 || !TREE_OVERFLOW (lhs)))
7955 return fold_build2 (code, type, variable, lhs);
7958 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
7959 same object, then we can fold this to a comparison of the two offsets in
7960 signed size type. This is possible because pointer arithmetic is
7961 restricted to retain within an object and overflow on pointer differences
7962 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
7963 if (POINTER_TYPE_P (TREE_TYPE (arg0))
7964 && !flag_wrapv && !flag_trapv)
7966 tree base0, offset0, base1, offset1;
7968 if (extract_array_ref (arg0, &base0, &offset0)
7969 && extract_array_ref (arg1, &base1, &offset1)
7970 && operand_equal_p (base0, base1, 0))
7972 tree signed_size_type_node;
7973 signed_size_type_node = signed_type_for (size_type_node);
7975 /* By converting to signed size type we cover middle-end pointer
7976 arithmetic which operates on unsigned pointer types of size
7977 type size and ARRAY_REF offsets which are properly sign or
7978 zero extended from their type in case it is narrower than
7979 size type. */
7980 if (offset0 == NULL_TREE)
7981 offset0 = build_int_cst (signed_size_type_node, 0);
7982 else
7983 offset0 = fold_convert (signed_size_type_node, offset0);
7984 if (offset1 == NULL_TREE)
7985 offset1 = build_int_cst (signed_size_type_node, 0);
7986 else
7987 offset1 = fold_convert (signed_size_type_node, offset1);
7989 return fold_build2 (code, type, offset0, offset1);
7993 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
7994 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
7995 the resulting offset is smaller in absolute value than the
7996 original one. */
7997 if (!(flag_wrapv || flag_trapv)
7998 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
7999 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8000 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8001 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8002 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8003 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8004 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8006 tree const1 = TREE_OPERAND (arg0, 1);
8007 tree const2 = TREE_OPERAND (arg1, 1);
8008 tree variable1 = TREE_OPERAND (arg0, 0);
8009 tree variable2 = TREE_OPERAND (arg1, 0);
8010 tree cst;
8012 /* Put the constant on the side where it doesn't overflow and is
8013 of lower absolute value than before. */
8014 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8015 ? MINUS_EXPR : PLUS_EXPR,
8016 const2, const1, 0);
8017 if (!TREE_OVERFLOW (cst)
8018 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8019 return fold_build2 (code, type,
8020 variable1,
8021 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8022 variable2, cst));
8024 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8025 ? MINUS_EXPR : PLUS_EXPR,
8026 const1, const2, 0);
8027 if (!TREE_OVERFLOW (cst)
8028 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8029 return fold_build2 (code, type,
8030 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8031 variable1, cst),
8032 variable2);
8035 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8036 if (tem)
8037 return tem;
8039 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8041 tree targ0 = strip_float_extensions (arg0);
8042 tree targ1 = strip_float_extensions (arg1);
8043 tree newtype = TREE_TYPE (targ0);
8045 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8046 newtype = TREE_TYPE (targ1);
8048 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8049 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8050 return fold_build2 (code, type, fold_convert (newtype, targ0),
8051 fold_convert (newtype, targ1));
8053 /* (-a) CMP (-b) -> b CMP a */
8054 if (TREE_CODE (arg0) == NEGATE_EXPR
8055 && TREE_CODE (arg1) == NEGATE_EXPR)
8056 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8057 TREE_OPERAND (arg0, 0));
8059 if (TREE_CODE (arg1) == REAL_CST)
8061 REAL_VALUE_TYPE cst;
8062 cst = TREE_REAL_CST (arg1);
8064 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8065 if (TREE_CODE (arg0) == NEGATE_EXPR)
8066 return fold_build2 (swap_tree_comparison (code), type,
8067 TREE_OPERAND (arg0, 0),
8068 build_real (TREE_TYPE (arg1),
8069 REAL_VALUE_NEGATE (cst)));
8071 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8072 /* a CMP (-0) -> a CMP 0 */
8073 if (REAL_VALUE_MINUS_ZERO (cst))
8074 return fold_build2 (code, type, arg0,
8075 build_real (TREE_TYPE (arg1), dconst0));
8077 /* x != NaN is always true, other ops are always false. */
8078 if (REAL_VALUE_ISNAN (cst)
8079 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8081 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8082 return omit_one_operand (type, tem, arg0);
8085 /* Fold comparisons against infinity. */
8086 if (REAL_VALUE_ISINF (cst))
8088 tem = fold_inf_compare (code, type, arg0, arg1);
8089 if (tem != NULL_TREE)
8090 return tem;
8094 /* If this is a comparison of a real constant with a PLUS_EXPR
8095 or a MINUS_EXPR of a real constant, we can convert it into a
8096 comparison with a revised real constant as long as no overflow
8097 occurs when unsafe_math_optimizations are enabled. */
8098 if (flag_unsafe_math_optimizations
8099 && TREE_CODE (arg1) == REAL_CST
8100 && (TREE_CODE (arg0) == PLUS_EXPR
8101 || TREE_CODE (arg0) == MINUS_EXPR)
8102 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8103 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8104 ? MINUS_EXPR : PLUS_EXPR,
8105 arg1, TREE_OPERAND (arg0, 1), 0))
8106 && ! TREE_CONSTANT_OVERFLOW (tem))
8107 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8109 /* Likewise, we can simplify a comparison of a real constant with
8110 a MINUS_EXPR whose first operand is also a real constant, i.e.
8111 (c1 - x) < c2 becomes x > c1-c2. */
8112 if (flag_unsafe_math_optimizations
8113 && TREE_CODE (arg1) == REAL_CST
8114 && TREE_CODE (arg0) == MINUS_EXPR
8115 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8116 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8117 arg1, 0))
8118 && ! TREE_CONSTANT_OVERFLOW (tem))
8119 return fold_build2 (swap_tree_comparison (code), type,
8120 TREE_OPERAND (arg0, 1), tem);
8122 /* Fold comparisons against built-in math functions. */
8123 if (TREE_CODE (arg1) == REAL_CST
8124 && flag_unsafe_math_optimizations
8125 && ! flag_errno_math)
8127 enum built_in_function fcode = builtin_mathfn_code (arg0);
8129 if (fcode != END_BUILTINS)
8131 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8132 if (tem != NULL_TREE)
8133 return tem;
8138 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8139 if (TREE_CONSTANT (arg1)
8140 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8141 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8142 /* This optimization is invalid for ordered comparisons
8143 if CONST+INCR overflows or if foo+incr might overflow.
8144 This optimization is invalid for floating point due to rounding.
8145 For pointer types we assume overflow doesn't happen. */
8146 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8147 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8148 && (code == EQ_EXPR || code == NE_EXPR))))
8150 tree varop, newconst;
8152 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8154 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8155 arg1, TREE_OPERAND (arg0, 1));
8156 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8157 TREE_OPERAND (arg0, 0),
8158 TREE_OPERAND (arg0, 1));
8160 else
8162 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8163 arg1, TREE_OPERAND (arg0, 1));
8164 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8165 TREE_OPERAND (arg0, 0),
8166 TREE_OPERAND (arg0, 1));
8170 /* If VAROP is a reference to a bitfield, we must mask
8171 the constant by the width of the field. */
8172 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8173 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8174 && host_integerp (DECL_SIZE (TREE_OPERAND
8175 (TREE_OPERAND (varop, 0), 1)), 1))
8177 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8178 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8179 tree folded_compare, shift;
8181 /* First check whether the comparison would come out
8182 always the same. If we don't do that we would
8183 change the meaning with the masking. */
8184 folded_compare = fold_build2 (code, type,
8185 TREE_OPERAND (varop, 0), arg1);
8186 if (TREE_CODE (folded_compare) == INTEGER_CST)
8187 return omit_one_operand (type, folded_compare, varop);
8189 shift = build_int_cst (NULL_TREE,
8190 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8191 shift = fold_convert (TREE_TYPE (varop), shift);
8192 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8193 newconst, shift);
8194 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8195 newconst, shift);
8198 return fold_build2 (code, type, varop, newconst);
8201 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8202 && (TREE_CODE (arg0) == NOP_EXPR
8203 || TREE_CODE (arg0) == CONVERT_EXPR))
8205 /* If we are widening one operand of an integer comparison,
8206 see if the other operand is similarly being widened. Perhaps we
8207 can do the comparison in the narrower type. */
8208 tem = fold_widened_comparison (code, type, arg0, arg1);
8209 if (tem)
8210 return tem;
8212 /* Or if we are changing signedness. */
8213 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8214 if (tem)
8215 return tem;
8218 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8219 constant, we can simplify it. */
8220 if (TREE_CODE (arg1) == INTEGER_CST
8221 && (TREE_CODE (arg0) == MIN_EXPR
8222 || TREE_CODE (arg0) == MAX_EXPR)
8223 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8225 tem = optimize_minmax_comparison (code, type, op0, op1);
8226 if (tem)
8227 return tem;
8230 /* Simplify comparison of something with itself. (For IEEE
8231 floating-point, we can only do some of these simplifications.) */
8232 if (operand_equal_p (arg0, arg1, 0))
8234 switch (code)
8236 case EQ_EXPR:
8237 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8238 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8239 return constant_boolean_node (1, type);
8240 break;
8242 case GE_EXPR:
8243 case LE_EXPR:
8244 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8245 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8246 return constant_boolean_node (1, type);
8247 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8249 case NE_EXPR:
8250 /* For NE, we can only do this simplification if integer
8251 or we don't honor IEEE floating point NaNs. */
8252 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8253 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8254 break;
8255 /* ... fall through ... */
8256 case GT_EXPR:
8257 case LT_EXPR:
8258 return constant_boolean_node (0, type);
8259 default:
8260 gcc_unreachable ();
8264 /* If we are comparing an expression that just has comparisons
8265 of two integer values, arithmetic expressions of those comparisons,
8266 and constants, we can simplify it. There are only three cases
8267 to check: the two values can either be equal, the first can be
8268 greater, or the second can be greater. Fold the expression for
8269 those three values. Since each value must be 0 or 1, we have
8270 eight possibilities, each of which corresponds to the constant 0
8271 or 1 or one of the six possible comparisons.
8273 This handles common cases like (a > b) == 0 but also handles
8274 expressions like ((x > y) - (y > x)) > 0, which supposedly
8275 occur in macroized code. */
8277 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8279 tree cval1 = 0, cval2 = 0;
8280 int save_p = 0;
8282 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8283 /* Don't handle degenerate cases here; they should already
8284 have been handled anyway. */
8285 && cval1 != 0 && cval2 != 0
8286 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8287 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8288 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8289 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8290 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8291 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8292 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8294 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8295 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8297 /* We can't just pass T to eval_subst in case cval1 or cval2
8298 was the same as ARG1. */
8300 tree high_result
8301 = fold_build2 (code, type,
8302 eval_subst (arg0, cval1, maxval,
8303 cval2, minval),
8304 arg1);
8305 tree equal_result
8306 = fold_build2 (code, type,
8307 eval_subst (arg0, cval1, maxval,
8308 cval2, maxval),
8309 arg1);
8310 tree low_result
8311 = fold_build2 (code, type,
8312 eval_subst (arg0, cval1, minval,
8313 cval2, maxval),
8314 arg1);
8316 /* All three of these results should be 0 or 1. Confirm they are.
8317 Then use those values to select the proper code to use. */
8319 if (TREE_CODE (high_result) == INTEGER_CST
8320 && TREE_CODE (equal_result) == INTEGER_CST
8321 && TREE_CODE (low_result) == INTEGER_CST)
8323 /* Make a 3-bit mask with the high-order bit being the
8324 value for `>', the next for '=', and the low for '<'. */
8325 switch ((integer_onep (high_result) * 4)
8326 + (integer_onep (equal_result) * 2)
8327 + integer_onep (low_result))
8329 case 0:
8330 /* Always false. */
8331 return omit_one_operand (type, integer_zero_node, arg0);
8332 case 1:
8333 code = LT_EXPR;
8334 break;
8335 case 2:
8336 code = EQ_EXPR;
8337 break;
8338 case 3:
8339 code = LE_EXPR;
8340 break;
8341 case 4:
8342 code = GT_EXPR;
8343 break;
8344 case 5:
8345 code = NE_EXPR;
8346 break;
8347 case 6:
8348 code = GE_EXPR;
8349 break;
8350 case 7:
8351 /* Always true. */
8352 return omit_one_operand (type, integer_one_node, arg0);
8355 if (save_p)
8356 return save_expr (build2 (code, type, cval1, cval2));
8357 return fold_build2 (code, type, cval1, cval2);
8362 /* Fold a comparison of the address of COMPONENT_REFs with the same
8363 type and component to a comparison of the address of the base
8364 object. In short, &x->a OP &y->a to x OP y and
8365 &x->a OP &y.a to x OP &y */
8366 if (TREE_CODE (arg0) == ADDR_EXPR
8367 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8368 && TREE_CODE (arg1) == ADDR_EXPR
8369 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8371 tree cref0 = TREE_OPERAND (arg0, 0);
8372 tree cref1 = TREE_OPERAND (arg1, 0);
8373 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8375 tree op0 = TREE_OPERAND (cref0, 0);
8376 tree op1 = TREE_OPERAND (cref1, 0);
8377 return fold_build2 (code, type,
8378 build_fold_addr_expr (op0),
8379 build_fold_addr_expr (op1));
8383 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8384 into a single range test. */
8385 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8386 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8387 && TREE_CODE (arg1) == INTEGER_CST
8388 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8389 && !integer_zerop (TREE_OPERAND (arg0, 1))
8390 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8391 && !TREE_OVERFLOW (arg1))
8393 tem = fold_div_compare (code, type, arg0, arg1);
8394 if (tem != NULL_TREE)
8395 return tem;
8398 /* Fold ~X op ~Y as Y op X. */
8399 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8400 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8401 return fold_build2 (code, type,
8402 TREE_OPERAND (arg1, 0),
8403 TREE_OPERAND (arg0, 0));
8405 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8406 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8407 && TREE_CODE (arg1) == INTEGER_CST)
8408 return fold_build2 (swap_tree_comparison (code), type,
8409 TREE_OPERAND (arg0, 0),
8410 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8412 return NULL_TREE;
8416 /* Subroutine of fold_binary. Optimize complex multiplications of the
8417 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8418 argument EXPR represents the expression "z" of type TYPE. */
8420 static tree
8421 fold_mult_zconjz (tree type, tree expr)
8423 tree itype = TREE_TYPE (type);
8424 tree rpart, ipart, tem;
8426 if (TREE_CODE (expr) == COMPLEX_EXPR)
8428 rpart = TREE_OPERAND (expr, 0);
8429 ipart = TREE_OPERAND (expr, 1);
8431 else if (TREE_CODE (expr) == COMPLEX_CST)
8433 rpart = TREE_REALPART (expr);
8434 ipart = TREE_IMAGPART (expr);
8436 else
8438 expr = save_expr (expr);
8439 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8440 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8443 rpart = save_expr (rpart);
8444 ipart = save_expr (ipart);
8445 tem = fold_build2 (PLUS_EXPR, itype,
8446 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8447 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8448 return fold_build2 (COMPLEX_EXPR, type, tem,
8449 fold_convert (itype, integer_zero_node));
8453 /* Fold a binary expression of code CODE and type TYPE with operands
8454 OP0 and OP1. Return the folded expression if folding is
8455 successful. Otherwise, return NULL_TREE. */
8457 tree
8458 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8460 enum tree_code_class kind = TREE_CODE_CLASS (code);
8461 tree arg0, arg1, tem;
8462 tree t1 = NULL_TREE;
8464 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8465 && TREE_CODE_LENGTH (code) == 2
8466 && op0 != NULL_TREE
8467 && op1 != NULL_TREE);
8469 arg0 = op0;
8470 arg1 = op1;
8472 /* Strip any conversions that don't change the mode. This is
8473 safe for every expression, except for a comparison expression
8474 because its signedness is derived from its operands. So, in
8475 the latter case, only strip conversions that don't change the
8476 signedness.
8478 Note that this is done as an internal manipulation within the
8479 constant folder, in order to find the simplest representation
8480 of the arguments so that their form can be studied. In any
8481 cases, the appropriate type conversions should be put back in
8482 the tree that will get out of the constant folder. */
8484 if (kind == tcc_comparison)
8486 STRIP_SIGN_NOPS (arg0);
8487 STRIP_SIGN_NOPS (arg1);
8489 else
8491 STRIP_NOPS (arg0);
8492 STRIP_NOPS (arg1);
8495 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8496 constant but we can't do arithmetic on them. */
8497 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8498 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8499 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8500 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8502 if (kind == tcc_binary)
8503 tem = const_binop (code, arg0, arg1, 0);
8504 else if (kind == tcc_comparison)
8505 tem = fold_relational_const (code, type, arg0, arg1);
8506 else
8507 tem = NULL_TREE;
8509 if (tem != NULL_TREE)
8511 if (TREE_TYPE (tem) != type)
8512 tem = fold_convert (type, tem);
8513 return tem;
8517 /* If this is a commutative operation, and ARG0 is a constant, move it
8518 to ARG1 to reduce the number of tests below. */
8519 if (commutative_tree_code (code)
8520 && tree_swap_operands_p (arg0, arg1, true))
8521 return fold_build2 (code, type, op1, op0);
8523 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8525 First check for cases where an arithmetic operation is applied to a
8526 compound, conditional, or comparison operation. Push the arithmetic
8527 operation inside the compound or conditional to see if any folding
8528 can then be done. Convert comparison to conditional for this purpose.
8529 The also optimizes non-constant cases that used to be done in
8530 expand_expr.
8532 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8533 one of the operands is a comparison and the other is a comparison, a
8534 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8535 code below would make the expression more complex. Change it to a
8536 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8537 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8539 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8540 || code == EQ_EXPR || code == NE_EXPR)
8541 && ((truth_value_p (TREE_CODE (arg0))
8542 && (truth_value_p (TREE_CODE (arg1))
8543 || (TREE_CODE (arg1) == BIT_AND_EXPR
8544 && integer_onep (TREE_OPERAND (arg1, 1)))))
8545 || (truth_value_p (TREE_CODE (arg1))
8546 && (truth_value_p (TREE_CODE (arg0))
8547 || (TREE_CODE (arg0) == BIT_AND_EXPR
8548 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8550 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8551 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8552 : TRUTH_XOR_EXPR,
8553 boolean_type_node,
8554 fold_convert (boolean_type_node, arg0),
8555 fold_convert (boolean_type_node, arg1));
8557 if (code == EQ_EXPR)
8558 tem = invert_truthvalue (tem);
8560 return fold_convert (type, tem);
8563 if (TREE_CODE_CLASS (code) == tcc_binary
8564 || TREE_CODE_CLASS (code) == tcc_comparison)
8566 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8567 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8568 fold_build2 (code, type,
8569 TREE_OPERAND (arg0, 1), op1));
8570 if (TREE_CODE (arg1) == COMPOUND_EXPR
8571 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8572 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8573 fold_build2 (code, type,
8574 op0, TREE_OPERAND (arg1, 1)));
8576 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8578 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8579 arg0, arg1,
8580 /*cond_first_p=*/1);
8581 if (tem != NULL_TREE)
8582 return tem;
8585 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8587 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8588 arg1, arg0,
8589 /*cond_first_p=*/0);
8590 if (tem != NULL_TREE)
8591 return tem;
8595 switch (code)
8597 case PLUS_EXPR:
8598 /* A + (-B) -> A - B */
8599 if (TREE_CODE (arg1) == NEGATE_EXPR)
8600 return fold_build2 (MINUS_EXPR, type,
8601 fold_convert (type, arg0),
8602 fold_convert (type, TREE_OPERAND (arg1, 0)));
8603 /* (-A) + B -> B - A */
8604 if (TREE_CODE (arg0) == NEGATE_EXPR
8605 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8606 return fold_build2 (MINUS_EXPR, type,
8607 fold_convert (type, arg1),
8608 fold_convert (type, TREE_OPERAND (arg0, 0)));
8609 /* Convert ~A + 1 to -A. */
8610 if (INTEGRAL_TYPE_P (type)
8611 && TREE_CODE (arg0) == BIT_NOT_EXPR
8612 && integer_onep (arg1))
8613 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8615 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8616 same or one. */
8617 if ((TREE_CODE (arg0) == MULT_EXPR
8618 || TREE_CODE (arg1) == MULT_EXPR)
8619 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8621 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8622 if (tem)
8623 return tem;
8626 if (! FLOAT_TYPE_P (type))
8628 if (integer_zerop (arg1))
8629 return non_lvalue (fold_convert (type, arg0));
8631 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8632 with a constant, and the two constants have no bits in common,
8633 we should treat this as a BIT_IOR_EXPR since this may produce more
8634 simplifications. */
8635 if (TREE_CODE (arg0) == BIT_AND_EXPR
8636 && TREE_CODE (arg1) == BIT_AND_EXPR
8637 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8638 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8639 && integer_zerop (const_binop (BIT_AND_EXPR,
8640 TREE_OPERAND (arg0, 1),
8641 TREE_OPERAND (arg1, 1), 0)))
8643 code = BIT_IOR_EXPR;
8644 goto bit_ior;
8647 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8648 (plus (plus (mult) (mult)) (foo)) so that we can
8649 take advantage of the factoring cases below. */
8650 if (((TREE_CODE (arg0) == PLUS_EXPR
8651 || TREE_CODE (arg0) == MINUS_EXPR)
8652 && TREE_CODE (arg1) == MULT_EXPR)
8653 || ((TREE_CODE (arg1) == PLUS_EXPR
8654 || TREE_CODE (arg1) == MINUS_EXPR)
8655 && TREE_CODE (arg0) == MULT_EXPR))
8657 tree parg0, parg1, parg, marg;
8658 enum tree_code pcode;
8660 if (TREE_CODE (arg1) == MULT_EXPR)
8661 parg = arg0, marg = arg1;
8662 else
8663 parg = arg1, marg = arg0;
8664 pcode = TREE_CODE (parg);
8665 parg0 = TREE_OPERAND (parg, 0);
8666 parg1 = TREE_OPERAND (parg, 1);
8667 STRIP_NOPS (parg0);
8668 STRIP_NOPS (parg1);
8670 if (TREE_CODE (parg0) == MULT_EXPR
8671 && TREE_CODE (parg1) != MULT_EXPR)
8672 return fold_build2 (pcode, type,
8673 fold_build2 (PLUS_EXPR, type,
8674 fold_convert (type, parg0),
8675 fold_convert (type, marg)),
8676 fold_convert (type, parg1));
8677 if (TREE_CODE (parg0) != MULT_EXPR
8678 && TREE_CODE (parg1) == MULT_EXPR)
8679 return fold_build2 (PLUS_EXPR, type,
8680 fold_convert (type, parg0),
8681 fold_build2 (pcode, type,
8682 fold_convert (type, marg),
8683 fold_convert (type,
8684 parg1)));
8687 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8688 of the array. Loop optimizer sometimes produce this type of
8689 expressions. */
8690 if (TREE_CODE (arg0) == ADDR_EXPR)
8692 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8693 if (tem)
8694 return fold_convert (type, tem);
8696 else if (TREE_CODE (arg1) == ADDR_EXPR)
8698 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8699 if (tem)
8700 return fold_convert (type, tem);
8703 else
8705 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8706 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8707 return non_lvalue (fold_convert (type, arg0));
8709 /* Likewise if the operands are reversed. */
8710 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8711 return non_lvalue (fold_convert (type, arg1));
8713 /* Convert X + -C into X - C. */
8714 if (TREE_CODE (arg1) == REAL_CST
8715 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8717 tem = fold_negate_const (arg1, type);
8718 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8719 return fold_build2 (MINUS_EXPR, type,
8720 fold_convert (type, arg0),
8721 fold_convert (type, tem));
8724 if (flag_unsafe_math_optimizations
8725 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8726 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8727 && (tem = distribute_real_division (code, type, arg0, arg1)))
8728 return tem;
8730 /* Convert x+x into x*2.0. */
8731 if (operand_equal_p (arg0, arg1, 0)
8732 && SCALAR_FLOAT_TYPE_P (type))
8733 return fold_build2 (MULT_EXPR, type, arg0,
8734 build_real (type, dconst2));
8736 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8737 if (flag_unsafe_math_optimizations
8738 && TREE_CODE (arg1) == PLUS_EXPR
8739 && TREE_CODE (arg0) != MULT_EXPR)
8741 tree tree10 = TREE_OPERAND (arg1, 0);
8742 tree tree11 = TREE_OPERAND (arg1, 1);
8743 if (TREE_CODE (tree11) == MULT_EXPR
8744 && TREE_CODE (tree10) == MULT_EXPR)
8746 tree tree0;
8747 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8748 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8751 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8752 if (flag_unsafe_math_optimizations
8753 && TREE_CODE (arg0) == PLUS_EXPR
8754 && TREE_CODE (arg1) != MULT_EXPR)
8756 tree tree00 = TREE_OPERAND (arg0, 0);
8757 tree tree01 = TREE_OPERAND (arg0, 1);
8758 if (TREE_CODE (tree01) == MULT_EXPR
8759 && TREE_CODE (tree00) == MULT_EXPR)
8761 tree tree0;
8762 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8763 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8768 bit_rotate:
8769 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8770 is a rotate of A by C1 bits. */
8771 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8772 is a rotate of A by B bits. */
8774 enum tree_code code0, code1;
8775 code0 = TREE_CODE (arg0);
8776 code1 = TREE_CODE (arg1);
8777 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8778 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8779 && operand_equal_p (TREE_OPERAND (arg0, 0),
8780 TREE_OPERAND (arg1, 0), 0)
8781 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8783 tree tree01, tree11;
8784 enum tree_code code01, code11;
8786 tree01 = TREE_OPERAND (arg0, 1);
8787 tree11 = TREE_OPERAND (arg1, 1);
8788 STRIP_NOPS (tree01);
8789 STRIP_NOPS (tree11);
8790 code01 = TREE_CODE (tree01);
8791 code11 = TREE_CODE (tree11);
8792 if (code01 == INTEGER_CST
8793 && code11 == INTEGER_CST
8794 && TREE_INT_CST_HIGH (tree01) == 0
8795 && TREE_INT_CST_HIGH (tree11) == 0
8796 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8797 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8798 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8799 code0 == LSHIFT_EXPR ? tree01 : tree11);
8800 else if (code11 == MINUS_EXPR)
8802 tree tree110, tree111;
8803 tree110 = TREE_OPERAND (tree11, 0);
8804 tree111 = TREE_OPERAND (tree11, 1);
8805 STRIP_NOPS (tree110);
8806 STRIP_NOPS (tree111);
8807 if (TREE_CODE (tree110) == INTEGER_CST
8808 && 0 == compare_tree_int (tree110,
8809 TYPE_PRECISION
8810 (TREE_TYPE (TREE_OPERAND
8811 (arg0, 0))))
8812 && operand_equal_p (tree01, tree111, 0))
8813 return build2 ((code0 == LSHIFT_EXPR
8814 ? LROTATE_EXPR
8815 : RROTATE_EXPR),
8816 type, TREE_OPERAND (arg0, 0), tree01);
8818 else if (code01 == MINUS_EXPR)
8820 tree tree010, tree011;
8821 tree010 = TREE_OPERAND (tree01, 0);
8822 tree011 = TREE_OPERAND (tree01, 1);
8823 STRIP_NOPS (tree010);
8824 STRIP_NOPS (tree011);
8825 if (TREE_CODE (tree010) == INTEGER_CST
8826 && 0 == compare_tree_int (tree010,
8827 TYPE_PRECISION
8828 (TREE_TYPE (TREE_OPERAND
8829 (arg0, 0))))
8830 && operand_equal_p (tree11, tree011, 0))
8831 return build2 ((code0 != LSHIFT_EXPR
8832 ? LROTATE_EXPR
8833 : RROTATE_EXPR),
8834 type, TREE_OPERAND (arg0, 0), tree11);
8839 associate:
8840 /* In most languages, can't associate operations on floats through
8841 parentheses. Rather than remember where the parentheses were, we
8842 don't associate floats at all, unless the user has specified
8843 -funsafe-math-optimizations. */
8845 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8847 tree var0, con0, lit0, minus_lit0;
8848 tree var1, con1, lit1, minus_lit1;
8850 /* Split both trees into variables, constants, and literals. Then
8851 associate each group together, the constants with literals,
8852 then the result with variables. This increases the chances of
8853 literals being recombined later and of generating relocatable
8854 expressions for the sum of a constant and literal. */
8855 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8856 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8857 code == MINUS_EXPR);
8859 /* Only do something if we found more than two objects. Otherwise,
8860 nothing has changed and we risk infinite recursion. */
8861 if (2 < ((var0 != 0) + (var1 != 0)
8862 + (con0 != 0) + (con1 != 0)
8863 + (lit0 != 0) + (lit1 != 0)
8864 + (minus_lit0 != 0) + (minus_lit1 != 0)))
8866 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8867 if (code == MINUS_EXPR)
8868 code = PLUS_EXPR;
8870 var0 = associate_trees (var0, var1, code, type);
8871 con0 = associate_trees (con0, con1, code, type);
8872 lit0 = associate_trees (lit0, lit1, code, type);
8873 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8875 /* Preserve the MINUS_EXPR if the negative part of the literal is
8876 greater than the positive part. Otherwise, the multiplicative
8877 folding code (i.e extract_muldiv) may be fooled in case
8878 unsigned constants are subtracted, like in the following
8879 example: ((X*2 + 4) - 8U)/2. */
8880 if (minus_lit0 && lit0)
8882 if (TREE_CODE (lit0) == INTEGER_CST
8883 && TREE_CODE (minus_lit0) == INTEGER_CST
8884 && tree_int_cst_lt (lit0, minus_lit0))
8886 minus_lit0 = associate_trees (minus_lit0, lit0,
8887 MINUS_EXPR, type);
8888 lit0 = 0;
8890 else
8892 lit0 = associate_trees (lit0, minus_lit0,
8893 MINUS_EXPR, type);
8894 minus_lit0 = 0;
8897 if (minus_lit0)
8899 if (con0 == 0)
8900 return fold_convert (type,
8901 associate_trees (var0, minus_lit0,
8902 MINUS_EXPR, type));
8903 else
8905 con0 = associate_trees (con0, minus_lit0,
8906 MINUS_EXPR, type);
8907 return fold_convert (type,
8908 associate_trees (var0, con0,
8909 PLUS_EXPR, type));
8913 con0 = associate_trees (con0, lit0, code, type);
8914 return fold_convert (type, associate_trees (var0, con0,
8915 code, type));
8919 return NULL_TREE;
8921 case MINUS_EXPR:
8922 /* A - (-B) -> A + B */
8923 if (TREE_CODE (arg1) == NEGATE_EXPR)
8924 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8925 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8926 if (TREE_CODE (arg0) == NEGATE_EXPR
8927 && (FLOAT_TYPE_P (type)
8928 || INTEGRAL_TYPE_P (type))
8929 && negate_expr_p (arg1)
8930 && reorder_operands_p (arg0, arg1))
8931 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8932 TREE_OPERAND (arg0, 0));
8933 /* Convert -A - 1 to ~A. */
8934 if (INTEGRAL_TYPE_P (type)
8935 && TREE_CODE (arg0) == NEGATE_EXPR
8936 && integer_onep (arg1))
8937 return fold_build1 (BIT_NOT_EXPR, type,
8938 fold_convert (type, TREE_OPERAND (arg0, 0)));
8940 /* Convert -1 - A to ~A. */
8941 if (INTEGRAL_TYPE_P (type)
8942 && integer_all_onesp (arg0))
8943 return fold_build1 (BIT_NOT_EXPR, type, arg1);
8945 if (! FLOAT_TYPE_P (type))
8947 if (integer_zerop (arg0))
8948 return negate_expr (fold_convert (type, arg1));
8949 if (integer_zerop (arg1))
8950 return non_lvalue (fold_convert (type, arg0));
8952 /* Fold A - (A & B) into ~B & A. */
8953 if (!TREE_SIDE_EFFECTS (arg0)
8954 && TREE_CODE (arg1) == BIT_AND_EXPR)
8956 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8957 return fold_build2 (BIT_AND_EXPR, type,
8958 fold_build1 (BIT_NOT_EXPR, type,
8959 TREE_OPERAND (arg1, 0)),
8960 arg0);
8961 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8962 return fold_build2 (BIT_AND_EXPR, type,
8963 fold_build1 (BIT_NOT_EXPR, type,
8964 TREE_OPERAND (arg1, 1)),
8965 arg0);
8968 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8969 any power of 2 minus 1. */
8970 if (TREE_CODE (arg0) == BIT_AND_EXPR
8971 && TREE_CODE (arg1) == BIT_AND_EXPR
8972 && operand_equal_p (TREE_OPERAND (arg0, 0),
8973 TREE_OPERAND (arg1, 0), 0))
8975 tree mask0 = TREE_OPERAND (arg0, 1);
8976 tree mask1 = TREE_OPERAND (arg1, 1);
8977 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
8979 if (operand_equal_p (tem, mask1, 0))
8981 tem = fold_build2 (BIT_XOR_EXPR, type,
8982 TREE_OPERAND (arg0, 0), mask1);
8983 return fold_build2 (MINUS_EXPR, type, tem, mask1);
8988 /* See if ARG1 is zero and X - ARG1 reduces to X. */
8989 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
8990 return non_lvalue (fold_convert (type, arg0));
8992 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
8993 ARG0 is zero and X + ARG0 reduces to X, since that would mean
8994 (-ARG1 + ARG0) reduces to -ARG1. */
8995 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8996 return negate_expr (fold_convert (type, arg1));
8998 /* Fold &x - &x. This can happen from &x.foo - &x.
8999 This is unsafe for certain floats even in non-IEEE formats.
9000 In IEEE, it is unsafe because it does wrong for NaNs.
9001 Also note that operand_equal_p is always false if an operand
9002 is volatile. */
9004 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9005 && operand_equal_p (arg0, arg1, 0))
9006 return fold_convert (type, integer_zero_node);
9008 /* A - B -> A + (-B) if B is easily negatable. */
9009 if (negate_expr_p (arg1)
9010 && ((FLOAT_TYPE_P (type)
9011 /* Avoid this transformation if B is a positive REAL_CST. */
9012 && (TREE_CODE (arg1) != REAL_CST
9013 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9014 || INTEGRAL_TYPE_P (type)))
9015 return fold_build2 (PLUS_EXPR, type,
9016 fold_convert (type, arg0),
9017 fold_convert (type, negate_expr (arg1)));
9019 /* Try folding difference of addresses. */
9021 HOST_WIDE_INT diff;
9023 if ((TREE_CODE (arg0) == ADDR_EXPR
9024 || TREE_CODE (arg1) == ADDR_EXPR)
9025 && ptr_difference_const (arg0, arg1, &diff))
9026 return build_int_cst_type (type, diff);
9029 /* Fold &a[i] - &a[j] to i-j. */
9030 if (TREE_CODE (arg0) == ADDR_EXPR
9031 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9032 && TREE_CODE (arg1) == ADDR_EXPR
9033 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9035 tree aref0 = TREE_OPERAND (arg0, 0);
9036 tree aref1 = TREE_OPERAND (arg1, 0);
9037 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9038 TREE_OPERAND (aref1, 0), 0))
9040 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9041 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9042 tree esz = array_ref_element_size (aref0);
9043 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9044 return fold_build2 (MULT_EXPR, type, diff,
9045 fold_convert (type, esz));
9050 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
9051 of the array. Loop optimizer sometimes produce this type of
9052 expressions. */
9053 if (TREE_CODE (arg0) == ADDR_EXPR)
9055 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9056 if (tem)
9057 return fold_convert (type, tem);
9060 if (flag_unsafe_math_optimizations
9061 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9062 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9063 && (tem = distribute_real_division (code, type, arg0, arg1)))
9064 return tem;
9066 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9067 same or one. */
9068 if ((TREE_CODE (arg0) == MULT_EXPR
9069 || TREE_CODE (arg1) == MULT_EXPR)
9070 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9072 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9073 if (tem)
9074 return tem;
9077 goto associate;
9079 case MULT_EXPR:
9080 /* (-A) * (-B) -> A * B */
9081 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9082 return fold_build2 (MULT_EXPR, type,
9083 fold_convert (type, TREE_OPERAND (arg0, 0)),
9084 fold_convert (type, negate_expr (arg1)));
9085 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9086 return fold_build2 (MULT_EXPR, type,
9087 fold_convert (type, negate_expr (arg0)),
9088 fold_convert (type, TREE_OPERAND (arg1, 0)));
9090 if (! FLOAT_TYPE_P (type))
9092 if (integer_zerop (arg1))
9093 return omit_one_operand (type, arg1, arg0);
9094 if (integer_onep (arg1))
9095 return non_lvalue (fold_convert (type, arg0));
9096 /* Transform x * -1 into -x. */
9097 if (integer_all_onesp (arg1))
9098 return fold_convert (type, negate_expr (arg0));
9099 /* Transform x * -C into -x * C if x is easily negatable. */
9100 if (TREE_CODE (arg1) == INTEGER_CST
9101 && tree_int_cst_sgn (arg1) == -1
9102 && negate_expr_p (arg0)
9103 && (tem = negate_expr (arg1)) != arg1
9104 && !TREE_OVERFLOW (tem))
9105 return fold_build2 (MULT_EXPR, type,
9106 negate_expr (arg0), tem);
9108 /* (a * (1 << b)) is (a << b) */
9109 if (TREE_CODE (arg1) == LSHIFT_EXPR
9110 && integer_onep (TREE_OPERAND (arg1, 0)))
9111 return fold_build2 (LSHIFT_EXPR, type, arg0,
9112 TREE_OPERAND (arg1, 1));
9113 if (TREE_CODE (arg0) == LSHIFT_EXPR
9114 && integer_onep (TREE_OPERAND (arg0, 0)))
9115 return fold_build2 (LSHIFT_EXPR, type, arg1,
9116 TREE_OPERAND (arg0, 1));
9118 if (TREE_CODE (arg1) == INTEGER_CST
9119 && 0 != (tem = extract_muldiv (op0,
9120 fold_convert (type, arg1),
9121 code, NULL_TREE)))
9122 return fold_convert (type, tem);
9124 /* Optimize z * conj(z) for integer complex numbers. */
9125 if (TREE_CODE (arg0) == CONJ_EXPR
9126 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9127 return fold_mult_zconjz (type, arg1);
9128 if (TREE_CODE (arg1) == CONJ_EXPR
9129 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9130 return fold_mult_zconjz (type, arg0);
9132 else
9134 /* Maybe fold x * 0 to 0. The expressions aren't the same
9135 when x is NaN, since x * 0 is also NaN. Nor are they the
9136 same in modes with signed zeros, since multiplying a
9137 negative value by 0 gives -0, not +0. */
9138 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9139 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9140 && real_zerop (arg1))
9141 return omit_one_operand (type, arg1, arg0);
9142 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9143 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9144 && real_onep (arg1))
9145 return non_lvalue (fold_convert (type, arg0));
9147 /* Transform x * -1.0 into -x. */
9148 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9149 && real_minus_onep (arg1))
9150 return fold_convert (type, negate_expr (arg0));
9152 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9153 if (flag_unsafe_math_optimizations
9154 && TREE_CODE (arg0) == RDIV_EXPR
9155 && TREE_CODE (arg1) == REAL_CST
9156 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9158 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9159 arg1, 0);
9160 if (tem)
9161 return fold_build2 (RDIV_EXPR, type, tem,
9162 TREE_OPERAND (arg0, 1));
9165 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9166 if (operand_equal_p (arg0, arg1, 0))
9168 tree tem = fold_strip_sign_ops (arg0);
9169 if (tem != NULL_TREE)
9171 tem = fold_convert (type, tem);
9172 return fold_build2 (MULT_EXPR, type, tem, tem);
9176 /* Optimize z * conj(z) for floating point complex numbers.
9177 Guarded by flag_unsafe_math_optimizations as non-finite
9178 imaginary components don't produce scalar results. */
9179 if (flag_unsafe_math_optimizations
9180 && TREE_CODE (arg0) == CONJ_EXPR
9181 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9182 return fold_mult_zconjz (type, arg1);
9183 if (flag_unsafe_math_optimizations
9184 && TREE_CODE (arg1) == CONJ_EXPR
9185 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9186 return fold_mult_zconjz (type, arg0);
9188 if (flag_unsafe_math_optimizations)
9190 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9191 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9193 /* Optimizations of root(...)*root(...). */
9194 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9196 tree rootfn, arg, arglist;
9197 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9198 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9200 /* Optimize sqrt(x)*sqrt(x) as x. */
9201 if (BUILTIN_SQRT_P (fcode0)
9202 && operand_equal_p (arg00, arg10, 0)
9203 && ! HONOR_SNANS (TYPE_MODE (type)))
9204 return arg00;
9206 /* Optimize root(x)*root(y) as root(x*y). */
9207 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9208 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9209 arglist = build_tree_list (NULL_TREE, arg);
9210 return build_function_call_expr (rootfn, arglist);
9213 /* Optimize expN(x)*expN(y) as expN(x+y). */
9214 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9216 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9217 tree arg = fold_build2 (PLUS_EXPR, type,
9218 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9219 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9220 tree arglist = build_tree_list (NULL_TREE, arg);
9221 return build_function_call_expr (expfn, arglist);
9224 /* Optimizations of pow(...)*pow(...). */
9225 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9226 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9227 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9229 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9230 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9231 1)));
9232 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9233 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9234 1)));
9236 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9237 if (operand_equal_p (arg01, arg11, 0))
9239 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9240 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9241 tree arglist = tree_cons (NULL_TREE, arg,
9242 build_tree_list (NULL_TREE,
9243 arg01));
9244 return build_function_call_expr (powfn, arglist);
9247 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9248 if (operand_equal_p (arg00, arg10, 0))
9250 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9251 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9252 tree arglist = tree_cons (NULL_TREE, arg00,
9253 build_tree_list (NULL_TREE,
9254 arg));
9255 return build_function_call_expr (powfn, arglist);
9259 /* Optimize tan(x)*cos(x) as sin(x). */
9260 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9261 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9262 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9263 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9264 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9265 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9266 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9267 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9269 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9271 if (sinfn != NULL_TREE)
9272 return build_function_call_expr (sinfn,
9273 TREE_OPERAND (arg0, 1));
9276 /* Optimize x*pow(x,c) as pow(x,c+1). */
9277 if (fcode1 == BUILT_IN_POW
9278 || fcode1 == BUILT_IN_POWF
9279 || fcode1 == BUILT_IN_POWL)
9281 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9282 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9283 1)));
9284 if (TREE_CODE (arg11) == REAL_CST
9285 && ! TREE_CONSTANT_OVERFLOW (arg11)
9286 && operand_equal_p (arg0, arg10, 0))
9288 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9289 REAL_VALUE_TYPE c;
9290 tree arg, arglist;
9292 c = TREE_REAL_CST (arg11);
9293 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9294 arg = build_real (type, c);
9295 arglist = build_tree_list (NULL_TREE, arg);
9296 arglist = tree_cons (NULL_TREE, arg0, arglist);
9297 return build_function_call_expr (powfn, arglist);
9301 /* Optimize pow(x,c)*x as pow(x,c+1). */
9302 if (fcode0 == BUILT_IN_POW
9303 || fcode0 == BUILT_IN_POWF
9304 || fcode0 == BUILT_IN_POWL)
9306 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9307 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9308 1)));
9309 if (TREE_CODE (arg01) == REAL_CST
9310 && ! TREE_CONSTANT_OVERFLOW (arg01)
9311 && operand_equal_p (arg1, arg00, 0))
9313 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9314 REAL_VALUE_TYPE c;
9315 tree arg, arglist;
9317 c = TREE_REAL_CST (arg01);
9318 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9319 arg = build_real (type, c);
9320 arglist = build_tree_list (NULL_TREE, arg);
9321 arglist = tree_cons (NULL_TREE, arg1, arglist);
9322 return build_function_call_expr (powfn, arglist);
9326 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9327 if (! optimize_size
9328 && operand_equal_p (arg0, arg1, 0))
9330 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9332 if (powfn)
9334 tree arg = build_real (type, dconst2);
9335 tree arglist = build_tree_list (NULL_TREE, arg);
9336 arglist = tree_cons (NULL_TREE, arg0, arglist);
9337 return build_function_call_expr (powfn, arglist);
9342 goto associate;
9344 case BIT_IOR_EXPR:
9345 bit_ior:
9346 if (integer_all_onesp (arg1))
9347 return omit_one_operand (type, arg1, arg0);
9348 if (integer_zerop (arg1))
9349 return non_lvalue (fold_convert (type, arg0));
9350 if (operand_equal_p (arg0, arg1, 0))
9351 return non_lvalue (fold_convert (type, arg0));
9353 /* ~X | X is -1. */
9354 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9355 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9357 t1 = build_int_cst (type, -1);
9358 t1 = force_fit_type (t1, 0, false, false);
9359 return omit_one_operand (type, t1, arg1);
9362 /* X | ~X is -1. */
9363 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9364 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9366 t1 = build_int_cst (type, -1);
9367 t1 = force_fit_type (t1, 0, false, false);
9368 return omit_one_operand (type, t1, arg0);
9371 /* Canonicalize (X & C1) | C2. */
9372 if (TREE_CODE (arg0) == BIT_AND_EXPR
9373 && TREE_CODE (arg1) == INTEGER_CST
9374 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9376 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9377 int width = TYPE_PRECISION (type);
9378 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9379 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9380 hi2 = TREE_INT_CST_HIGH (arg1);
9381 lo2 = TREE_INT_CST_LOW (arg1);
9383 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9384 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9385 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9387 if (width > HOST_BITS_PER_WIDE_INT)
9389 mhi = (unsigned HOST_WIDE_INT) -1
9390 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9391 mlo = -1;
9393 else
9395 mhi = 0;
9396 mlo = (unsigned HOST_WIDE_INT) -1
9397 >> (HOST_BITS_PER_WIDE_INT - width);
9400 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9401 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9402 return fold_build2 (BIT_IOR_EXPR, type,
9403 TREE_OPERAND (arg0, 0), arg1);
9405 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9406 hi1 &= mhi;
9407 lo1 &= mlo;
9408 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9409 return fold_build2 (BIT_IOR_EXPR, type,
9410 fold_build2 (BIT_AND_EXPR, type,
9411 TREE_OPERAND (arg0, 0),
9412 build_int_cst_wide (type,
9413 lo1 & ~lo2,
9414 hi1 & ~hi2)),
9415 arg1);
9418 /* (X & Y) | Y is (X, Y). */
9419 if (TREE_CODE (arg0) == BIT_AND_EXPR
9420 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9421 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9422 /* (X & Y) | X is (Y, X). */
9423 if (TREE_CODE (arg0) == BIT_AND_EXPR
9424 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9425 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9426 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9427 /* X | (X & Y) is (Y, X). */
9428 if (TREE_CODE (arg1) == BIT_AND_EXPR
9429 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9430 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9431 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9432 /* X | (Y & X) is (Y, X). */
9433 if (TREE_CODE (arg1) == BIT_AND_EXPR
9434 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9435 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9436 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9438 t1 = distribute_bit_expr (code, type, arg0, arg1);
9439 if (t1 != NULL_TREE)
9440 return t1;
9442 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9444 This results in more efficient code for machines without a NAND
9445 instruction. Combine will canonicalize to the first form
9446 which will allow use of NAND instructions provided by the
9447 backend if they exist. */
9448 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9449 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9451 return fold_build1 (BIT_NOT_EXPR, type,
9452 build2 (BIT_AND_EXPR, type,
9453 TREE_OPERAND (arg0, 0),
9454 TREE_OPERAND (arg1, 0)));
9457 /* See if this can be simplified into a rotate first. If that
9458 is unsuccessful continue in the association code. */
9459 goto bit_rotate;
9461 case BIT_XOR_EXPR:
9462 if (integer_zerop (arg1))
9463 return non_lvalue (fold_convert (type, arg0));
9464 if (integer_all_onesp (arg1))
9465 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9466 if (operand_equal_p (arg0, arg1, 0))
9467 return omit_one_operand (type, integer_zero_node, arg0);
9469 /* ~X ^ X is -1. */
9470 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9471 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9473 t1 = build_int_cst (type, -1);
9474 t1 = force_fit_type (t1, 0, false, false);
9475 return omit_one_operand (type, t1, arg1);
9478 /* X ^ ~X is -1. */
9479 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9480 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9482 t1 = build_int_cst (type, -1);
9483 t1 = force_fit_type (t1, 0, false, false);
9484 return omit_one_operand (type, t1, arg0);
9487 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9488 with a constant, and the two constants have no bits in common,
9489 we should treat this as a BIT_IOR_EXPR since this may produce more
9490 simplifications. */
9491 if (TREE_CODE (arg0) == BIT_AND_EXPR
9492 && TREE_CODE (arg1) == BIT_AND_EXPR
9493 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9494 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9495 && integer_zerop (const_binop (BIT_AND_EXPR,
9496 TREE_OPERAND (arg0, 1),
9497 TREE_OPERAND (arg1, 1), 0)))
9499 code = BIT_IOR_EXPR;
9500 goto bit_ior;
9503 /* (X | Y) ^ X -> Y & ~ X*/
9504 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9505 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9507 tree t2 = TREE_OPERAND (arg0, 1);
9508 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9509 arg1);
9510 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9511 fold_convert (type, t1));
9512 return t1;
9515 /* (Y | X) ^ X -> Y & ~ X*/
9516 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9517 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9519 tree t2 = TREE_OPERAND (arg0, 0);
9520 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9521 arg1);
9522 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9523 fold_convert (type, t1));
9524 return t1;
9527 /* X ^ (X | Y) -> Y & ~ X*/
9528 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9529 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9531 tree t2 = TREE_OPERAND (arg1, 1);
9532 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9533 arg0);
9534 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9535 fold_convert (type, t1));
9536 return t1;
9539 /* X ^ (Y | X) -> Y & ~ X*/
9540 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9541 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9543 tree t2 = TREE_OPERAND (arg1, 0);
9544 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9545 arg0);
9546 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9547 fold_convert (type, t1));
9548 return t1;
9551 /* Convert ~X ^ ~Y to X ^ Y. */
9552 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9553 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9554 return fold_build2 (code, type,
9555 fold_convert (type, TREE_OPERAND (arg0, 0)),
9556 fold_convert (type, TREE_OPERAND (arg1, 0)));
9558 /* Convert ~X ^ C to X ^ ~C. */
9559 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9560 && TREE_CODE (arg1) == INTEGER_CST)
9561 return fold_build2 (code, type,
9562 fold_convert (type, TREE_OPERAND (arg0, 0)),
9563 fold_build1 (BIT_NOT_EXPR, type, arg1));
9565 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9566 if (TREE_CODE (arg0) == BIT_AND_EXPR
9567 && integer_onep (TREE_OPERAND (arg0, 1))
9568 && integer_onep (arg1))
9569 return fold_build2 (EQ_EXPR, type, arg0,
9570 build_int_cst (TREE_TYPE (arg0), 0));
9572 /* Fold (X & Y) ^ Y as ~X & Y. */
9573 if (TREE_CODE (arg0) == BIT_AND_EXPR
9574 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9576 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9577 return fold_build2 (BIT_AND_EXPR, type,
9578 fold_build1 (BIT_NOT_EXPR, type, tem),
9579 fold_convert (type, arg1));
9581 /* Fold (X & Y) ^ X as ~Y & X. */
9582 if (TREE_CODE (arg0) == BIT_AND_EXPR
9583 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9584 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9586 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9587 return fold_build2 (BIT_AND_EXPR, type,
9588 fold_build1 (BIT_NOT_EXPR, type, tem),
9589 fold_convert (type, arg1));
9591 /* Fold X ^ (X & Y) as X & ~Y. */
9592 if (TREE_CODE (arg1) == BIT_AND_EXPR
9593 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9595 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9596 return fold_build2 (BIT_AND_EXPR, type,
9597 fold_convert (type, arg0),
9598 fold_build1 (BIT_NOT_EXPR, type, tem));
9600 /* Fold X ^ (Y & X) as ~Y & X. */
9601 if (TREE_CODE (arg1) == BIT_AND_EXPR
9602 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9603 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9605 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9606 return fold_build2 (BIT_AND_EXPR, type,
9607 fold_build1 (BIT_NOT_EXPR, type, tem),
9608 fold_convert (type, arg0));
9611 /* See if this can be simplified into a rotate first. If that
9612 is unsuccessful continue in the association code. */
9613 goto bit_rotate;
9615 case BIT_AND_EXPR:
9616 if (integer_all_onesp (arg1))
9617 return non_lvalue (fold_convert (type, arg0));
9618 if (integer_zerop (arg1))
9619 return omit_one_operand (type, arg1, arg0);
9620 if (operand_equal_p (arg0, arg1, 0))
9621 return non_lvalue (fold_convert (type, arg0));
9623 /* ~X & X is always zero. */
9624 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9625 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9626 return omit_one_operand (type, integer_zero_node, arg1);
9628 /* X & ~X is always zero. */
9629 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9630 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9631 return omit_one_operand (type, integer_zero_node, arg0);
9633 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9634 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9635 && TREE_CODE (arg1) == INTEGER_CST
9636 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9637 return fold_build2 (BIT_IOR_EXPR, type,
9638 fold_build2 (BIT_AND_EXPR, type,
9639 TREE_OPERAND (arg0, 0), arg1),
9640 fold_build2 (BIT_AND_EXPR, type,
9641 TREE_OPERAND (arg0, 1), arg1));
9643 /* (X | Y) & Y is (X, Y). */
9644 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9645 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9646 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9647 /* (X | Y) & X is (Y, X). */
9648 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9649 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9650 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9651 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9652 /* X & (X | Y) is (Y, X). */
9653 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9654 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9655 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9656 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9657 /* X & (Y | X) is (Y, X). */
9658 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9659 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9660 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9661 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9663 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9664 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9665 && integer_onep (TREE_OPERAND (arg0, 1))
9666 && integer_onep (arg1))
9668 tem = TREE_OPERAND (arg0, 0);
9669 return fold_build2 (EQ_EXPR, type,
9670 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9671 build_int_cst (TREE_TYPE (tem), 1)),
9672 build_int_cst (TREE_TYPE (tem), 0));
9674 /* Fold ~X & 1 as (X & 1) == 0. */
9675 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9676 && integer_onep (arg1))
9678 tem = TREE_OPERAND (arg0, 0);
9679 return fold_build2 (EQ_EXPR, type,
9680 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9681 build_int_cst (TREE_TYPE (tem), 1)),
9682 build_int_cst (TREE_TYPE (tem), 0));
9685 /* Fold (X ^ Y) & Y as ~X & Y. */
9686 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9687 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9689 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9690 return fold_build2 (BIT_AND_EXPR, type,
9691 fold_build1 (BIT_NOT_EXPR, type, tem),
9692 fold_convert (type, arg1));
9694 /* Fold (X ^ Y) & X as ~Y & X. */
9695 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9696 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9697 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9699 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9700 return fold_build2 (BIT_AND_EXPR, type,
9701 fold_build1 (BIT_NOT_EXPR, type, tem),
9702 fold_convert (type, arg1));
9704 /* Fold X & (X ^ Y) as X & ~Y. */
9705 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9706 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9708 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9709 return fold_build2 (BIT_AND_EXPR, type,
9710 fold_convert (type, arg0),
9711 fold_build1 (BIT_NOT_EXPR, type, tem));
9713 /* Fold X & (Y ^ X) as ~Y & X. */
9714 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9715 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9716 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9718 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9719 return fold_build2 (BIT_AND_EXPR, type,
9720 fold_build1 (BIT_NOT_EXPR, type, tem),
9721 fold_convert (type, arg0));
9724 t1 = distribute_bit_expr (code, type, arg0, arg1);
9725 if (t1 != NULL_TREE)
9726 return t1;
9727 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9728 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9729 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9731 unsigned int prec
9732 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9734 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9735 && (~TREE_INT_CST_LOW (arg1)
9736 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9737 return fold_convert (type, TREE_OPERAND (arg0, 0));
9740 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9742 This results in more efficient code for machines without a NOR
9743 instruction. Combine will canonicalize to the first form
9744 which will allow use of NOR instructions provided by the
9745 backend if they exist. */
9746 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9747 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9749 return fold_build1 (BIT_NOT_EXPR, type,
9750 build2 (BIT_IOR_EXPR, type,
9751 TREE_OPERAND (arg0, 0),
9752 TREE_OPERAND (arg1, 0)));
9755 goto associate;
9757 case RDIV_EXPR:
9758 /* Don't touch a floating-point divide by zero unless the mode
9759 of the constant can represent infinity. */
9760 if (TREE_CODE (arg1) == REAL_CST
9761 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9762 && real_zerop (arg1))
9763 return NULL_TREE;
9765 /* Optimize A / A to 1.0 if we don't care about
9766 NaNs or Infinities. Skip the transformation
9767 for non-real operands. */
9768 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9769 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9770 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9771 && operand_equal_p (arg0, arg1, 0))
9773 tree r = build_real (TREE_TYPE (arg0), dconst1);
9775 return omit_two_operands (type, r, arg0, arg1);
9778 /* The complex version of the above A / A optimization. */
9779 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9780 && operand_equal_p (arg0, arg1, 0))
9782 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9783 if (! HONOR_NANS (TYPE_MODE (elem_type))
9784 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9786 tree r = build_real (elem_type, dconst1);
9787 /* omit_two_operands will call fold_convert for us. */
9788 return omit_two_operands (type, r, arg0, arg1);
9792 /* (-A) / (-B) -> A / B */
9793 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9794 return fold_build2 (RDIV_EXPR, type,
9795 TREE_OPERAND (arg0, 0),
9796 negate_expr (arg1));
9797 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9798 return fold_build2 (RDIV_EXPR, type,
9799 negate_expr (arg0),
9800 TREE_OPERAND (arg1, 0));
9802 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9803 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9804 && real_onep (arg1))
9805 return non_lvalue (fold_convert (type, arg0));
9807 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9808 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9809 && real_minus_onep (arg1))
9810 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9812 /* If ARG1 is a constant, we can convert this to a multiply by the
9813 reciprocal. This does not have the same rounding properties,
9814 so only do this if -funsafe-math-optimizations. We can actually
9815 always safely do it if ARG1 is a power of two, but it's hard to
9816 tell if it is or not in a portable manner. */
9817 if (TREE_CODE (arg1) == REAL_CST)
9819 if (flag_unsafe_math_optimizations
9820 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9821 arg1, 0)))
9822 return fold_build2 (MULT_EXPR, type, arg0, tem);
9823 /* Find the reciprocal if optimizing and the result is exact. */
9824 if (optimize)
9826 REAL_VALUE_TYPE r;
9827 r = TREE_REAL_CST (arg1);
9828 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
9830 tem = build_real (type, r);
9831 return fold_build2 (MULT_EXPR, type,
9832 fold_convert (type, arg0), tem);
9836 /* Convert A/B/C to A/(B*C). */
9837 if (flag_unsafe_math_optimizations
9838 && TREE_CODE (arg0) == RDIV_EXPR)
9839 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9840 fold_build2 (MULT_EXPR, type,
9841 TREE_OPERAND (arg0, 1), arg1));
9843 /* Convert A/(B/C) to (A/B)*C. */
9844 if (flag_unsafe_math_optimizations
9845 && TREE_CODE (arg1) == RDIV_EXPR)
9846 return fold_build2 (MULT_EXPR, type,
9847 fold_build2 (RDIV_EXPR, type, arg0,
9848 TREE_OPERAND (arg1, 0)),
9849 TREE_OPERAND (arg1, 1));
9851 /* Convert C1/(X*C2) into (C1/C2)/X. */
9852 if (flag_unsafe_math_optimizations
9853 && TREE_CODE (arg1) == MULT_EXPR
9854 && TREE_CODE (arg0) == REAL_CST
9855 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9857 tree tem = const_binop (RDIV_EXPR, arg0,
9858 TREE_OPERAND (arg1, 1), 0);
9859 if (tem)
9860 return fold_build2 (RDIV_EXPR, type, tem,
9861 TREE_OPERAND (arg1, 0));
9864 if (flag_unsafe_math_optimizations)
9866 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9867 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9869 /* Optimize sin(x)/cos(x) as tan(x). */
9870 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9871 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9872 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9873 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9874 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9876 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9878 if (tanfn != NULL_TREE)
9879 return build_function_call_expr (tanfn,
9880 TREE_OPERAND (arg0, 1));
9883 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9884 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9885 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9886 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9887 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9888 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9890 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9892 if (tanfn != NULL_TREE)
9894 tree tmp = TREE_OPERAND (arg0, 1);
9895 tmp = build_function_call_expr (tanfn, tmp);
9896 return fold_build2 (RDIV_EXPR, type,
9897 build_real (type, dconst1), tmp);
9901 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9902 NaNs or Infinities. */
9903 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9904 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9905 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9907 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9908 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9910 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9911 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9912 && operand_equal_p (arg00, arg01, 0))
9914 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9916 if (cosfn != NULL_TREE)
9917 return build_function_call_expr (cosfn,
9918 TREE_OPERAND (arg0, 1));
9922 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9923 NaNs or Infinities. */
9924 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9925 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9926 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9928 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9929 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9931 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9932 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9933 && operand_equal_p (arg00, arg01, 0))
9935 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9937 if (cosfn != NULL_TREE)
9939 tree tmp = TREE_OPERAND (arg0, 1);
9940 tmp = build_function_call_expr (cosfn, tmp);
9941 return fold_build2 (RDIV_EXPR, type,
9942 build_real (type, dconst1),
9943 tmp);
9948 /* Optimize pow(x,c)/x as pow(x,c-1). */
9949 if (fcode0 == BUILT_IN_POW
9950 || fcode0 == BUILT_IN_POWF
9951 || fcode0 == BUILT_IN_POWL)
9953 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9954 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9955 if (TREE_CODE (arg01) == REAL_CST
9956 && ! TREE_CONSTANT_OVERFLOW (arg01)
9957 && operand_equal_p (arg1, arg00, 0))
9959 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9960 REAL_VALUE_TYPE c;
9961 tree arg, arglist;
9963 c = TREE_REAL_CST (arg01);
9964 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9965 arg = build_real (type, c);
9966 arglist = build_tree_list (NULL_TREE, arg);
9967 arglist = tree_cons (NULL_TREE, arg1, arglist);
9968 return build_function_call_expr (powfn, arglist);
9972 /* Optimize x/expN(y) into x*expN(-y). */
9973 if (BUILTIN_EXPONENT_P (fcode1))
9975 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9976 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
9977 tree arglist = build_tree_list (NULL_TREE,
9978 fold_convert (type, arg));
9979 arg1 = build_function_call_expr (expfn, arglist);
9980 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9983 /* Optimize x/pow(y,z) into x*pow(y,-z). */
9984 if (fcode1 == BUILT_IN_POW
9985 || fcode1 == BUILT_IN_POWF
9986 || fcode1 == BUILT_IN_POWL)
9988 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9989 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9990 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
9991 tree neg11 = fold_convert (type, negate_expr (arg11));
9992 tree arglist = tree_cons(NULL_TREE, arg10,
9993 build_tree_list (NULL_TREE, neg11));
9994 arg1 = build_function_call_expr (powfn, arglist);
9995 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9998 return NULL_TREE;
10000 case TRUNC_DIV_EXPR:
10001 case FLOOR_DIV_EXPR:
10002 /* Simplify A / (B << N) where A and B are positive and B is
10003 a power of 2, to A >> (N + log2(B)). */
10004 if (TREE_CODE (arg1) == LSHIFT_EXPR
10005 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10007 tree sval = TREE_OPERAND (arg1, 0);
10008 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10010 tree sh_cnt = TREE_OPERAND (arg1, 1);
10011 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10013 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10014 sh_cnt, build_int_cst (NULL_TREE, pow2));
10015 return fold_build2 (RSHIFT_EXPR, type,
10016 fold_convert (type, arg0), sh_cnt);
10019 /* Fall thru */
10021 case ROUND_DIV_EXPR:
10022 case CEIL_DIV_EXPR:
10023 case EXACT_DIV_EXPR:
10024 if (integer_onep (arg1))
10025 return non_lvalue (fold_convert (type, arg0));
10026 if (integer_zerop (arg1))
10027 return NULL_TREE;
10028 /* X / -1 is -X. */
10029 if (!TYPE_UNSIGNED (type)
10030 && TREE_CODE (arg1) == INTEGER_CST
10031 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10032 && TREE_INT_CST_HIGH (arg1) == -1)
10033 return fold_convert (type, negate_expr (arg0));
10035 /* Convert -A / -B to A / B when the type is signed and overflow is
10036 undefined. */
10037 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10038 && TREE_CODE (arg0) == NEGATE_EXPR
10039 && negate_expr_p (arg1))
10040 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10041 negate_expr (arg1));
10042 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10043 && TREE_CODE (arg1) == NEGATE_EXPR
10044 && negate_expr_p (arg0))
10045 return fold_build2 (code, type, negate_expr (arg0),
10046 TREE_OPERAND (arg1, 0));
10048 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10049 operation, EXACT_DIV_EXPR.
10051 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10052 At one time others generated faster code, it's not clear if they do
10053 after the last round to changes to the DIV code in expmed.c. */
10054 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10055 && multiple_of_p (type, arg0, arg1))
10056 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10058 if (TREE_CODE (arg1) == INTEGER_CST
10059 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10060 return fold_convert (type, tem);
10062 return NULL_TREE;
10064 case CEIL_MOD_EXPR:
10065 case FLOOR_MOD_EXPR:
10066 case ROUND_MOD_EXPR:
10067 case TRUNC_MOD_EXPR:
10068 /* X % 1 is always zero, but be sure to preserve any side
10069 effects in X. */
10070 if (integer_onep (arg1))
10071 return omit_one_operand (type, integer_zero_node, arg0);
10073 /* X % 0, return X % 0 unchanged so that we can get the
10074 proper warnings and errors. */
10075 if (integer_zerop (arg1))
10076 return NULL_TREE;
10078 /* 0 % X is always zero, but be sure to preserve any side
10079 effects in X. Place this after checking for X == 0. */
10080 if (integer_zerop (arg0))
10081 return omit_one_operand (type, integer_zero_node, arg1);
10083 /* X % -1 is zero. */
10084 if (!TYPE_UNSIGNED (type)
10085 && TREE_CODE (arg1) == INTEGER_CST
10086 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10087 && TREE_INT_CST_HIGH (arg1) == -1)
10088 return omit_one_operand (type, integer_zero_node, arg0);
10090 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10091 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10092 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10093 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10095 tree c = arg1;
10096 /* Also optimize A % (C << N) where C is a power of 2,
10097 to A & ((C << N) - 1). */
10098 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10099 c = TREE_OPERAND (arg1, 0);
10101 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10103 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10104 build_int_cst (TREE_TYPE (arg1), 1));
10105 return fold_build2 (BIT_AND_EXPR, type,
10106 fold_convert (type, arg0),
10107 fold_convert (type, mask));
10111 /* X % -C is the same as X % C. */
10112 if (code == TRUNC_MOD_EXPR
10113 && !TYPE_UNSIGNED (type)
10114 && TREE_CODE (arg1) == INTEGER_CST
10115 && !TREE_CONSTANT_OVERFLOW (arg1)
10116 && TREE_INT_CST_HIGH (arg1) < 0
10117 && !flag_trapv
10118 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10119 && !sign_bit_p (arg1, arg1))
10120 return fold_build2 (code, type, fold_convert (type, arg0),
10121 fold_convert (type, negate_expr (arg1)));
10123 /* X % -Y is the same as X % Y. */
10124 if (code == TRUNC_MOD_EXPR
10125 && !TYPE_UNSIGNED (type)
10126 && TREE_CODE (arg1) == NEGATE_EXPR
10127 && !flag_trapv)
10128 return fold_build2 (code, type, fold_convert (type, arg0),
10129 fold_convert (type, TREE_OPERAND (arg1, 0)));
10131 if (TREE_CODE (arg1) == INTEGER_CST
10132 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10133 return fold_convert (type, tem);
10135 return NULL_TREE;
10137 case LROTATE_EXPR:
10138 case RROTATE_EXPR:
10139 if (integer_all_onesp (arg0))
10140 return omit_one_operand (type, arg0, arg1);
10141 goto shift;
10143 case RSHIFT_EXPR:
10144 /* Optimize -1 >> x for arithmetic right shifts. */
10145 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10146 return omit_one_operand (type, arg0, arg1);
10147 /* ... fall through ... */
10149 case LSHIFT_EXPR:
10150 shift:
10151 if (integer_zerop (arg1))
10152 return non_lvalue (fold_convert (type, arg0));
10153 if (integer_zerop (arg0))
10154 return omit_one_operand (type, arg0, arg1);
10156 /* Since negative shift count is not well-defined,
10157 don't try to compute it in the compiler. */
10158 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10159 return NULL_TREE;
10161 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10162 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10163 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10164 && host_integerp (TREE_OPERAND (arg0, 1), false)
10165 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10167 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10168 + TREE_INT_CST_LOW (arg1));
10170 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10171 being well defined. */
10172 if (low >= TYPE_PRECISION (type))
10174 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10175 low = low % TYPE_PRECISION (type);
10176 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10177 return build_int_cst (type, 0);
10178 else
10179 low = TYPE_PRECISION (type) - 1;
10182 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10183 build_int_cst (type, low));
10186 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10187 into x & ((unsigned)-1 >> c) for unsigned types. */
10188 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10189 || (TYPE_UNSIGNED (type)
10190 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10191 && host_integerp (arg1, false)
10192 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10193 && host_integerp (TREE_OPERAND (arg0, 1), false)
10194 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10196 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10197 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10198 tree lshift;
10199 tree arg00;
10201 if (low0 == low1)
10203 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10205 lshift = build_int_cst (type, -1);
10206 lshift = int_const_binop (code, lshift, arg1, 0);
10208 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10212 /* Rewrite an LROTATE_EXPR by a constant into an
10213 RROTATE_EXPR by a new constant. */
10214 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10216 tree tem = build_int_cst (TREE_TYPE (arg1),
10217 GET_MODE_BITSIZE (TYPE_MODE (type)));
10218 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10219 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10222 /* If we have a rotate of a bit operation with the rotate count and
10223 the second operand of the bit operation both constant,
10224 permute the two operations. */
10225 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10226 && (TREE_CODE (arg0) == BIT_AND_EXPR
10227 || TREE_CODE (arg0) == BIT_IOR_EXPR
10228 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10229 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10230 return fold_build2 (TREE_CODE (arg0), type,
10231 fold_build2 (code, type,
10232 TREE_OPERAND (arg0, 0), arg1),
10233 fold_build2 (code, type,
10234 TREE_OPERAND (arg0, 1), arg1));
10236 /* Two consecutive rotates adding up to the width of the mode can
10237 be ignored. */
10238 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10239 && TREE_CODE (arg0) == RROTATE_EXPR
10240 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10241 && TREE_INT_CST_HIGH (arg1) == 0
10242 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10243 && ((TREE_INT_CST_LOW (arg1)
10244 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10245 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10246 return TREE_OPERAND (arg0, 0);
10248 return NULL_TREE;
10250 case MIN_EXPR:
10251 if (operand_equal_p (arg0, arg1, 0))
10252 return omit_one_operand (type, arg0, arg1);
10253 if (INTEGRAL_TYPE_P (type)
10254 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10255 return omit_one_operand (type, arg1, arg0);
10256 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10257 if (tem)
10258 return tem;
10259 goto associate;
10261 case MAX_EXPR:
10262 if (operand_equal_p (arg0, arg1, 0))
10263 return omit_one_operand (type, arg0, arg1);
10264 if (INTEGRAL_TYPE_P (type)
10265 && TYPE_MAX_VALUE (type)
10266 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10267 return omit_one_operand (type, arg1, arg0);
10268 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10269 if (tem)
10270 return tem;
10271 goto associate;
10273 case TRUTH_ANDIF_EXPR:
10274 /* Note that the operands of this must be ints
10275 and their values must be 0 or 1.
10276 ("true" is a fixed value perhaps depending on the language.) */
10277 /* If first arg is constant zero, return it. */
10278 if (integer_zerop (arg0))
10279 return fold_convert (type, arg0);
10280 case TRUTH_AND_EXPR:
10281 /* If either arg is constant true, drop it. */
10282 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10283 return non_lvalue (fold_convert (type, arg1));
10284 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10285 /* Preserve sequence points. */
10286 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10287 return non_lvalue (fold_convert (type, arg0));
10288 /* If second arg is constant zero, result is zero, but first arg
10289 must be evaluated. */
10290 if (integer_zerop (arg1))
10291 return omit_one_operand (type, arg1, arg0);
10292 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10293 case will be handled here. */
10294 if (integer_zerop (arg0))
10295 return omit_one_operand (type, arg0, arg1);
10297 /* !X && X is always false. */
10298 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10299 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10300 return omit_one_operand (type, integer_zero_node, arg1);
10301 /* X && !X is always false. */
10302 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10303 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10304 return omit_one_operand (type, integer_zero_node, arg0);
10306 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10307 means A >= Y && A != MAX, but in this case we know that
10308 A < X <= MAX. */
10310 if (!TREE_SIDE_EFFECTS (arg0)
10311 && !TREE_SIDE_EFFECTS (arg1))
10313 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10314 if (tem && !operand_equal_p (tem, arg0, 0))
10315 return fold_build2 (code, type, tem, arg1);
10317 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10318 if (tem && !operand_equal_p (tem, arg1, 0))
10319 return fold_build2 (code, type, arg0, tem);
10322 truth_andor:
10323 /* We only do these simplifications if we are optimizing. */
10324 if (!optimize)
10325 return NULL_TREE;
10327 /* Check for things like (A || B) && (A || C). We can convert this
10328 to A || (B && C). Note that either operator can be any of the four
10329 truth and/or operations and the transformation will still be
10330 valid. Also note that we only care about order for the
10331 ANDIF and ORIF operators. If B contains side effects, this
10332 might change the truth-value of A. */
10333 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10334 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10335 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10336 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10337 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10338 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10340 tree a00 = TREE_OPERAND (arg0, 0);
10341 tree a01 = TREE_OPERAND (arg0, 1);
10342 tree a10 = TREE_OPERAND (arg1, 0);
10343 tree a11 = TREE_OPERAND (arg1, 1);
10344 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10345 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10346 && (code == TRUTH_AND_EXPR
10347 || code == TRUTH_OR_EXPR));
10349 if (operand_equal_p (a00, a10, 0))
10350 return fold_build2 (TREE_CODE (arg0), type, a00,
10351 fold_build2 (code, type, a01, a11));
10352 else if (commutative && operand_equal_p (a00, a11, 0))
10353 return fold_build2 (TREE_CODE (arg0), type, a00,
10354 fold_build2 (code, type, a01, a10));
10355 else if (commutative && operand_equal_p (a01, a10, 0))
10356 return fold_build2 (TREE_CODE (arg0), type, a01,
10357 fold_build2 (code, type, a00, a11));
10359 /* This case if tricky because we must either have commutative
10360 operators or else A10 must not have side-effects. */
10362 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10363 && operand_equal_p (a01, a11, 0))
10364 return fold_build2 (TREE_CODE (arg0), type,
10365 fold_build2 (code, type, a00, a10),
10366 a01);
10369 /* See if we can build a range comparison. */
10370 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10371 return tem;
10373 /* Check for the possibility of merging component references. If our
10374 lhs is another similar operation, try to merge its rhs with our
10375 rhs. Then try to merge our lhs and rhs. */
10376 if (TREE_CODE (arg0) == code
10377 && 0 != (tem = fold_truthop (code, type,
10378 TREE_OPERAND (arg0, 1), arg1)))
10379 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10381 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10382 return tem;
10384 return NULL_TREE;
10386 case TRUTH_ORIF_EXPR:
10387 /* Note that the operands of this must be ints
10388 and their values must be 0 or true.
10389 ("true" is a fixed value perhaps depending on the language.) */
10390 /* If first arg is constant true, return it. */
10391 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10392 return fold_convert (type, arg0);
10393 case TRUTH_OR_EXPR:
10394 /* If either arg is constant zero, drop it. */
10395 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10396 return non_lvalue (fold_convert (type, arg1));
10397 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10398 /* Preserve sequence points. */
10399 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10400 return non_lvalue (fold_convert (type, arg0));
10401 /* If second arg is constant true, result is true, but we must
10402 evaluate first arg. */
10403 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10404 return omit_one_operand (type, arg1, arg0);
10405 /* Likewise for first arg, but note this only occurs here for
10406 TRUTH_OR_EXPR. */
10407 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10408 return omit_one_operand (type, arg0, arg1);
10410 /* !X || X is always true. */
10411 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10412 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10413 return omit_one_operand (type, integer_one_node, arg1);
10414 /* X || !X is always true. */
10415 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10416 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10417 return omit_one_operand (type, integer_one_node, arg0);
10419 goto truth_andor;
10421 case TRUTH_XOR_EXPR:
10422 /* If the second arg is constant zero, drop it. */
10423 if (integer_zerop (arg1))
10424 return non_lvalue (fold_convert (type, arg0));
10425 /* If the second arg is constant true, this is a logical inversion. */
10426 if (integer_onep (arg1))
10428 /* Only call invert_truthvalue if operand is a truth value. */
10429 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10430 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10431 else
10432 tem = invert_truthvalue (arg0);
10433 return non_lvalue (fold_convert (type, tem));
10435 /* Identical arguments cancel to zero. */
10436 if (operand_equal_p (arg0, arg1, 0))
10437 return omit_one_operand (type, integer_zero_node, arg0);
10439 /* !X ^ X is always true. */
10440 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10441 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10442 return omit_one_operand (type, integer_one_node, arg1);
10444 /* X ^ !X is always true. */
10445 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10446 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10447 return omit_one_operand (type, integer_one_node, arg0);
10449 return NULL_TREE;
10451 case EQ_EXPR:
10452 case NE_EXPR:
10453 tem = fold_comparison (code, type, op0, op1);
10454 if (tem != NULL_TREE)
10455 return tem;
10457 /* bool_var != 0 becomes bool_var. */
10458 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10459 && code == NE_EXPR)
10460 return non_lvalue (fold_convert (type, arg0));
10462 /* bool_var == 1 becomes bool_var. */
10463 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10464 && code == EQ_EXPR)
10465 return non_lvalue (fold_convert (type, arg0));
10467 /* bool_var != 1 becomes !bool_var. */
10468 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10469 && code == NE_EXPR)
10470 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10472 /* bool_var == 0 becomes !bool_var. */
10473 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10474 && code == EQ_EXPR)
10475 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10477 /* If this is an equality comparison of the address of a non-weak
10478 object against zero, then we know the result. */
10479 if (TREE_CODE (arg0) == ADDR_EXPR
10480 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10481 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10482 && integer_zerop (arg1))
10483 return constant_boolean_node (code != EQ_EXPR, type);
10485 /* If this is an equality comparison of the address of two non-weak,
10486 unaliased symbols neither of which are extern (since we do not
10487 have access to attributes for externs), then we know the result. */
10488 if (TREE_CODE (arg0) == ADDR_EXPR
10489 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10490 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10491 && ! lookup_attribute ("alias",
10492 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10493 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10494 && TREE_CODE (arg1) == ADDR_EXPR
10495 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10496 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10497 && ! lookup_attribute ("alias",
10498 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10499 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10501 /* We know that we're looking at the address of two
10502 non-weak, unaliased, static _DECL nodes.
10504 It is both wasteful and incorrect to call operand_equal_p
10505 to compare the two ADDR_EXPR nodes. It is wasteful in that
10506 all we need to do is test pointer equality for the arguments
10507 to the two ADDR_EXPR nodes. It is incorrect to use
10508 operand_equal_p as that function is NOT equivalent to a
10509 C equality test. It can in fact return false for two
10510 objects which would test as equal using the C equality
10511 operator. */
10512 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10513 return constant_boolean_node (equal
10514 ? code == EQ_EXPR : code != EQ_EXPR,
10515 type);
10518 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10519 a MINUS_EXPR of a constant, we can convert it into a comparison with
10520 a revised constant as long as no overflow occurs. */
10521 if (TREE_CODE (arg1) == INTEGER_CST
10522 && (TREE_CODE (arg0) == PLUS_EXPR
10523 || TREE_CODE (arg0) == MINUS_EXPR)
10524 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10525 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10526 ? MINUS_EXPR : PLUS_EXPR,
10527 fold_convert (TREE_TYPE (arg0), arg1),
10528 TREE_OPERAND (arg0, 1), 0))
10529 && ! TREE_CONSTANT_OVERFLOW (tem))
10530 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10532 /* Similarly for a NEGATE_EXPR. */
10533 if (TREE_CODE (arg0) == NEGATE_EXPR
10534 && TREE_CODE (arg1) == INTEGER_CST
10535 && 0 != (tem = negate_expr (arg1))
10536 && TREE_CODE (tem) == INTEGER_CST
10537 && ! TREE_CONSTANT_OVERFLOW (tem))
10538 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10540 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10541 for !=. Don't do this for ordered comparisons due to overflow. */
10542 if (TREE_CODE (arg0) == MINUS_EXPR
10543 && integer_zerop (arg1))
10544 return fold_build2 (code, type,
10545 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10547 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10548 if (TREE_CODE (arg0) == ABS_EXPR
10549 && (integer_zerop (arg1) || real_zerop (arg1)))
10550 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10552 /* If this is an EQ or NE comparison with zero and ARG0 is
10553 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10554 two operations, but the latter can be done in one less insn
10555 on machines that have only two-operand insns or on which a
10556 constant cannot be the first operand. */
10557 if (TREE_CODE (arg0) == BIT_AND_EXPR
10558 && integer_zerop (arg1))
10560 tree arg00 = TREE_OPERAND (arg0, 0);
10561 tree arg01 = TREE_OPERAND (arg0, 1);
10562 if (TREE_CODE (arg00) == LSHIFT_EXPR
10563 && integer_onep (TREE_OPERAND (arg00, 0)))
10564 return
10565 fold_build2 (code, type,
10566 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10567 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10568 arg01, TREE_OPERAND (arg00, 1)),
10569 fold_convert (TREE_TYPE (arg0),
10570 integer_one_node)),
10571 arg1);
10572 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10573 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10574 return
10575 fold_build2 (code, type,
10576 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10577 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10578 arg00, TREE_OPERAND (arg01, 1)),
10579 fold_convert (TREE_TYPE (arg0),
10580 integer_one_node)),
10581 arg1);
10584 /* If this is an NE or EQ comparison of zero against the result of a
10585 signed MOD operation whose second operand is a power of 2, make
10586 the MOD operation unsigned since it is simpler and equivalent. */
10587 if (integer_zerop (arg1)
10588 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10589 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10590 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10591 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10592 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10593 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10595 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10596 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10597 fold_convert (newtype,
10598 TREE_OPERAND (arg0, 0)),
10599 fold_convert (newtype,
10600 TREE_OPERAND (arg0, 1)));
10602 return fold_build2 (code, type, newmod,
10603 fold_convert (newtype, arg1));
10606 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10607 C1 is a valid shift constant, and C2 is a power of two, i.e.
10608 a single bit. */
10609 if (TREE_CODE (arg0) == BIT_AND_EXPR
10610 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10611 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10612 == INTEGER_CST
10613 && integer_pow2p (TREE_OPERAND (arg0, 1))
10614 && integer_zerop (arg1))
10616 tree itype = TREE_TYPE (arg0);
10617 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10618 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10620 /* Check for a valid shift count. */
10621 if (TREE_INT_CST_HIGH (arg001) == 0
10622 && TREE_INT_CST_LOW (arg001) < prec)
10624 tree arg01 = TREE_OPERAND (arg0, 1);
10625 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10626 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10627 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10628 can be rewritten as (X & (C2 << C1)) != 0. */
10629 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10631 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10632 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10633 return fold_build2 (code, type, tem, arg1);
10635 /* Otherwise, for signed (arithmetic) shifts,
10636 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10637 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10638 else if (!TYPE_UNSIGNED (itype))
10639 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10640 arg000, build_int_cst (itype, 0));
10641 /* Otherwise, of unsigned (logical) shifts,
10642 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10643 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10644 else
10645 return omit_one_operand (type,
10646 code == EQ_EXPR ? integer_one_node
10647 : integer_zero_node,
10648 arg000);
10652 /* If this is an NE comparison of zero with an AND of one, remove the
10653 comparison since the AND will give the correct value. */
10654 if (code == NE_EXPR
10655 && integer_zerop (arg1)
10656 && TREE_CODE (arg0) == BIT_AND_EXPR
10657 && integer_onep (TREE_OPERAND (arg0, 1)))
10658 return fold_convert (type, arg0);
10660 /* If we have (A & C) == C where C is a power of 2, convert this into
10661 (A & C) != 0. Similarly for NE_EXPR. */
10662 if (TREE_CODE (arg0) == BIT_AND_EXPR
10663 && integer_pow2p (TREE_OPERAND (arg0, 1))
10664 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10665 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10666 arg0, fold_convert (TREE_TYPE (arg0),
10667 integer_zero_node));
10669 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10670 bit, then fold the expression into A < 0 or A >= 0. */
10671 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10672 if (tem)
10673 return tem;
10675 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10676 Similarly for NE_EXPR. */
10677 if (TREE_CODE (arg0) == BIT_AND_EXPR
10678 && TREE_CODE (arg1) == INTEGER_CST
10679 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10681 tree notc = fold_build1 (BIT_NOT_EXPR,
10682 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10683 TREE_OPERAND (arg0, 1));
10684 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10685 arg1, notc);
10686 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10687 if (integer_nonzerop (dandnotc))
10688 return omit_one_operand (type, rslt, arg0);
10691 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10692 Similarly for NE_EXPR. */
10693 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10694 && TREE_CODE (arg1) == INTEGER_CST
10695 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10697 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10698 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10699 TREE_OPERAND (arg0, 1), notd);
10700 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10701 if (integer_nonzerop (candnotd))
10702 return omit_one_operand (type, rslt, arg0);
10705 /* If this is a comparison of a field, we may be able to simplify it. */
10706 if (((TREE_CODE (arg0) == COMPONENT_REF
10707 && lang_hooks.can_use_bit_fields_p ())
10708 || TREE_CODE (arg0) == BIT_FIELD_REF)
10709 /* Handle the constant case even without -O
10710 to make sure the warnings are given. */
10711 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10713 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10714 if (t1)
10715 return t1;
10718 /* Optimize comparisons of strlen vs zero to a compare of the
10719 first character of the string vs zero. To wit,
10720 strlen(ptr) == 0 => *ptr == 0
10721 strlen(ptr) != 0 => *ptr != 0
10722 Other cases should reduce to one of these two (or a constant)
10723 due to the return value of strlen being unsigned. */
10724 if (TREE_CODE (arg0) == CALL_EXPR
10725 && integer_zerop (arg1))
10727 tree fndecl = get_callee_fndecl (arg0);
10728 tree arglist;
10730 if (fndecl
10731 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10732 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10733 && (arglist = TREE_OPERAND (arg0, 1))
10734 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10735 && ! TREE_CHAIN (arglist))
10737 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10738 return fold_build2 (code, type, iref,
10739 build_int_cst (TREE_TYPE (iref), 0));
10743 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10744 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10745 if (TREE_CODE (arg0) == RSHIFT_EXPR
10746 && integer_zerop (arg1)
10747 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10749 tree arg00 = TREE_OPERAND (arg0, 0);
10750 tree arg01 = TREE_OPERAND (arg0, 1);
10751 tree itype = TREE_TYPE (arg00);
10752 if (TREE_INT_CST_HIGH (arg01) == 0
10753 && TREE_INT_CST_LOW (arg01)
10754 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10756 if (TYPE_UNSIGNED (itype))
10758 itype = lang_hooks.types.signed_type (itype);
10759 arg00 = fold_convert (itype, arg00);
10761 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10762 type, arg00, build_int_cst (itype, 0));
10766 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10767 if (integer_zerop (arg1)
10768 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10769 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10770 TREE_OPERAND (arg0, 1));
10772 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10773 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10774 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10775 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10776 build_int_cst (TREE_TYPE (arg1), 0));
10777 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10778 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10779 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10780 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10781 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10782 build_int_cst (TREE_TYPE (arg1), 0));
10784 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10785 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10786 && TREE_CODE (arg1) == INTEGER_CST
10787 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10788 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10789 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10790 TREE_OPERAND (arg0, 1), arg1));
10792 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10793 (X & C) == 0 when C is a single bit. */
10794 if (TREE_CODE (arg0) == BIT_AND_EXPR
10795 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10796 && integer_zerop (arg1)
10797 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10799 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10800 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10801 TREE_OPERAND (arg0, 1));
10802 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10803 type, tem, arg1);
10806 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10807 constant C is a power of two, i.e. a single bit. */
10808 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10809 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10810 && integer_zerop (arg1)
10811 && integer_pow2p (TREE_OPERAND (arg0, 1))
10812 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10813 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10815 tree arg00 = TREE_OPERAND (arg0, 0);
10816 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10817 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10820 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10821 when is C is a power of two, i.e. a single bit. */
10822 if (TREE_CODE (arg0) == BIT_AND_EXPR
10823 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10824 && integer_zerop (arg1)
10825 && integer_pow2p (TREE_OPERAND (arg0, 1))
10826 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10827 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10829 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10830 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10831 arg000, TREE_OPERAND (arg0, 1));
10832 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10833 tem, build_int_cst (TREE_TYPE (tem), 0));
10836 if (integer_zerop (arg1)
10837 && tree_expr_nonzero_p (arg0))
10839 tree res = constant_boolean_node (code==NE_EXPR, type);
10840 return omit_one_operand (type, res, arg0);
10843 /* Fold -X op -Y as X op Y, where op is eq/ne. */
10844 if (TREE_CODE (arg0) == NEGATE_EXPR
10845 && TREE_CODE (arg1) == NEGATE_EXPR)
10846 return fold_build2 (code, type,
10847 TREE_OPERAND (arg0, 0),
10848 TREE_OPERAND (arg1, 0));
10850 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
10851 if (TREE_CODE (arg0) == BIT_AND_EXPR
10852 && TREE_CODE (arg1) == BIT_AND_EXPR)
10854 tree arg00 = TREE_OPERAND (arg0, 0);
10855 tree arg01 = TREE_OPERAND (arg0, 1);
10856 tree arg10 = TREE_OPERAND (arg1, 0);
10857 tree arg11 = TREE_OPERAND (arg1, 1);
10858 tree itype = TREE_TYPE (arg0);
10860 if (operand_equal_p (arg01, arg11, 0))
10861 return fold_build2 (code, type,
10862 fold_build2 (BIT_AND_EXPR, itype,
10863 fold_build2 (BIT_XOR_EXPR, itype,
10864 arg00, arg10),
10865 arg01),
10866 build_int_cst (itype, 0));
10868 if (operand_equal_p (arg01, arg10, 0))
10869 return fold_build2 (code, type,
10870 fold_build2 (BIT_AND_EXPR, itype,
10871 fold_build2 (BIT_XOR_EXPR, itype,
10872 arg00, arg11),
10873 arg01),
10874 build_int_cst (itype, 0));
10876 if (operand_equal_p (arg00, arg11, 0))
10877 return fold_build2 (code, type,
10878 fold_build2 (BIT_AND_EXPR, itype,
10879 fold_build2 (BIT_XOR_EXPR, itype,
10880 arg01, arg10),
10881 arg00),
10882 build_int_cst (itype, 0));
10884 if (operand_equal_p (arg00, arg10, 0))
10885 return fold_build2 (code, type,
10886 fold_build2 (BIT_AND_EXPR, itype,
10887 fold_build2 (BIT_XOR_EXPR, itype,
10888 arg01, arg11),
10889 arg00),
10890 build_int_cst (itype, 0));
10893 return NULL_TREE;
10895 case LT_EXPR:
10896 case GT_EXPR:
10897 case LE_EXPR:
10898 case GE_EXPR:
10899 tem = fold_comparison (code, type, op0, op1);
10900 if (tem != NULL_TREE)
10901 return tem;
10903 /* Transform comparisons of the form X +- C CMP X. */
10904 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10905 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10906 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10907 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10908 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10909 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
10910 && !(flag_wrapv || flag_trapv))))
10912 tree arg01 = TREE_OPERAND (arg0, 1);
10913 enum tree_code code0 = TREE_CODE (arg0);
10914 int is_positive;
10916 if (TREE_CODE (arg01) == REAL_CST)
10917 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10918 else
10919 is_positive = tree_int_cst_sgn (arg01);
10921 /* (X - c) > X becomes false. */
10922 if (code == GT_EXPR
10923 && ((code0 == MINUS_EXPR && is_positive >= 0)
10924 || (code0 == PLUS_EXPR && is_positive <= 0)))
10925 return constant_boolean_node (0, type);
10927 /* Likewise (X + c) < X becomes false. */
10928 if (code == LT_EXPR
10929 && ((code0 == PLUS_EXPR && is_positive >= 0)
10930 || (code0 == MINUS_EXPR && is_positive <= 0)))
10931 return constant_boolean_node (0, type);
10933 /* Convert (X - c) <= X to true. */
10934 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10935 && code == LE_EXPR
10936 && ((code0 == MINUS_EXPR && is_positive >= 0)
10937 || (code0 == PLUS_EXPR && is_positive <= 0)))
10938 return constant_boolean_node (1, type);
10940 /* Convert (X + c) >= X to true. */
10941 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10942 && code == GE_EXPR
10943 && ((code0 == PLUS_EXPR && is_positive >= 0)
10944 || (code0 == MINUS_EXPR && is_positive <= 0)))
10945 return constant_boolean_node (1, type);
10947 if (TREE_CODE (arg01) == INTEGER_CST)
10949 /* Convert X + c > X and X - c < X to true for integers. */
10950 if (code == GT_EXPR
10951 && ((code0 == PLUS_EXPR && is_positive > 0)
10952 || (code0 == MINUS_EXPR && is_positive < 0)))
10953 return constant_boolean_node (1, type);
10955 if (code == LT_EXPR
10956 && ((code0 == MINUS_EXPR && is_positive > 0)
10957 || (code0 == PLUS_EXPR && is_positive < 0)))
10958 return constant_boolean_node (1, type);
10960 /* Convert X + c <= X and X - c >= X to false for integers. */
10961 if (code == LE_EXPR
10962 && ((code0 == PLUS_EXPR && is_positive > 0)
10963 || (code0 == MINUS_EXPR && is_positive < 0)))
10964 return constant_boolean_node (0, type);
10966 if (code == GE_EXPR
10967 && ((code0 == MINUS_EXPR && is_positive > 0)
10968 || (code0 == PLUS_EXPR && is_positive < 0)))
10969 return constant_boolean_node (0, type);
10973 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10974 This transformation affects the cases which are handled in later
10975 optimizations involving comparisons with non-negative constants. */
10976 if (TREE_CODE (arg1) == INTEGER_CST
10977 && TREE_CODE (arg0) != INTEGER_CST
10978 && tree_int_cst_sgn (arg1) > 0)
10980 if (code == GE_EXPR)
10982 arg1 = const_binop (MINUS_EXPR, arg1,
10983 build_int_cst (TREE_TYPE (arg1), 1), 0);
10984 return fold_build2 (GT_EXPR, type, arg0,
10985 fold_convert (TREE_TYPE (arg0), arg1));
10987 if (code == LT_EXPR)
10989 arg1 = const_binop (MINUS_EXPR, arg1,
10990 build_int_cst (TREE_TYPE (arg1), 1), 0);
10991 return fold_build2 (LE_EXPR, type, arg0,
10992 fold_convert (TREE_TYPE (arg0), arg1));
10996 /* Comparisons with the highest or lowest possible integer of
10997 the specified precision will have known values. */
10999 tree arg1_type = TREE_TYPE (arg1);
11000 unsigned int width = TYPE_PRECISION (arg1_type);
11002 if (TREE_CODE (arg1) == INTEGER_CST
11003 && ! TREE_CONSTANT_OVERFLOW (arg1)
11004 && width <= 2 * HOST_BITS_PER_WIDE_INT
11005 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11007 HOST_WIDE_INT signed_max_hi;
11008 unsigned HOST_WIDE_INT signed_max_lo;
11009 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11011 if (width <= HOST_BITS_PER_WIDE_INT)
11013 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11014 - 1;
11015 signed_max_hi = 0;
11016 max_hi = 0;
11018 if (TYPE_UNSIGNED (arg1_type))
11020 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11021 min_lo = 0;
11022 min_hi = 0;
11024 else
11026 max_lo = signed_max_lo;
11027 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11028 min_hi = -1;
11031 else
11033 width -= HOST_BITS_PER_WIDE_INT;
11034 signed_max_lo = -1;
11035 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11036 - 1;
11037 max_lo = -1;
11038 min_lo = 0;
11040 if (TYPE_UNSIGNED (arg1_type))
11042 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11043 min_hi = 0;
11045 else
11047 max_hi = signed_max_hi;
11048 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11052 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11053 && TREE_INT_CST_LOW (arg1) == max_lo)
11054 switch (code)
11056 case GT_EXPR:
11057 return omit_one_operand (type, integer_zero_node, arg0);
11059 case GE_EXPR:
11060 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11062 case LE_EXPR:
11063 return omit_one_operand (type, integer_one_node, arg0);
11065 case LT_EXPR:
11066 return fold_build2 (NE_EXPR, type, arg0, arg1);
11068 /* The GE_EXPR and LT_EXPR cases above are not normally
11069 reached because of previous transformations. */
11071 default:
11072 break;
11074 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11075 == max_hi
11076 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11077 switch (code)
11079 case GT_EXPR:
11080 arg1 = const_binop (PLUS_EXPR, arg1,
11081 build_int_cst (TREE_TYPE (arg1), 1), 0);
11082 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11083 case LE_EXPR:
11084 arg1 = const_binop (PLUS_EXPR, arg1,
11085 build_int_cst (TREE_TYPE (arg1), 1), 0);
11086 return fold_build2 (NE_EXPR, type, arg0, arg1);
11087 default:
11088 break;
11090 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11091 == min_hi
11092 && TREE_INT_CST_LOW (arg1) == min_lo)
11093 switch (code)
11095 case LT_EXPR:
11096 return omit_one_operand (type, integer_zero_node, arg0);
11098 case LE_EXPR:
11099 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11101 case GE_EXPR:
11102 return omit_one_operand (type, integer_one_node, arg0);
11104 case GT_EXPR:
11105 return fold_build2 (NE_EXPR, type, op0, op1);
11107 default:
11108 break;
11110 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11111 == min_hi
11112 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11113 switch (code)
11115 case GE_EXPR:
11116 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11117 return fold_build2 (NE_EXPR, type, arg0, arg1);
11118 case LT_EXPR:
11119 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11120 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11121 default:
11122 break;
11125 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11126 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11127 && TYPE_UNSIGNED (arg1_type)
11128 /* We will flip the signedness of the comparison operator
11129 associated with the mode of arg1, so the sign bit is
11130 specified by this mode. Check that arg1 is the signed
11131 max associated with this sign bit. */
11132 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11133 /* signed_type does not work on pointer types. */
11134 && INTEGRAL_TYPE_P (arg1_type))
11136 /* The following case also applies to X < signed_max+1
11137 and X >= signed_max+1 because previous transformations. */
11138 if (code == LE_EXPR || code == GT_EXPR)
11140 tree st0, st1;
11141 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11142 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11143 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11144 type, fold_convert (st0, arg0),
11145 build_int_cst (st1, 0));
11151 /* If we are comparing an ABS_EXPR with a constant, we can
11152 convert all the cases into explicit comparisons, but they may
11153 well not be faster than doing the ABS and one comparison.
11154 But ABS (X) <= C is a range comparison, which becomes a subtraction
11155 and a comparison, and is probably faster. */
11156 if (code == LE_EXPR
11157 && TREE_CODE (arg1) == INTEGER_CST
11158 && TREE_CODE (arg0) == ABS_EXPR
11159 && ! TREE_SIDE_EFFECTS (arg0)
11160 && (0 != (tem = negate_expr (arg1)))
11161 && TREE_CODE (tem) == INTEGER_CST
11162 && ! TREE_CONSTANT_OVERFLOW (tem))
11163 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11164 build2 (GE_EXPR, type,
11165 TREE_OPERAND (arg0, 0), tem),
11166 build2 (LE_EXPR, type,
11167 TREE_OPERAND (arg0, 0), arg1));
11169 /* Convert ABS_EXPR<x> >= 0 to true. */
11170 if (code == GE_EXPR
11171 && tree_expr_nonnegative_p (arg0)
11172 && (integer_zerop (arg1)
11173 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11174 && real_zerop (arg1))))
11175 return omit_one_operand (type, integer_one_node, arg0);
11177 /* Convert ABS_EXPR<x> < 0 to false. */
11178 if (code == LT_EXPR
11179 && tree_expr_nonnegative_p (arg0)
11180 && (integer_zerop (arg1) || real_zerop (arg1)))
11181 return omit_one_operand (type, integer_zero_node, arg0);
11183 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11184 and similarly for >= into !=. */
11185 if ((code == LT_EXPR || code == GE_EXPR)
11186 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11187 && TREE_CODE (arg1) == LSHIFT_EXPR
11188 && integer_onep (TREE_OPERAND (arg1, 0)))
11189 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11190 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11191 TREE_OPERAND (arg1, 1)),
11192 build_int_cst (TREE_TYPE (arg0), 0));
11194 if ((code == LT_EXPR || code == GE_EXPR)
11195 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11196 && (TREE_CODE (arg1) == NOP_EXPR
11197 || TREE_CODE (arg1) == CONVERT_EXPR)
11198 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11199 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11200 return
11201 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11202 fold_convert (TREE_TYPE (arg0),
11203 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11204 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11205 1))),
11206 build_int_cst (TREE_TYPE (arg0), 0));
11208 return NULL_TREE;
11210 case UNORDERED_EXPR:
11211 case ORDERED_EXPR:
11212 case UNLT_EXPR:
11213 case UNLE_EXPR:
11214 case UNGT_EXPR:
11215 case UNGE_EXPR:
11216 case UNEQ_EXPR:
11217 case LTGT_EXPR:
11218 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11220 t1 = fold_relational_const (code, type, arg0, arg1);
11221 if (t1 != NULL_TREE)
11222 return t1;
11225 /* If the first operand is NaN, the result is constant. */
11226 if (TREE_CODE (arg0) == REAL_CST
11227 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11228 && (code != LTGT_EXPR || ! flag_trapping_math))
11230 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11231 ? integer_zero_node
11232 : integer_one_node;
11233 return omit_one_operand (type, t1, arg1);
11236 /* If the second operand is NaN, the result is constant. */
11237 if (TREE_CODE (arg1) == REAL_CST
11238 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11239 && (code != LTGT_EXPR || ! flag_trapping_math))
11241 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11242 ? integer_zero_node
11243 : integer_one_node;
11244 return omit_one_operand (type, t1, arg0);
11247 /* Simplify unordered comparison of something with itself. */
11248 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11249 && operand_equal_p (arg0, arg1, 0))
11250 return constant_boolean_node (1, type);
11252 if (code == LTGT_EXPR
11253 && !flag_trapping_math
11254 && operand_equal_p (arg0, arg1, 0))
11255 return constant_boolean_node (0, type);
11257 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11259 tree targ0 = strip_float_extensions (arg0);
11260 tree targ1 = strip_float_extensions (arg1);
11261 tree newtype = TREE_TYPE (targ0);
11263 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11264 newtype = TREE_TYPE (targ1);
11266 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11267 return fold_build2 (code, type, fold_convert (newtype, targ0),
11268 fold_convert (newtype, targ1));
11271 return NULL_TREE;
11273 case COMPOUND_EXPR:
11274 /* When pedantic, a compound expression can be neither an lvalue
11275 nor an integer constant expression. */
11276 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11277 return NULL_TREE;
11278 /* Don't let (0, 0) be null pointer constant. */
11279 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11280 : fold_convert (type, arg1);
11281 return pedantic_non_lvalue (tem);
11283 case COMPLEX_EXPR:
11284 if ((TREE_CODE (arg0) == REAL_CST
11285 && TREE_CODE (arg1) == REAL_CST)
11286 || (TREE_CODE (arg0) == INTEGER_CST
11287 && TREE_CODE (arg1) == INTEGER_CST))
11288 return build_complex (type, arg0, arg1);
11289 return NULL_TREE;
11291 case ASSERT_EXPR:
11292 /* An ASSERT_EXPR should never be passed to fold_binary. */
11293 gcc_unreachable ();
11295 default:
11296 return NULL_TREE;
11297 } /* switch (code) */
11300 /* Callback for walk_tree, looking for LABEL_EXPR.
11301 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
11302 Do not check the sub-tree of GOTO_EXPR. */
11304 static tree
11305 contains_label_1 (tree *tp,
11306 int *walk_subtrees,
11307 void *data ATTRIBUTE_UNUSED)
11309 switch (TREE_CODE (*tp))
11311 case LABEL_EXPR:
11312 return *tp;
11313 case GOTO_EXPR:
11314 *walk_subtrees = 0;
11315 /* no break */
11316 default:
11317 return NULL_TREE;
11321 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11322 accessible from outside the sub-tree. Returns NULL_TREE if no
11323 addressable label is found. */
11325 static bool
11326 contains_label_p (tree st)
11328 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11331 /* Fold a ternary expression of code CODE and type TYPE with operands
11332 OP0, OP1, and OP2. Return the folded expression if folding is
11333 successful. Otherwise, return NULL_TREE. */
11335 tree
11336 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11338 tree tem;
11339 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11340 enum tree_code_class kind = TREE_CODE_CLASS (code);
11342 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11343 && TREE_CODE_LENGTH (code) == 3);
11345 /* Strip any conversions that don't change the mode. This is safe
11346 for every expression, except for a comparison expression because
11347 its signedness is derived from its operands. So, in the latter
11348 case, only strip conversions that don't change the signedness.
11350 Note that this is done as an internal manipulation within the
11351 constant folder, in order to find the simplest representation of
11352 the arguments so that their form can be studied. In any cases,
11353 the appropriate type conversions should be put back in the tree
11354 that will get out of the constant folder. */
11355 if (op0)
11357 arg0 = op0;
11358 STRIP_NOPS (arg0);
11361 if (op1)
11363 arg1 = op1;
11364 STRIP_NOPS (arg1);
11367 switch (code)
11369 case COMPONENT_REF:
11370 if (TREE_CODE (arg0) == CONSTRUCTOR
11371 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11373 unsigned HOST_WIDE_INT idx;
11374 tree field, value;
11375 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11376 if (field == arg1)
11377 return value;
11379 return NULL_TREE;
11381 case COND_EXPR:
11382 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11383 so all simple results must be passed through pedantic_non_lvalue. */
11384 if (TREE_CODE (arg0) == INTEGER_CST)
11386 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11387 tem = integer_zerop (arg0) ? op2 : op1;
11388 /* Only optimize constant conditions when the selected branch
11389 has the same type as the COND_EXPR. This avoids optimizing
11390 away "c ? x : throw", where the throw has a void type.
11391 Avoid throwing away that operand which contains label. */
11392 if ((!TREE_SIDE_EFFECTS (unused_op)
11393 || !contains_label_p (unused_op))
11394 && (! VOID_TYPE_P (TREE_TYPE (tem))
11395 || VOID_TYPE_P (type)))
11396 return pedantic_non_lvalue (tem);
11397 return NULL_TREE;
11399 if (operand_equal_p (arg1, op2, 0))
11400 return pedantic_omit_one_operand (type, arg1, arg0);
11402 /* If we have A op B ? A : C, we may be able to convert this to a
11403 simpler expression, depending on the operation and the values
11404 of B and C. Signed zeros prevent all of these transformations,
11405 for reasons given above each one.
11407 Also try swapping the arguments and inverting the conditional. */
11408 if (COMPARISON_CLASS_P (arg0)
11409 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11410 arg1, TREE_OPERAND (arg0, 1))
11411 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11413 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11414 if (tem)
11415 return tem;
11418 if (COMPARISON_CLASS_P (arg0)
11419 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11420 op2,
11421 TREE_OPERAND (arg0, 1))
11422 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11424 tem = fold_truth_not_expr (arg0);
11425 if (tem && COMPARISON_CLASS_P (tem))
11427 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11428 if (tem)
11429 return tem;
11433 /* If the second operand is simpler than the third, swap them
11434 since that produces better jump optimization results. */
11435 if (truth_value_p (TREE_CODE (arg0))
11436 && tree_swap_operands_p (op1, op2, false))
11438 /* See if this can be inverted. If it can't, possibly because
11439 it was a floating-point inequality comparison, don't do
11440 anything. */
11441 tem = fold_truth_not_expr (arg0);
11442 if (tem)
11443 return fold_build3 (code, type, tem, op2, op1);
11446 /* Convert A ? 1 : 0 to simply A. */
11447 if (integer_onep (op1)
11448 && integer_zerop (op2)
11449 /* If we try to convert OP0 to our type, the
11450 call to fold will try to move the conversion inside
11451 a COND, which will recurse. In that case, the COND_EXPR
11452 is probably the best choice, so leave it alone. */
11453 && type == TREE_TYPE (arg0))
11454 return pedantic_non_lvalue (arg0);
11456 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11457 over COND_EXPR in cases such as floating point comparisons. */
11458 if (integer_zerop (op1)
11459 && integer_onep (op2)
11460 && truth_value_p (TREE_CODE (arg0)))
11461 return pedantic_non_lvalue (fold_convert (type,
11462 invert_truthvalue (arg0)));
11464 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11465 if (TREE_CODE (arg0) == LT_EXPR
11466 && integer_zerop (TREE_OPERAND (arg0, 1))
11467 && integer_zerop (op2)
11468 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11470 /* sign_bit_p only checks ARG1 bits within A's precision.
11471 If <sign bit of A> has wider type than A, bits outside
11472 of A's precision in <sign bit of A> need to be checked.
11473 If they are all 0, this optimization needs to be done
11474 in unsigned A's type, if they are all 1 in signed A's type,
11475 otherwise this can't be done. */
11476 if (TYPE_PRECISION (TREE_TYPE (tem))
11477 < TYPE_PRECISION (TREE_TYPE (arg1))
11478 && TYPE_PRECISION (TREE_TYPE (tem))
11479 < TYPE_PRECISION (type))
11481 unsigned HOST_WIDE_INT mask_lo;
11482 HOST_WIDE_INT mask_hi;
11483 int inner_width, outer_width;
11484 tree tem_type;
11486 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11487 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11488 if (outer_width > TYPE_PRECISION (type))
11489 outer_width = TYPE_PRECISION (type);
11491 if (outer_width > HOST_BITS_PER_WIDE_INT)
11493 mask_hi = ((unsigned HOST_WIDE_INT) -1
11494 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11495 mask_lo = -1;
11497 else
11499 mask_hi = 0;
11500 mask_lo = ((unsigned HOST_WIDE_INT) -1
11501 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11503 if (inner_width > HOST_BITS_PER_WIDE_INT)
11505 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11506 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11507 mask_lo = 0;
11509 else
11510 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11511 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11513 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11514 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11516 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11517 tem = fold_convert (tem_type, tem);
11519 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11520 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11522 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11523 tem = fold_convert (tem_type, tem);
11525 else
11526 tem = NULL;
11529 if (tem)
11530 return fold_convert (type,
11531 fold_build2 (BIT_AND_EXPR,
11532 TREE_TYPE (tem), tem,
11533 fold_convert (TREE_TYPE (tem),
11534 arg1)));
11537 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11538 already handled above. */
11539 if (TREE_CODE (arg0) == BIT_AND_EXPR
11540 && integer_onep (TREE_OPERAND (arg0, 1))
11541 && integer_zerop (op2)
11542 && integer_pow2p (arg1))
11544 tree tem = TREE_OPERAND (arg0, 0);
11545 STRIP_NOPS (tem);
11546 if (TREE_CODE (tem) == RSHIFT_EXPR
11547 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11548 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11549 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11550 return fold_build2 (BIT_AND_EXPR, type,
11551 TREE_OPERAND (tem, 0), arg1);
11554 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11555 is probably obsolete because the first operand should be a
11556 truth value (that's why we have the two cases above), but let's
11557 leave it in until we can confirm this for all front-ends. */
11558 if (integer_zerop (op2)
11559 && TREE_CODE (arg0) == NE_EXPR
11560 && integer_zerop (TREE_OPERAND (arg0, 1))
11561 && integer_pow2p (arg1)
11562 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11563 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11564 arg1, OEP_ONLY_CONST))
11565 return pedantic_non_lvalue (fold_convert (type,
11566 TREE_OPERAND (arg0, 0)));
11568 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11569 if (integer_zerop (op2)
11570 && truth_value_p (TREE_CODE (arg0))
11571 && truth_value_p (TREE_CODE (arg1)))
11572 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11573 fold_convert (type, arg0),
11574 arg1);
11576 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11577 if (integer_onep (op2)
11578 && truth_value_p (TREE_CODE (arg0))
11579 && truth_value_p (TREE_CODE (arg1)))
11581 /* Only perform transformation if ARG0 is easily inverted. */
11582 tem = fold_truth_not_expr (arg0);
11583 if (tem)
11584 return fold_build2 (TRUTH_ORIF_EXPR, type,
11585 fold_convert (type, tem),
11586 arg1);
11589 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11590 if (integer_zerop (arg1)
11591 && truth_value_p (TREE_CODE (arg0))
11592 && truth_value_p (TREE_CODE (op2)))
11594 /* Only perform transformation if ARG0 is easily inverted. */
11595 tem = fold_truth_not_expr (arg0);
11596 if (tem)
11597 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11598 fold_convert (type, tem),
11599 op2);
11602 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11603 if (integer_onep (arg1)
11604 && truth_value_p (TREE_CODE (arg0))
11605 && truth_value_p (TREE_CODE (op2)))
11606 return fold_build2 (TRUTH_ORIF_EXPR, type,
11607 fold_convert (type, arg0),
11608 op2);
11610 return NULL_TREE;
11612 case CALL_EXPR:
11613 /* Check for a built-in function. */
11614 if (TREE_CODE (op0) == ADDR_EXPR
11615 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11616 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11617 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11618 return NULL_TREE;
11620 case BIT_FIELD_REF:
11621 if (TREE_CODE (arg0) == VECTOR_CST
11622 && type == TREE_TYPE (TREE_TYPE (arg0))
11623 && host_integerp (arg1, 1)
11624 && host_integerp (op2, 1))
11626 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11627 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11629 if (width != 0
11630 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11631 && (idx % width) == 0
11632 && (idx = idx / width)
11633 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11635 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11636 while (idx-- > 0 && elements)
11637 elements = TREE_CHAIN (elements);
11638 if (elements)
11639 return TREE_VALUE (elements);
11640 else
11641 return fold_convert (type, integer_zero_node);
11644 return NULL_TREE;
11646 default:
11647 return NULL_TREE;
11648 } /* switch (code) */
11651 /* Perform constant folding and related simplification of EXPR.
11652 The related simplifications include x*1 => x, x*0 => 0, etc.,
11653 and application of the associative law.
11654 NOP_EXPR conversions may be removed freely (as long as we
11655 are careful not to change the type of the overall expression).
11656 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11657 but we can constant-fold them if they have constant operands. */
11659 #ifdef ENABLE_FOLD_CHECKING
11660 # define fold(x) fold_1 (x)
11661 static tree fold_1 (tree);
11662 static
11663 #endif
11664 tree
11665 fold (tree expr)
11667 const tree t = expr;
11668 enum tree_code code = TREE_CODE (t);
11669 enum tree_code_class kind = TREE_CODE_CLASS (code);
11670 tree tem;
11672 /* Return right away if a constant. */
11673 if (kind == tcc_constant)
11674 return t;
11676 if (IS_EXPR_CODE_CLASS (kind))
11678 tree type = TREE_TYPE (t);
11679 tree op0, op1, op2;
11681 switch (TREE_CODE_LENGTH (code))
11683 case 1:
11684 op0 = TREE_OPERAND (t, 0);
11685 tem = fold_unary (code, type, op0);
11686 return tem ? tem : expr;
11687 case 2:
11688 op0 = TREE_OPERAND (t, 0);
11689 op1 = TREE_OPERAND (t, 1);
11690 tem = fold_binary (code, type, op0, op1);
11691 return tem ? tem : expr;
11692 case 3:
11693 op0 = TREE_OPERAND (t, 0);
11694 op1 = TREE_OPERAND (t, 1);
11695 op2 = TREE_OPERAND (t, 2);
11696 tem = fold_ternary (code, type, op0, op1, op2);
11697 return tem ? tem : expr;
11698 default:
11699 break;
11703 switch (code)
11705 case CONST_DECL:
11706 return fold (DECL_INITIAL (t));
11708 default:
11709 return t;
11710 } /* switch (code) */
11713 #ifdef ENABLE_FOLD_CHECKING
11714 #undef fold
11716 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11717 static void fold_check_failed (tree, tree);
11718 void print_fold_checksum (tree);
11720 /* When --enable-checking=fold, compute a digest of expr before
11721 and after actual fold call to see if fold did not accidentally
11722 change original expr. */
11724 tree
11725 fold (tree expr)
11727 tree ret;
11728 struct md5_ctx ctx;
11729 unsigned char checksum_before[16], checksum_after[16];
11730 htab_t ht;
11732 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11733 md5_init_ctx (&ctx);
11734 fold_checksum_tree (expr, &ctx, ht);
11735 md5_finish_ctx (&ctx, checksum_before);
11736 htab_empty (ht);
11738 ret = fold_1 (expr);
11740 md5_init_ctx (&ctx);
11741 fold_checksum_tree (expr, &ctx, ht);
11742 md5_finish_ctx (&ctx, checksum_after);
11743 htab_delete (ht);
11745 if (memcmp (checksum_before, checksum_after, 16))
11746 fold_check_failed (expr, ret);
11748 return ret;
11751 void
11752 print_fold_checksum (tree expr)
11754 struct md5_ctx ctx;
11755 unsigned char checksum[16], cnt;
11756 htab_t ht;
11758 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11759 md5_init_ctx (&ctx);
11760 fold_checksum_tree (expr, &ctx, ht);
11761 md5_finish_ctx (&ctx, checksum);
11762 htab_delete (ht);
11763 for (cnt = 0; cnt < 16; ++cnt)
11764 fprintf (stderr, "%02x", checksum[cnt]);
11765 putc ('\n', stderr);
11768 static void
11769 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11771 internal_error ("fold check: original tree changed by fold");
11774 static void
11775 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11777 void **slot;
11778 enum tree_code code;
11779 struct tree_function_decl buf;
11780 int i, len;
11782 recursive_label:
11784 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11785 <= sizeof (struct tree_function_decl))
11786 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11787 if (expr == NULL)
11788 return;
11789 slot = htab_find_slot (ht, expr, INSERT);
11790 if (*slot != NULL)
11791 return;
11792 *slot = expr;
11793 code = TREE_CODE (expr);
11794 if (TREE_CODE_CLASS (code) == tcc_declaration
11795 && DECL_ASSEMBLER_NAME_SET_P (expr))
11797 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11798 memcpy ((char *) &buf, expr, tree_size (expr));
11799 expr = (tree) &buf;
11800 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11802 else if (TREE_CODE_CLASS (code) == tcc_type
11803 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11804 || TYPE_CACHED_VALUES_P (expr)
11805 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11807 /* Allow these fields to be modified. */
11808 memcpy ((char *) &buf, expr, tree_size (expr));
11809 expr = (tree) &buf;
11810 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11811 TYPE_POINTER_TO (expr) = NULL;
11812 TYPE_REFERENCE_TO (expr) = NULL;
11813 if (TYPE_CACHED_VALUES_P (expr))
11815 TYPE_CACHED_VALUES_P (expr) = 0;
11816 TYPE_CACHED_VALUES (expr) = NULL;
11819 md5_process_bytes (expr, tree_size (expr), ctx);
11820 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11821 if (TREE_CODE_CLASS (code) != tcc_type
11822 && TREE_CODE_CLASS (code) != tcc_declaration
11823 && code != TREE_LIST)
11824 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11825 switch (TREE_CODE_CLASS (code))
11827 case tcc_constant:
11828 switch (code)
11830 case STRING_CST:
11831 md5_process_bytes (TREE_STRING_POINTER (expr),
11832 TREE_STRING_LENGTH (expr), ctx);
11833 break;
11834 case COMPLEX_CST:
11835 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11836 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11837 break;
11838 case VECTOR_CST:
11839 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11840 break;
11841 default:
11842 break;
11844 break;
11845 case tcc_exceptional:
11846 switch (code)
11848 case TREE_LIST:
11849 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11850 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11851 expr = TREE_CHAIN (expr);
11852 goto recursive_label;
11853 break;
11854 case TREE_VEC:
11855 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11856 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11857 break;
11858 default:
11859 break;
11861 break;
11862 case tcc_expression:
11863 case tcc_reference:
11864 case tcc_comparison:
11865 case tcc_unary:
11866 case tcc_binary:
11867 case tcc_statement:
11868 len = TREE_CODE_LENGTH (code);
11869 for (i = 0; i < len; ++i)
11870 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11871 break;
11872 case tcc_declaration:
11873 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11874 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11875 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11877 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11878 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11879 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11880 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11881 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11883 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11884 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11886 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11888 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11889 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11890 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
11892 break;
11893 case tcc_type:
11894 if (TREE_CODE (expr) == ENUMERAL_TYPE)
11895 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11896 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
11897 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
11898 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
11899 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
11900 if (INTEGRAL_TYPE_P (expr)
11901 || SCALAR_FLOAT_TYPE_P (expr))
11903 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
11904 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
11906 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
11907 if (TREE_CODE (expr) == RECORD_TYPE
11908 || TREE_CODE (expr) == UNION_TYPE
11909 || TREE_CODE (expr) == QUAL_UNION_TYPE)
11910 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
11911 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
11912 break;
11913 default:
11914 break;
11918 #endif
11920 /* Fold a unary tree expression with code CODE of type TYPE with an
11921 operand OP0. Return a folded expression if successful. Otherwise,
11922 return a tree expression with code CODE of type TYPE with an
11923 operand OP0. */
11925 tree
11926 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
11928 tree tem;
11929 #ifdef ENABLE_FOLD_CHECKING
11930 unsigned char checksum_before[16], checksum_after[16];
11931 struct md5_ctx ctx;
11932 htab_t ht;
11934 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11935 md5_init_ctx (&ctx);
11936 fold_checksum_tree (op0, &ctx, ht);
11937 md5_finish_ctx (&ctx, checksum_before);
11938 htab_empty (ht);
11939 #endif
11941 tem = fold_unary (code, type, op0);
11942 if (!tem)
11943 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
11945 #ifdef ENABLE_FOLD_CHECKING
11946 md5_init_ctx (&ctx);
11947 fold_checksum_tree (op0, &ctx, ht);
11948 md5_finish_ctx (&ctx, checksum_after);
11949 htab_delete (ht);
11951 if (memcmp (checksum_before, checksum_after, 16))
11952 fold_check_failed (op0, tem);
11953 #endif
11954 return tem;
11957 /* Fold a binary tree expression with code CODE of type TYPE with
11958 operands OP0 and OP1. Return a folded expression if successful.
11959 Otherwise, return a tree expression with code CODE of type TYPE
11960 with operands OP0 and OP1. */
11962 tree
11963 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
11964 MEM_STAT_DECL)
11966 tree tem;
11967 #ifdef ENABLE_FOLD_CHECKING
11968 unsigned char checksum_before_op0[16],
11969 checksum_before_op1[16],
11970 checksum_after_op0[16],
11971 checksum_after_op1[16];
11972 struct md5_ctx ctx;
11973 htab_t ht;
11975 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11976 md5_init_ctx (&ctx);
11977 fold_checksum_tree (op0, &ctx, ht);
11978 md5_finish_ctx (&ctx, checksum_before_op0);
11979 htab_empty (ht);
11981 md5_init_ctx (&ctx);
11982 fold_checksum_tree (op1, &ctx, ht);
11983 md5_finish_ctx (&ctx, checksum_before_op1);
11984 htab_empty (ht);
11985 #endif
11987 tem = fold_binary (code, type, op0, op1);
11988 if (!tem)
11989 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
11991 #ifdef ENABLE_FOLD_CHECKING
11992 md5_init_ctx (&ctx);
11993 fold_checksum_tree (op0, &ctx, ht);
11994 md5_finish_ctx (&ctx, checksum_after_op0);
11995 htab_empty (ht);
11997 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11998 fold_check_failed (op0, tem);
12000 md5_init_ctx (&ctx);
12001 fold_checksum_tree (op1, &ctx, ht);
12002 md5_finish_ctx (&ctx, checksum_after_op1);
12003 htab_delete (ht);
12005 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12006 fold_check_failed (op1, tem);
12007 #endif
12008 return tem;
12011 /* Fold a ternary tree expression with code CODE of type TYPE with
12012 operands OP0, OP1, and OP2. Return a folded expression if
12013 successful. Otherwise, return a tree expression with code CODE of
12014 type TYPE with operands OP0, OP1, and OP2. */
12016 tree
12017 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12018 MEM_STAT_DECL)
12020 tree tem;
12021 #ifdef ENABLE_FOLD_CHECKING
12022 unsigned char checksum_before_op0[16],
12023 checksum_before_op1[16],
12024 checksum_before_op2[16],
12025 checksum_after_op0[16],
12026 checksum_after_op1[16],
12027 checksum_after_op2[16];
12028 struct md5_ctx ctx;
12029 htab_t ht;
12031 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12032 md5_init_ctx (&ctx);
12033 fold_checksum_tree (op0, &ctx, ht);
12034 md5_finish_ctx (&ctx, checksum_before_op0);
12035 htab_empty (ht);
12037 md5_init_ctx (&ctx);
12038 fold_checksum_tree (op1, &ctx, ht);
12039 md5_finish_ctx (&ctx, checksum_before_op1);
12040 htab_empty (ht);
12042 md5_init_ctx (&ctx);
12043 fold_checksum_tree (op2, &ctx, ht);
12044 md5_finish_ctx (&ctx, checksum_before_op2);
12045 htab_empty (ht);
12046 #endif
12048 tem = fold_ternary (code, type, op0, op1, op2);
12049 if (!tem)
12050 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12052 #ifdef ENABLE_FOLD_CHECKING
12053 md5_init_ctx (&ctx);
12054 fold_checksum_tree (op0, &ctx, ht);
12055 md5_finish_ctx (&ctx, checksum_after_op0);
12056 htab_empty (ht);
12058 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12059 fold_check_failed (op0, tem);
12061 md5_init_ctx (&ctx);
12062 fold_checksum_tree (op1, &ctx, ht);
12063 md5_finish_ctx (&ctx, checksum_after_op1);
12064 htab_empty (ht);
12066 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12067 fold_check_failed (op1, tem);
12069 md5_init_ctx (&ctx);
12070 fold_checksum_tree (op2, &ctx, ht);
12071 md5_finish_ctx (&ctx, checksum_after_op2);
12072 htab_delete (ht);
12074 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12075 fold_check_failed (op2, tem);
12076 #endif
12077 return tem;
12080 /* Perform constant folding and related simplification of initializer
12081 expression EXPR. These behave identically to "fold_buildN" but ignore
12082 potential run-time traps and exceptions that fold must preserve. */
12084 #define START_FOLD_INIT \
12085 int saved_signaling_nans = flag_signaling_nans;\
12086 int saved_trapping_math = flag_trapping_math;\
12087 int saved_rounding_math = flag_rounding_math;\
12088 int saved_trapv = flag_trapv;\
12089 int saved_folding_initializer = folding_initializer;\
12090 flag_signaling_nans = 0;\
12091 flag_trapping_math = 0;\
12092 flag_rounding_math = 0;\
12093 flag_trapv = 0;\
12094 folding_initializer = 1;
12096 #define END_FOLD_INIT \
12097 flag_signaling_nans = saved_signaling_nans;\
12098 flag_trapping_math = saved_trapping_math;\
12099 flag_rounding_math = saved_rounding_math;\
12100 flag_trapv = saved_trapv;\
12101 folding_initializer = saved_folding_initializer;
12103 tree
12104 fold_build1_initializer (enum tree_code code, tree type, tree op)
12106 tree result;
12107 START_FOLD_INIT;
12109 result = fold_build1 (code, type, op);
12111 END_FOLD_INIT;
12112 return result;
12115 tree
12116 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12118 tree result;
12119 START_FOLD_INIT;
12121 result = fold_build2 (code, type, op0, op1);
12123 END_FOLD_INIT;
12124 return result;
12127 tree
12128 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12129 tree op2)
12131 tree result;
12132 START_FOLD_INIT;
12134 result = fold_build3 (code, type, op0, op1, op2);
12136 END_FOLD_INIT;
12137 return result;
12140 #undef START_FOLD_INIT
12141 #undef END_FOLD_INIT
12143 /* Determine if first argument is a multiple of second argument. Return 0 if
12144 it is not, or we cannot easily determined it to be.
12146 An example of the sort of thing we care about (at this point; this routine
12147 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12148 fold cases do now) is discovering that
12150 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12152 is a multiple of
12154 SAVE_EXPR (J * 8)
12156 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12158 This code also handles discovering that
12160 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12162 is a multiple of 8 so we don't have to worry about dealing with a
12163 possible remainder.
12165 Note that we *look* inside a SAVE_EXPR only to determine how it was
12166 calculated; it is not safe for fold to do much of anything else with the
12167 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12168 at run time. For example, the latter example above *cannot* be implemented
12169 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12170 evaluation time of the original SAVE_EXPR is not necessarily the same at
12171 the time the new expression is evaluated. The only optimization of this
12172 sort that would be valid is changing
12174 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12176 divided by 8 to
12178 SAVE_EXPR (I) * SAVE_EXPR (J)
12180 (where the same SAVE_EXPR (J) is used in the original and the
12181 transformed version). */
12183 static int
12184 multiple_of_p (tree type, tree top, tree bottom)
12186 if (operand_equal_p (top, bottom, 0))
12187 return 1;
12189 if (TREE_CODE (type) != INTEGER_TYPE)
12190 return 0;
12192 switch (TREE_CODE (top))
12194 case BIT_AND_EXPR:
12195 /* Bitwise and provides a power of two multiple. If the mask is
12196 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12197 if (!integer_pow2p (bottom))
12198 return 0;
12199 /* FALLTHRU */
12201 case MULT_EXPR:
12202 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12203 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12205 case PLUS_EXPR:
12206 case MINUS_EXPR:
12207 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12208 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12210 case LSHIFT_EXPR:
12211 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12213 tree op1, t1;
12215 op1 = TREE_OPERAND (top, 1);
12216 /* const_binop may not detect overflow correctly,
12217 so check for it explicitly here. */
12218 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12219 > TREE_INT_CST_LOW (op1)
12220 && TREE_INT_CST_HIGH (op1) == 0
12221 && 0 != (t1 = fold_convert (type,
12222 const_binop (LSHIFT_EXPR,
12223 size_one_node,
12224 op1, 0)))
12225 && ! TREE_OVERFLOW (t1))
12226 return multiple_of_p (type, t1, bottom);
12228 return 0;
12230 case NOP_EXPR:
12231 /* Can't handle conversions from non-integral or wider integral type. */
12232 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12233 || (TYPE_PRECISION (type)
12234 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12235 return 0;
12237 /* .. fall through ... */
12239 case SAVE_EXPR:
12240 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12242 case INTEGER_CST:
12243 if (TREE_CODE (bottom) != INTEGER_CST
12244 || (TYPE_UNSIGNED (type)
12245 && (tree_int_cst_sgn (top) < 0
12246 || tree_int_cst_sgn (bottom) < 0)))
12247 return 0;
12248 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
12249 top, bottom, 0));
12251 default:
12252 return 0;
12256 /* Return true if `t' is known to be non-negative. */
12258 bool
12259 tree_expr_nonnegative_p (tree t)
12261 if (t == error_mark_node)
12262 return false;
12264 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12265 return true;
12267 switch (TREE_CODE (t))
12269 case SSA_NAME:
12270 /* Query VRP to see if it has recorded any information about
12271 the range of this object. */
12272 return ssa_name_nonnegative_p (t);
12274 case ABS_EXPR:
12275 /* We can't return 1 if flag_wrapv is set because
12276 ABS_EXPR<INT_MIN> = INT_MIN. */
12277 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
12278 return true;
12279 break;
12281 case INTEGER_CST:
12282 return tree_int_cst_sgn (t) >= 0;
12284 case REAL_CST:
12285 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12287 case PLUS_EXPR:
12288 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12289 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12290 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12292 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12293 both unsigned and at least 2 bits shorter than the result. */
12294 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12295 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12296 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12298 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12299 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12300 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12301 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12303 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12304 TYPE_PRECISION (inner2)) + 1;
12305 return prec < TYPE_PRECISION (TREE_TYPE (t));
12308 break;
12310 case MULT_EXPR:
12311 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12313 /* x * x for floating point x is always non-negative. */
12314 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12315 return true;
12316 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12317 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12320 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12321 both unsigned and their total bits is shorter than the result. */
12322 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12323 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12324 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12326 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12327 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12328 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12329 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12330 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12331 < TYPE_PRECISION (TREE_TYPE (t));
12333 return false;
12335 case BIT_AND_EXPR:
12336 case MAX_EXPR:
12337 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12338 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12340 case BIT_IOR_EXPR:
12341 case BIT_XOR_EXPR:
12342 case MIN_EXPR:
12343 case RDIV_EXPR:
12344 case TRUNC_DIV_EXPR:
12345 case CEIL_DIV_EXPR:
12346 case FLOOR_DIV_EXPR:
12347 case ROUND_DIV_EXPR:
12348 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12349 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12351 case TRUNC_MOD_EXPR:
12352 case CEIL_MOD_EXPR:
12353 case FLOOR_MOD_EXPR:
12354 case ROUND_MOD_EXPR:
12355 case SAVE_EXPR:
12356 case NON_LVALUE_EXPR:
12357 case FLOAT_EXPR:
12358 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12360 case COMPOUND_EXPR:
12361 case MODIFY_EXPR:
12362 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12364 case BIND_EXPR:
12365 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
12367 case COND_EXPR:
12368 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
12369 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
12371 case NOP_EXPR:
12373 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12374 tree outer_type = TREE_TYPE (t);
12376 if (TREE_CODE (outer_type) == REAL_TYPE)
12378 if (TREE_CODE (inner_type) == REAL_TYPE)
12379 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12380 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12382 if (TYPE_UNSIGNED (inner_type))
12383 return true;
12384 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12387 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12389 if (TREE_CODE (inner_type) == REAL_TYPE)
12390 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
12391 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12392 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12393 && TYPE_UNSIGNED (inner_type);
12396 break;
12398 case TARGET_EXPR:
12400 tree temp = TARGET_EXPR_SLOT (t);
12401 t = TARGET_EXPR_INITIAL (t);
12403 /* If the initializer is non-void, then it's a normal expression
12404 that will be assigned to the slot. */
12405 if (!VOID_TYPE_P (t))
12406 return tree_expr_nonnegative_p (t);
12408 /* Otherwise, the initializer sets the slot in some way. One common
12409 way is an assignment statement at the end of the initializer. */
12410 while (1)
12412 if (TREE_CODE (t) == BIND_EXPR)
12413 t = expr_last (BIND_EXPR_BODY (t));
12414 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12415 || TREE_CODE (t) == TRY_CATCH_EXPR)
12416 t = expr_last (TREE_OPERAND (t, 0));
12417 else if (TREE_CODE (t) == STATEMENT_LIST)
12418 t = expr_last (t);
12419 else
12420 break;
12422 if (TREE_CODE (t) == MODIFY_EXPR
12423 && TREE_OPERAND (t, 0) == temp)
12424 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12426 return false;
12429 case CALL_EXPR:
12431 tree fndecl = get_callee_fndecl (t);
12432 tree arglist = TREE_OPERAND (t, 1);
12433 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12434 switch (DECL_FUNCTION_CODE (fndecl))
12436 CASE_FLT_FN (BUILT_IN_ACOS):
12437 CASE_FLT_FN (BUILT_IN_ACOSH):
12438 CASE_FLT_FN (BUILT_IN_CABS):
12439 CASE_FLT_FN (BUILT_IN_COSH):
12440 CASE_FLT_FN (BUILT_IN_ERFC):
12441 CASE_FLT_FN (BUILT_IN_EXP):
12442 CASE_FLT_FN (BUILT_IN_EXP10):
12443 CASE_FLT_FN (BUILT_IN_EXP2):
12444 CASE_FLT_FN (BUILT_IN_FABS):
12445 CASE_FLT_FN (BUILT_IN_FDIM):
12446 CASE_FLT_FN (BUILT_IN_HYPOT):
12447 CASE_FLT_FN (BUILT_IN_POW10):
12448 CASE_INT_FN (BUILT_IN_FFS):
12449 CASE_INT_FN (BUILT_IN_PARITY):
12450 CASE_INT_FN (BUILT_IN_POPCOUNT):
12451 case BUILT_IN_BSWAP32:
12452 case BUILT_IN_BSWAP64:
12453 /* Always true. */
12454 return true;
12456 CASE_FLT_FN (BUILT_IN_SQRT):
12457 /* sqrt(-0.0) is -0.0. */
12458 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12459 return true;
12460 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12462 CASE_FLT_FN (BUILT_IN_ASINH):
12463 CASE_FLT_FN (BUILT_IN_ATAN):
12464 CASE_FLT_FN (BUILT_IN_ATANH):
12465 CASE_FLT_FN (BUILT_IN_CBRT):
12466 CASE_FLT_FN (BUILT_IN_CEIL):
12467 CASE_FLT_FN (BUILT_IN_ERF):
12468 CASE_FLT_FN (BUILT_IN_EXPM1):
12469 CASE_FLT_FN (BUILT_IN_FLOOR):
12470 CASE_FLT_FN (BUILT_IN_FMOD):
12471 CASE_FLT_FN (BUILT_IN_FREXP):
12472 CASE_FLT_FN (BUILT_IN_LCEIL):
12473 CASE_FLT_FN (BUILT_IN_LDEXP):
12474 CASE_FLT_FN (BUILT_IN_LFLOOR):
12475 CASE_FLT_FN (BUILT_IN_LLCEIL):
12476 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12477 CASE_FLT_FN (BUILT_IN_LLRINT):
12478 CASE_FLT_FN (BUILT_IN_LLROUND):
12479 CASE_FLT_FN (BUILT_IN_LRINT):
12480 CASE_FLT_FN (BUILT_IN_LROUND):
12481 CASE_FLT_FN (BUILT_IN_MODF):
12482 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12483 CASE_FLT_FN (BUILT_IN_RINT):
12484 CASE_FLT_FN (BUILT_IN_ROUND):
12485 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12486 CASE_FLT_FN (BUILT_IN_SINH):
12487 CASE_FLT_FN (BUILT_IN_TANH):
12488 CASE_FLT_FN (BUILT_IN_TRUNC):
12489 /* True if the 1st argument is nonnegative. */
12490 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12492 CASE_FLT_FN (BUILT_IN_FMAX):
12493 /* True if the 1st OR 2nd arguments are nonnegative. */
12494 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12495 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12497 CASE_FLT_FN (BUILT_IN_FMIN):
12498 /* True if the 1st AND 2nd arguments are nonnegative. */
12499 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12500 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12502 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12503 /* True if the 2nd argument is nonnegative. */
12504 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12506 CASE_FLT_FN (BUILT_IN_POWI):
12507 /* True if the 1st argument is nonnegative or the second
12508 argument is an even integer. */
12509 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
12511 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
12512 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
12513 return true;
12515 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12517 CASE_FLT_FN (BUILT_IN_POW):
12518 /* True if the 1st argument is nonnegative or the second
12519 argument is an even integer valued real. */
12520 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
12522 REAL_VALUE_TYPE c;
12523 HOST_WIDE_INT n;
12525 c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
12526 n = real_to_integer (&c);
12527 if ((n & 1) == 0)
12529 REAL_VALUE_TYPE cint;
12530 real_from_integer (&cint, VOIDmode, n,
12531 n < 0 ? -1 : 0, 0);
12532 if (real_identical (&c, &cint))
12533 return true;
12536 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12538 default:
12539 break;
12543 /* ... fall through ... */
12545 default:
12546 if (truth_value_p (TREE_CODE (t)))
12547 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12548 return true;
12551 /* We don't know sign of `t', so be conservative and return false. */
12552 return false;
12555 /* Return true when T is an address and is known to be nonzero.
12556 For floating point we further ensure that T is not denormal.
12557 Similar logic is present in nonzero_address in rtlanal.h. */
12559 bool
12560 tree_expr_nonzero_p (tree t)
12562 tree type = TREE_TYPE (t);
12564 /* Doing something useful for floating point would need more work. */
12565 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12566 return false;
12568 switch (TREE_CODE (t))
12570 case SSA_NAME:
12571 /* Query VRP to see if it has recorded any information about
12572 the range of this object. */
12573 return ssa_name_nonzero_p (t);
12575 case ABS_EXPR:
12576 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12578 case INTEGER_CST:
12579 /* We used to test for !integer_zerop here. This does not work correctly
12580 if TREE_CONSTANT_OVERFLOW (t). */
12581 return (TREE_INT_CST_LOW (t) != 0
12582 || TREE_INT_CST_HIGH (t) != 0);
12584 case PLUS_EXPR:
12585 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12587 /* With the presence of negative values it is hard
12588 to say something. */
12589 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12590 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12591 return false;
12592 /* One of operands must be positive and the other non-negative. */
12593 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12594 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12596 break;
12598 case MULT_EXPR:
12599 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12601 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12602 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12604 break;
12606 case NOP_EXPR:
12608 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12609 tree outer_type = TREE_TYPE (t);
12611 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12612 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
12614 break;
12616 case ADDR_EXPR:
12618 tree base = get_base_address (TREE_OPERAND (t, 0));
12620 if (!base)
12621 return false;
12623 /* Weak declarations may link to NULL. */
12624 if (VAR_OR_FUNCTION_DECL_P (base))
12625 return !DECL_WEAK (base);
12627 /* Constants are never weak. */
12628 if (CONSTANT_CLASS_P (base))
12629 return true;
12631 return false;
12634 case COND_EXPR:
12635 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12636 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
12638 case MIN_EXPR:
12639 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12640 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12642 case MAX_EXPR:
12643 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12645 /* When both operands are nonzero, then MAX must be too. */
12646 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12647 return true;
12649 /* MAX where operand 0 is positive is positive. */
12650 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12652 /* MAX where operand 1 is positive is positive. */
12653 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12654 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12655 return true;
12656 break;
12658 case COMPOUND_EXPR:
12659 case MODIFY_EXPR:
12660 case BIND_EXPR:
12661 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
12663 case SAVE_EXPR:
12664 case NON_LVALUE_EXPR:
12665 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12667 case BIT_IOR_EXPR:
12668 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12669 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12671 case CALL_EXPR:
12672 return alloca_call_p (t);
12674 default:
12675 break;
12677 return false;
12680 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12681 attempt to fold the expression to a constant without modifying TYPE,
12682 OP0 or OP1.
12684 If the expression could be simplified to a constant, then return
12685 the constant. If the expression would not be simplified to a
12686 constant, then return NULL_TREE. */
12688 tree
12689 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12691 tree tem = fold_binary (code, type, op0, op1);
12692 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12695 /* Given the components of a unary expression CODE, TYPE and OP0,
12696 attempt to fold the expression to a constant without modifying
12697 TYPE or OP0.
12699 If the expression could be simplified to a constant, then return
12700 the constant. If the expression would not be simplified to a
12701 constant, then return NULL_TREE. */
12703 tree
12704 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12706 tree tem = fold_unary (code, type, op0);
12707 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12710 /* If EXP represents referencing an element in a constant string
12711 (either via pointer arithmetic or array indexing), return the
12712 tree representing the value accessed, otherwise return NULL. */
12714 tree
12715 fold_read_from_constant_string (tree exp)
12717 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
12719 tree exp1 = TREE_OPERAND (exp, 0);
12720 tree index;
12721 tree string;
12723 if (TREE_CODE (exp) == INDIRECT_REF)
12724 string = string_constant (exp1, &index);
12725 else
12727 tree low_bound = array_ref_low_bound (exp);
12728 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12730 /* Optimize the special-case of a zero lower bound.
12732 We convert the low_bound to sizetype to avoid some problems
12733 with constant folding. (E.g. suppose the lower bound is 1,
12734 and its mode is QI. Without the conversion,l (ARRAY
12735 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12736 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
12737 if (! integer_zerop (low_bound))
12738 index = size_diffop (index, fold_convert (sizetype, low_bound));
12740 string = exp1;
12743 if (string
12744 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12745 && TREE_CODE (string) == STRING_CST
12746 && TREE_CODE (index) == INTEGER_CST
12747 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12748 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12749 == MODE_INT)
12750 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12751 return fold_convert (TREE_TYPE (exp),
12752 build_int_cst (NULL_TREE,
12753 (TREE_STRING_POINTER (string)
12754 [TREE_INT_CST_LOW (index)])));
12756 return NULL;
12759 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12760 an integer constant or real constant.
12762 TYPE is the type of the result. */
12764 static tree
12765 fold_negate_const (tree arg0, tree type)
12767 tree t = NULL_TREE;
12769 switch (TREE_CODE (arg0))
12771 case INTEGER_CST:
12773 unsigned HOST_WIDE_INT low;
12774 HOST_WIDE_INT high;
12775 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12776 TREE_INT_CST_HIGH (arg0),
12777 &low, &high);
12778 t = build_int_cst_wide (type, low, high);
12779 t = force_fit_type (t, 1,
12780 (overflow | TREE_OVERFLOW (arg0))
12781 && !TYPE_UNSIGNED (type),
12782 TREE_CONSTANT_OVERFLOW (arg0));
12783 break;
12786 case REAL_CST:
12787 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12788 break;
12790 default:
12791 gcc_unreachable ();
12794 return t;
12797 /* Return the tree for abs (ARG0) when ARG0 is known to be either
12798 an integer constant or real constant.
12800 TYPE is the type of the result. */
12802 tree
12803 fold_abs_const (tree arg0, tree type)
12805 tree t = NULL_TREE;
12807 switch (TREE_CODE (arg0))
12809 case INTEGER_CST:
12810 /* If the value is unsigned, then the absolute value is
12811 the same as the ordinary value. */
12812 if (TYPE_UNSIGNED (type))
12813 t = arg0;
12814 /* Similarly, if the value is non-negative. */
12815 else if (INT_CST_LT (integer_minus_one_node, arg0))
12816 t = arg0;
12817 /* If the value is negative, then the absolute value is
12818 its negation. */
12819 else
12821 unsigned HOST_WIDE_INT low;
12822 HOST_WIDE_INT high;
12823 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12824 TREE_INT_CST_HIGH (arg0),
12825 &low, &high);
12826 t = build_int_cst_wide (type, low, high);
12827 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
12828 TREE_CONSTANT_OVERFLOW (arg0));
12830 break;
12832 case REAL_CST:
12833 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
12834 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12835 else
12836 t = arg0;
12837 break;
12839 default:
12840 gcc_unreachable ();
12843 return t;
12846 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
12847 constant. TYPE is the type of the result. */
12849 static tree
12850 fold_not_const (tree arg0, tree type)
12852 tree t = NULL_TREE;
12854 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
12856 t = build_int_cst_wide (type,
12857 ~ TREE_INT_CST_LOW (arg0),
12858 ~ TREE_INT_CST_HIGH (arg0));
12859 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
12860 TREE_CONSTANT_OVERFLOW (arg0));
12862 return t;
12865 /* Given CODE, a relational operator, the target type, TYPE and two
12866 constant operands OP0 and OP1, return the result of the
12867 relational operation. If the result is not a compile time
12868 constant, then return NULL_TREE. */
12870 static tree
12871 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
12873 int result, invert;
12875 /* From here on, the only cases we handle are when the result is
12876 known to be a constant. */
12878 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
12880 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
12881 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
12883 /* Handle the cases where either operand is a NaN. */
12884 if (real_isnan (c0) || real_isnan (c1))
12886 switch (code)
12888 case EQ_EXPR:
12889 case ORDERED_EXPR:
12890 result = 0;
12891 break;
12893 case NE_EXPR:
12894 case UNORDERED_EXPR:
12895 case UNLT_EXPR:
12896 case UNLE_EXPR:
12897 case UNGT_EXPR:
12898 case UNGE_EXPR:
12899 case UNEQ_EXPR:
12900 result = 1;
12901 break;
12903 case LT_EXPR:
12904 case LE_EXPR:
12905 case GT_EXPR:
12906 case GE_EXPR:
12907 case LTGT_EXPR:
12908 if (flag_trapping_math)
12909 return NULL_TREE;
12910 result = 0;
12911 break;
12913 default:
12914 gcc_unreachable ();
12917 return constant_boolean_node (result, type);
12920 return constant_boolean_node (real_compare (code, c0, c1), type);
12923 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
12925 To compute GT, swap the arguments and do LT.
12926 To compute GE, do LT and invert the result.
12927 To compute LE, swap the arguments, do LT and invert the result.
12928 To compute NE, do EQ and invert the result.
12930 Therefore, the code below must handle only EQ and LT. */
12932 if (code == LE_EXPR || code == GT_EXPR)
12934 tree tem = op0;
12935 op0 = op1;
12936 op1 = tem;
12937 code = swap_tree_comparison (code);
12940 /* Note that it is safe to invert for real values here because we
12941 have already handled the one case that it matters. */
12943 invert = 0;
12944 if (code == NE_EXPR || code == GE_EXPR)
12946 invert = 1;
12947 code = invert_tree_comparison (code, false);
12950 /* Compute a result for LT or EQ if args permit;
12951 Otherwise return T. */
12952 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
12954 if (code == EQ_EXPR)
12955 result = tree_int_cst_equal (op0, op1);
12956 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
12957 result = INT_CST_LT_UNSIGNED (op0, op1);
12958 else
12959 result = INT_CST_LT (op0, op1);
12961 else
12962 return NULL_TREE;
12964 if (invert)
12965 result ^= 1;
12966 return constant_boolean_node (result, type);
12969 /* Build an expression for the a clean point containing EXPR with type TYPE.
12970 Don't build a cleanup point expression for EXPR which don't have side
12971 effects. */
12973 tree
12974 fold_build_cleanup_point_expr (tree type, tree expr)
12976 /* If the expression does not have side effects then we don't have to wrap
12977 it with a cleanup point expression. */
12978 if (!TREE_SIDE_EFFECTS (expr))
12979 return expr;
12981 /* If the expression is a return, check to see if the expression inside the
12982 return has no side effects or the right hand side of the modify expression
12983 inside the return. If either don't have side effects set we don't need to
12984 wrap the expression in a cleanup point expression. Note we don't check the
12985 left hand side of the modify because it should always be a return decl. */
12986 if (TREE_CODE (expr) == RETURN_EXPR)
12988 tree op = TREE_OPERAND (expr, 0);
12989 if (!op || !TREE_SIDE_EFFECTS (op))
12990 return expr;
12991 op = TREE_OPERAND (op, 1);
12992 if (!TREE_SIDE_EFFECTS (op))
12993 return expr;
12996 return build1 (CLEANUP_POINT_EXPR, type, expr);
12999 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13000 avoid confusing the gimplify process. */
13002 tree
13003 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13005 /* The size of the object is not relevant when talking about its address. */
13006 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13007 t = TREE_OPERAND (t, 0);
13009 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13010 if (TREE_CODE (t) == INDIRECT_REF
13011 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13013 t = TREE_OPERAND (t, 0);
13014 if (TREE_TYPE (t) != ptrtype)
13015 t = build1 (NOP_EXPR, ptrtype, t);
13017 else
13019 tree base = t;
13021 while (handled_component_p (base))
13022 base = TREE_OPERAND (base, 0);
13023 if (DECL_P (base))
13024 TREE_ADDRESSABLE (base) = 1;
13026 t = build1 (ADDR_EXPR, ptrtype, t);
13029 return t;
13032 tree
13033 build_fold_addr_expr (tree t)
13035 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13038 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13039 of an indirection through OP0, or NULL_TREE if no simplification is
13040 possible. */
13042 tree
13043 fold_indirect_ref_1 (tree type, tree op0)
13045 tree sub = op0;
13046 tree subtype;
13048 STRIP_NOPS (sub);
13049 subtype = TREE_TYPE (sub);
13050 if (!POINTER_TYPE_P (subtype))
13051 return NULL_TREE;
13053 if (TREE_CODE (sub) == ADDR_EXPR)
13055 tree op = TREE_OPERAND (sub, 0);
13056 tree optype = TREE_TYPE (op);
13057 /* *&CONST_DECL -> to the value of the const decl. */
13058 if (TREE_CODE (op) == CONST_DECL)
13059 return DECL_INITIAL (op);
13060 /* *&p => p; make sure to handle *&"str"[cst] here. */
13061 if (type == optype)
13063 tree fop = fold_read_from_constant_string (op);
13064 if (fop)
13065 return fop;
13066 else
13067 return op;
13069 /* *(foo *)&fooarray => fooarray[0] */
13070 else if (TREE_CODE (optype) == ARRAY_TYPE
13071 && type == TREE_TYPE (optype))
13073 tree type_domain = TYPE_DOMAIN (optype);
13074 tree min_val = size_zero_node;
13075 if (type_domain && TYPE_MIN_VALUE (type_domain))
13076 min_val = TYPE_MIN_VALUE (type_domain);
13077 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13079 /* *(foo *)&complexfoo => __real__ complexfoo */
13080 else if (TREE_CODE (optype) == COMPLEX_TYPE
13081 && type == TREE_TYPE (optype))
13082 return fold_build1 (REALPART_EXPR, type, op);
13085 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13086 if (TREE_CODE (sub) == PLUS_EXPR
13087 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13089 tree op00 = TREE_OPERAND (sub, 0);
13090 tree op01 = TREE_OPERAND (sub, 1);
13091 tree op00type;
13093 STRIP_NOPS (op00);
13094 op00type = TREE_TYPE (op00);
13095 if (TREE_CODE (op00) == ADDR_EXPR
13096 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13097 && type == TREE_TYPE (TREE_TYPE (op00type)))
13099 tree size = TYPE_SIZE_UNIT (type);
13100 if (tree_int_cst_equal (size, op01))
13101 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13105 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13106 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13107 && type == TREE_TYPE (TREE_TYPE (subtype)))
13109 tree type_domain;
13110 tree min_val = size_zero_node;
13111 sub = build_fold_indirect_ref (sub);
13112 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13113 if (type_domain && TYPE_MIN_VALUE (type_domain))
13114 min_val = TYPE_MIN_VALUE (type_domain);
13115 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13118 return NULL_TREE;
13121 /* Builds an expression for an indirection through T, simplifying some
13122 cases. */
13124 tree
13125 build_fold_indirect_ref (tree t)
13127 tree type = TREE_TYPE (TREE_TYPE (t));
13128 tree sub = fold_indirect_ref_1 (type, t);
13130 if (sub)
13131 return sub;
13132 else
13133 return build1 (INDIRECT_REF, type, t);
13136 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13138 tree
13139 fold_indirect_ref (tree t)
13141 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13143 if (sub)
13144 return sub;
13145 else
13146 return t;
13149 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13150 whose result is ignored. The type of the returned tree need not be
13151 the same as the original expression. */
13153 tree
13154 fold_ignored_result (tree t)
13156 if (!TREE_SIDE_EFFECTS (t))
13157 return integer_zero_node;
13159 for (;;)
13160 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13162 case tcc_unary:
13163 t = TREE_OPERAND (t, 0);
13164 break;
13166 case tcc_binary:
13167 case tcc_comparison:
13168 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13169 t = TREE_OPERAND (t, 0);
13170 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13171 t = TREE_OPERAND (t, 1);
13172 else
13173 return t;
13174 break;
13176 case tcc_expression:
13177 switch (TREE_CODE (t))
13179 case COMPOUND_EXPR:
13180 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13181 return t;
13182 t = TREE_OPERAND (t, 0);
13183 break;
13185 case COND_EXPR:
13186 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13187 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13188 return t;
13189 t = TREE_OPERAND (t, 0);
13190 break;
13192 default:
13193 return t;
13195 break;
13197 default:
13198 return t;
13202 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13203 This can only be applied to objects of a sizetype. */
13205 tree
13206 round_up (tree value, int divisor)
13208 tree div = NULL_TREE;
13210 gcc_assert (divisor > 0);
13211 if (divisor == 1)
13212 return value;
13214 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13215 have to do anything. Only do this when we are not given a const,
13216 because in that case, this check is more expensive than just
13217 doing it. */
13218 if (TREE_CODE (value) != INTEGER_CST)
13220 div = build_int_cst (TREE_TYPE (value), divisor);
13222 if (multiple_of_p (TREE_TYPE (value), value, div))
13223 return value;
13226 /* If divisor is a power of two, simplify this to bit manipulation. */
13227 if (divisor == (divisor & -divisor))
13229 tree t;
13231 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13232 value = size_binop (PLUS_EXPR, value, t);
13233 t = build_int_cst (TREE_TYPE (value), -divisor);
13234 value = size_binop (BIT_AND_EXPR, value, t);
13236 else
13238 if (!div)
13239 div = build_int_cst (TREE_TYPE (value), divisor);
13240 value = size_binop (CEIL_DIV_EXPR, value, div);
13241 value = size_binop (MULT_EXPR, value, div);
13244 return value;
13247 /* Likewise, but round down. */
13249 tree
13250 round_down (tree value, int divisor)
13252 tree div = NULL_TREE;
13254 gcc_assert (divisor > 0);
13255 if (divisor == 1)
13256 return value;
13258 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13259 have to do anything. Only do this when we are not given a const,
13260 because in that case, this check is more expensive than just
13261 doing it. */
13262 if (TREE_CODE (value) != INTEGER_CST)
13264 div = build_int_cst (TREE_TYPE (value), divisor);
13266 if (multiple_of_p (TREE_TYPE (value), value, div))
13267 return value;
13270 /* If divisor is a power of two, simplify this to bit manipulation. */
13271 if (divisor == (divisor & -divisor))
13273 tree t;
13275 t = build_int_cst (TREE_TYPE (value), -divisor);
13276 value = size_binop (BIT_AND_EXPR, value, t);
13278 else
13280 if (!div)
13281 div = build_int_cst (TREE_TYPE (value), divisor);
13282 value = size_binop (FLOOR_DIV_EXPR, value, div);
13283 value = size_binop (MULT_EXPR, value, div);
13286 return value;
13289 /* Returns the pointer to the base of the object addressed by EXP and
13290 extracts the information about the offset of the access, storing it
13291 to PBITPOS and POFFSET. */
13293 static tree
13294 split_address_to_core_and_offset (tree exp,
13295 HOST_WIDE_INT *pbitpos, tree *poffset)
13297 tree core;
13298 enum machine_mode mode;
13299 int unsignedp, volatilep;
13300 HOST_WIDE_INT bitsize;
13302 if (TREE_CODE (exp) == ADDR_EXPR)
13304 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13305 poffset, &mode, &unsignedp, &volatilep,
13306 false);
13307 core = build_fold_addr_expr (core);
13309 else
13311 core = exp;
13312 *pbitpos = 0;
13313 *poffset = NULL_TREE;
13316 return core;
13319 /* Returns true if addresses of E1 and E2 differ by a constant, false
13320 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13322 bool
13323 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13325 tree core1, core2;
13326 HOST_WIDE_INT bitpos1, bitpos2;
13327 tree toffset1, toffset2, tdiff, type;
13329 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13330 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13332 if (bitpos1 % BITS_PER_UNIT != 0
13333 || bitpos2 % BITS_PER_UNIT != 0
13334 || !operand_equal_p (core1, core2, 0))
13335 return false;
13337 if (toffset1 && toffset2)
13339 type = TREE_TYPE (toffset1);
13340 if (type != TREE_TYPE (toffset2))
13341 toffset2 = fold_convert (type, toffset2);
13343 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13344 if (!cst_and_fits_in_hwi (tdiff))
13345 return false;
13347 *diff = int_cst_value (tdiff);
13349 else if (toffset1 || toffset2)
13351 /* If only one of the offsets is non-constant, the difference cannot
13352 be a constant. */
13353 return false;
13355 else
13356 *diff = 0;
13358 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13359 return true;
13362 /* Simplify the floating point expression EXP when the sign of the
13363 result is not significant. Return NULL_TREE if no simplification
13364 is possible. */
13366 tree
13367 fold_strip_sign_ops (tree exp)
13369 tree arg0, arg1;
13371 switch (TREE_CODE (exp))
13373 case ABS_EXPR:
13374 case NEGATE_EXPR:
13375 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13376 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13378 case MULT_EXPR:
13379 case RDIV_EXPR:
13380 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13381 return NULL_TREE;
13382 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13383 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13384 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13385 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13386 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13387 arg1 ? arg1 : TREE_OPERAND (exp, 1));
13388 break;
13390 case COMPOUND_EXPR:
13391 arg0 = TREE_OPERAND (exp, 0);
13392 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13393 if (arg1)
13394 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
13395 break;
13397 case COND_EXPR:
13398 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13399 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
13400 if (arg0 || arg1)
13401 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
13402 arg0 ? arg0 : TREE_OPERAND (exp, 1),
13403 arg1 ? arg1 : TREE_OPERAND (exp, 2));
13404 break;
13406 case CALL_EXPR:
13408 const enum built_in_function fcode = builtin_mathfn_code (exp);
13409 switch (fcode)
13411 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13412 /* Strip copysign function call, return the 1st argument. */
13413 arg0 = TREE_VALUE (TREE_OPERAND (exp, 1));
13414 arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (exp, 1)));
13415 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
13417 default:
13418 /* Strip sign ops from the argument of "odd" math functions. */
13419 if (negate_mathfn_p (fcode))
13421 arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
13422 if (arg0)
13423 return build_function_call_expr (get_callee_fndecl (exp),
13424 build_tree_list (NULL_TREE,
13425 arg0));
13427 break;
13430 break;
13432 default:
13433 break;
13435 return NULL_TREE;