/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "coretypes.h"
#include "fixed-value.h"
#include "langhooks.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */

int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
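
/* Editor's note: an illustrative sketch, not part of the original
   source.  In the encoding above, bit 0 means LT, bit 1 means EQ,
   bit 2 means GT and bit 3 means UNORD, so combining two comparisons
   of the same operands with OR (or AND) reduces to a bitwise OR (or
   AND) of their codes; e.g. COMPCODE_LT (1) | COMPCODE_EQ (2) is
   COMPCODE_LE (3).  The function name is hypothetical.  */

static inline enum comparison_code ATTRIBUTE_UNUSED
compcode_or_example (enum comparison_code c1, enum comparison_code c2)
{
  /* (a < b) || (a == b) collapses to (a <= b) by bitwise OR.  */
  return (enum comparison_code) (c1 | c2);
}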
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
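
/* Editor's note: an illustrative sketch, not part of the original
   source; the function name is hypothetical.  Adding 1 to the largest
   positive HOST_WIDE_INT wraps to a negative sum, which
   OVERFLOW_SUM_SIGN detects; operands of opposite sign can never
   overflow, so the macro would then yield zero.  The sum is formed in
   unsigned arithmetic, as the double-word routines below do, to avoid
   undefined signed overflow.  */

static int ATTRIBUTE_UNUSED
overflow_sum_sign_example (void)
{
  HOST_WIDE_INT a = (HOST_WIDE_INT)
    (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)) - 1);
  HOST_WIDE_INT b = 1;
  HOST_WIDE_INT sum = (HOST_WIDE_INT)
    ((unsigned HOST_WIDE_INT) a + (unsigned HOST_WIDE_INT) b);

  return OVERFLOW_SUM_SIGN (a, b, sum);	/* nonzero: overflow occurred */
}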
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
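
/* Editor's note: an illustrative sketch, not part of the original
   source; the function name is hypothetical.  encode and decode are
   exact inverses: splitting a double-word value into four half-words
   and packing them back reproduces the original pieces.  */

static int ATTRIBUTE_UNUSED
encode_decode_roundtrip_example (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low, low0 = 0xdeadbeef;
  HOST_WIDE_INT hi, hi0 = -1;

  encode (words, low0, hi0);
  decode (words, &low, &hi);
  return low == low0 && hi == hi0;	/* always 1 */
}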
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
		<< (prec - HOST_BITS_PER_WIDE_INT - 1)))
	h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
	h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  h1 = -1;
	  l1 |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
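
/* Editor's note: an illustrative sketch, not part of the original
   source; the function name is hypothetical.  Fitting 0x1ff into the
   8-bit signed_char_type_node clears all bits above bit 7 (leaving
   0xff), sign extends the remaining value to -1, and reports overflow
   because the result differs from the argument.  */

static int ATTRIBUTE_UNUSED
fit_double_type_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int overflow = fit_double_type (0x1ff, 0, &lv, &hv, signed_char_type_node);

  /* Now lv == (unsigned HOST_WIDE_INT) -1, hv == -1, overflow != 0.  */
  return overflow;
}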
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
		       HOST_WIDE_INT high, int overflowable,
		       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  tree t = make_node (INTEGER_CST);
	  TREE_INT_CST_LOW (t) = low;
	  TREE_INT_CST_HIGH (t) = high;
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
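
/* Editor's note: an illustrative sketch, not part of the original
   source; the function name is hypothetical.  The low words are added
   first; the carry-out is recovered by the unsigned comparison
   `l < l1' (the sum wrapped) and folded into the high-word sum.  */

static int ATTRIBUTE_UNUSED
add_double_carry_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  /* (-1, 0) + (1, 0): the low half wraps to 0 and carries into the
     high half, giving (0, 1) with no signed overflow.  */
  int ov = add_double_with_sign ((unsigned HOST_WIDE_INT) -1, 0, 1, 0,
				 &lv, &hv, false);
  return lv == 0 && hv == 1 && !ov;	/* always 1 */
}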
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
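
/* Editor's note: an illustrative sketch, not part of the original
   source; the function name is hypothetical.  Squaring 2^(N-1), where
   N is HOST_BITS_PER_WIDE_INT, needs exactly 2N-1 bits, so the product
   still fits in the double word and no unsigned overflow is
   reported.  */

static int ATTRIBUTE_UNUSED
mul_double_example (void)
{
  unsigned HOST_WIDE_INT half = (unsigned HOST_WIDE_INT) 1
				<< (HOST_BITS_PER_WIDE_INT - 1);
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  int ov = mul_double_with_sign (half, 0, half, 0, &lv, &hv, true);
  /* Product is 2^(2N-2): lv == 0, hv == 2^(N-2), no overflow.  */
  return !ov && lv == 0;
}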
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
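
/* Editor's note: an illustrative sketch, not part of the original
   source; the function name is hypothetical.  Dividing -7 by 2 under
   different rounding codes: TRUNC gives quotient -3, remainder -1;
   FLOOR gives quotient -4, remainder 1 (the remainder is recomputed
   after the quotient is adjusted toward negative infinity).  */

static void ATTRIBUTE_UNUSED
div_rounding_example (void)
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  div_and_round_double (TRUNC_DIV_EXPR, 0,
			(unsigned HOST_WIDE_INT) -7, -1, 2, 0,
			&lquo, &hquo, &lrem, &hrem);
  /* lquo == (unsigned HOST_WIDE_INT) -3, lrem == (unsigned HOST_WIDE_INT) -1.  */

  div_and_round_double (FLOOR_DIV_EXPR, 0,
			(unsigned HOST_WIDE_INT) -7, -1, 2, 0,
			&lquo, &hquo, &lrem, &hrem);
  /* lquo == (unsigned HOST_WIDE_INT) -4, lrem == 1.  */
}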
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h,
		       type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !expr_has_location (stmt))
    locus = input_location;
  else
    locus = expr_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }

  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
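
/* Editor's note: an illustrative sketch, not part of the original
   source; the function name is hypothetical.  In a two's complement
   type the most negative value has no positive counterpart, so it is
   the one signed constant that cannot be safely negated.  */

static bool ATTRIBUTE_UNUSED
may_negate_example (void)
{
  tree int_min = TYPE_MIN_VALUE (integer_type_node);
  tree minus_one = build_int_cst (integer_type_node, -1);

  return !may_negate_without_overflow_p (int_min)
	 && may_negate_without_overflow_p (minus_one);	/* true */
}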
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
			    build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2 (COMPLEX_EXPR, type,
			    fold_negate_expr (TREE_OPERAND (t, 0)),
			    fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1 (CONJ_EXPR, type,
			    fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2 (MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr (fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
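
/* Editor's note: an illustrative sketch, not part of the original
   source; the function name is hypothetical and X stands for any
   non-constant tree of integer type.  For IN == X + 5 under
   PLUS_EXPR, the literal 5 lands in *LITP, the variable part X is
   returned, and the other outputs stay null.  */

static tree ATTRIBUTE_UNUSED
split_tree_example (tree x)
{
  tree con, lit, minus_lit, var;
  tree in = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x,
			 build_int_cst (TREE_TYPE (x), 5));

  var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit, 0);
  /* Here var == x, lit == 5, con == NULL, minus_lit == NULL.  */
  return var;
}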
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
		      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      break;

    default:
      return false;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
			       ((!uns || is_sizetype) && overflow)
			       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may dependent upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
	       && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	TREE_CONSTANT_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2, notrunc);
	  imag = const_binop (code, i1, i2, notrunc);
	  break;

	case MULT_EXPR:
	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2, notrunc),
			      const_binop (MULT_EXPR, i1, i2, notrunc),
			      notrunc);
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2, notrunc),
			      const_binop (MULT_EXPR, i1, r2, notrunc),
			      notrunc);
	  break;

	case RDIV_EXPR:
	  {
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2, notrunc),
			     const_binop (MULT_EXPR, i1, i2, notrunc),
			     notrunc);
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2, notrunc),
			     const_binop (MULT_EXPR, r1, i2, notrunc),
			     notrunc);

	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
	      code = TRUNC_DIV_EXPR;

	    real = const_binop (code, t1, magsquared, notrunc);
	    imag = const_binop (code, t2, magsquared, notrunc);
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
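
/* Editor's note: an illustrative sketch, not part of the original
   source; the function name is hypothetical.  Both operands come from
   sizetype, so the fast constant path applies and the result is a
   shared INTEGER_CST.  */

static tree ATTRIBUTE_UNUSED
size_binop_example (void)
{
  /* size_int (4) + size_int (8) folds to size_int (12) at compile
     time, as offset and size computations throughout GCC rely on.  */
  return size_binop (PLUS_EXPR, size_int (4), size_int (8));
}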
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
			     TREE_INT_CST_HIGH (arg1),
			     /* Don't set the overflow when
				converting from a pointer,  */
			     !POINTER_TYPE_P (TREE_TYPE (arg1))
			     /* or to a sizetype with same signedness
				and the precision is unchanged.
				???  sizetype is always sign-extended,
				but its signedness depends on the
				frontend.  Thus we see spurious overflows
				here if we do not check this.  */
			     && !((TYPE_PRECISION (TREE_TYPE (arg1))
				   == TYPE_PRECISION (type))
				  && (TYPE_UNSIGNED (TREE_TYPE (arg1))
				      == TYPE_UNSIGNED (type))
				  && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
				       && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
				      || (TREE_CODE (type) == INTEGER_TYPE
					  && TYPE_IS_SIZETYPE (type)))),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = force_fit_type_double (type, low, high, -1,
			     overflow | TREE_OVERFLOW (arg1));
  return t;
}
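
/* Illustrative sketch (editor's addition, not part of GCC): the saturating
   conversion described above, written directly for double -> int.  The
   threshold comparisons are exact because INT_MIN and INT_MAX are exactly
   representable as doubles.  */
#if 0
#include <math.h>
#include <limits.h>

static int
sat_double_to_int (double r)
{
  r = trunc (r);               /* FIX_TRUNC_EXPR: round toward zero */
  if (isnan (r))
    return 0;                  /* NaN maps to zero */
  if (r < (double) INT_MIN)
    return INT_MIN;            /* saturate below */
  if (r > (double) INT_MAX)
    return INT_MAX;            /* saturate above */
  return (int) r;
}
#endif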
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      lshift_double (temp.low, temp.high,
		     - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
		     &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      lshift_double (temp.low, temp.high,
		     GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
		     &temp_trunc.low, &temp_trunc.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp.low = 0;
      temp.high = 0;
      temp_trunc.low = 0;
      temp_trunc.high = 0;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do so by checking whether the fractional bits are nonzero,
     and if so adding 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    {
      double_int one;
      one.low = 1;
      one.high = 0;
      temp = double_int_add (temp, one);
    }

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp.low, temp.high, -1,
			     (temp.high < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating-point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating-point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    {
      TREE_OVERFLOW (t) = 1;
      TREE_CONSTANT_OVERFLOW (t) = 1;
    }
  else if (TREE_CONSTANT_OVERFLOW (arg1))
    TREE_CONSTANT_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    {
      TREE_OVERFLOW (t) = 1;
      TREE_CONSTANT_OVERFLOW (t) = 1;
    }
  else if (TREE_CONSTANT_OVERFLOW (arg1))
    TREE_CONSTANT_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    {
      TREE_OVERFLOW (t) = 1;
      TREE_CONSTANT_OVERFLOW (t) = 1;
    }
  else if (TREE_CONSTANT_OVERFLOW (arg1))
    TREE_CONSTANT_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);
	}
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1 (FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1 (NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1 (FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1 (FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return build2 (COMPLEX_EXPR, type,
			 fold_convert (TREE_TYPE (type), arg),
			 fold_convert (TREE_TYPE (type), integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
		return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert (TREE_TYPE (type), rpart);
	    ipart = fold_convert (TREE_TYPE (type), ipart);
	    return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
	return tem;
      return fold_build1 (NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case GIMPLE_MODIFY_STMT:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
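
/* Illustrative sketch (editor's addition, not part of GCC): why the
   NaN-aware inverse of '<' must be the unordered-or-greater-equal test
   (UNGE_EXPR above) rather than plain '>='.  When either operand is NaN,
   both x < y and x >= y are false, so '>=' is not the logical negation
   of '<'.  */
#if 0
static int
not_less (double x, double y)
{
  return !(x < y);                         /* true when x or y is NaN */
}

static int
unordered_or_ge (double x, double y)       /* a C rendering of UNGE */
{
  return (x != x) || (y != y) || x >= y;   /* also true for NaN operands */
}
#endif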
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
			truth_type, ll_arg, lr_arg);
}
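
/* Illustrative sketch (editor's addition, not part of GCC): with the bit
   encoding used above, LT, EQ and GT each occupy one bit, LE is LT|EQ and
   GE is GT|EQ.  ANDing the codes of the two comparisons below leaves
   exactly the EQ bit, so the conjunction folds to a single test.  */
#if 0
static int
combined (int x, int y)
{
  return (x <= y) && (x >= y);   /* combine_comparisons folds this to x == y */
}
#endif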
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
				   TREE_REAL_CST (arg1)))
	  return 1;

	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  tree v1, v2;

	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    flags))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return v1 == v2;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, they both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case MISALIGNED_INDIRECT_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.
	     Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  return (OP_SAME (0)
		  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
					  TREE_OPERAND (arg1, 1))
		      || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  return OP_SAME_WITH_NULL (0)
		 && OP_SAME (1)
		 && OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				 flags))
	    return 0;

	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return 0;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }
	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
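
/* Illustrative sketch (editor's addition, not part of GCC): why REAL_CSTs
   are compared with REAL_VALUES_IDENTICAL above rather than with '=='.
   The two IEEE zeros compare equal yet are not interchangeable as
   constants.  */
#if 0
#include <math.h>

static void
zeros_differ (void)
{
  double pz = 0.0, nz = -0.0;
  int equal = (pz == nz);                          /* 1: '==' can't tell them apart */
  int identical = (signbit (pz) == signbit (nz));  /* 0: e.g. 1.0/pz != 1.0/nz */
  (void) equal; (void) identical;
}
#endif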
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold_build1 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold_build3 (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2 (code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code,
				     HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      return build2 (code, type,
		     TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      return build2 (TRUTH_OR_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2 (TRUTH_XOR_EXPR, type,
		       invert_truthvalue (TREE_OPERAND (arg, 0)),
		       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);
	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
		       VOID_TYPE_P (TREE_TYPE (arg1))
		       ? arg1 : invert_truthvalue (arg1),
		       VOID_TYPE_P (TREE_TYPE (arg2))
		       ? arg2 : invert_truthvalue (arg2));
      }

    case COMPOUND_EXPR:
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1 (TRUTH_NOT_EXPR, type, arg);

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      return build2 (EQ_EXPR, type, arg,
		     build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }

  return NULL_TREE;
}
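
/* Illustrative sketch (editor's addition, not part of GCC): the
   TRUTH_AND/TRUTH_OR cases above are De Morgan's laws applied on trees.  */
#if 0
static int
demorgan (int a, int b)
{
  return !(a && b);   /* fold_truth_not_expr rewrites this as !a || !b */
}
#endif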
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue (tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (arg);
  if (!tem)
    tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold_build2 (TREE_CODE (arg0), type, common,
		      fold_build2 (code, type, left, right));
}
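
/* Illustrative sketch (editor's addition, not part of GCC): the
   distribution above replaces two ORs and an AND by one OR and one AND;
   when b and c are constants, (b & c) folds away entirely.  */
#if 0
static unsigned
distributed (unsigned a, unsigned b, unsigned c)
{
  /* (a | b) & (a | c) == a | (b & c) for all bit patterns.  */
  return a | (b & c);
}
#endif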
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
			fold_build2 (code, type,
				     TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0)),
			TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2 (MULT_EXPR, type,
			  TREE_OPERAND (arg0, 0),
			  build_real (type, r0));
    }

  return NULL_TREE;
}
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
			fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
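
/* Illustrative sketch (editor's addition, not part of GCC): the
   double-shift mask computation above in plain C, assuming the unsigned
   type is exactly PRECISION bits wide and 1 <= width <= precision.  */
#if 0
static unsigned int
field_mask (unsigned int precision, unsigned int width)
{
  unsigned int mask = ~0u;       /* build_int_cst_type (unsigned_type, -1) */
  mask <<= precision - width;    /* push all but WIDTH bits off the top */
  mask >>= precision - width;    /* pull them back: WIDTH low-order ones */
  return mask;                   /* e.g. width 5 -> 0x1f */
}
#endif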
/* Subroutine for fold: determine if VAL is the INTEGER_CST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
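
/* Illustrative sketch (editor's addition, not part of GCC): what
   sign_bit_p recognizes.  For a W-bit signed type the sign bit is the
   value 1 << (W - 1), e.g. 0x80 for signed char, and a mask test
   against it is really a sign test.  */
#if 0
static int
sign_bit_set (signed char c)
{
  return (c & 0x80) != 0;   /* foldable to c < 0 */
}
#endif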
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
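
/* Illustrative sketch (editor's addition, not part of GCC): the canonical
   example from the comment above.  The unsigned subtraction wraps modulo
   2^N, mapping exactly the values 2..5 onto 0..3, so the two functions
   agree for every int x.  */
#if 0
static int
chain_of_tests (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
range_test (int x)
{
  return (unsigned) x - 2u <= 3u;   /* one subtract, one compare */
}
#endif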
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
4124 /* Given EXP, a logical expression, set the range it is testing into
4125 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4126 actually being tested. *PLOW and *PHIGH will be made of the same
4127 type as the returned expression. If EXP is not a comparison, we
4128 will most likely not be returning a useful value and range. Set
4129 *STRICT_OVERFLOW_P to true if the return value is only valid
4130 because signed overflow is undefined; otherwise, do not change
4131 *STRICT_OVERFLOW_P. */
4134 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
4135 bool *strict_overflow_p
)
4137 enum tree_code code
;
4138 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
;
4139 tree exp_type
= NULL_TREE
, arg0_type
= NULL_TREE
;
4141 tree low
, high
, n_low
, n_high
;
4143 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4144 and see if we can refine the range. Some of the cases below may not
4145 happen, but it doesn't seem worth worrying about this. We "continue"
4146 the outer loop when we've changed something; otherwise we "break"
4147 the switch, which will "break" the while. */
4150 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
4154 code
= TREE_CODE (exp
);
4155 exp_type
= TREE_TYPE (exp
);
4157 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
4159 if (TREE_OPERAND_LENGTH (exp
) > 0)
4160 arg0
= TREE_OPERAND (exp
, 0);
4161 if (TREE_CODE_CLASS (code
) == tcc_comparison
4162 || TREE_CODE_CLASS (code
) == tcc_unary
4163 || TREE_CODE_CLASS (code
) == tcc_binary
)
4164 arg0_type
= TREE_TYPE (arg0
);
4165 if (TREE_CODE_CLASS (code
) == tcc_binary
4166 || TREE_CODE_CLASS (code
) == tcc_comparison
4167 || (TREE_CODE_CLASS (code
) == tcc_expression
4168 && TREE_OPERAND_LENGTH (exp
) > 1))
4169 arg1
= TREE_OPERAND (exp
, 1);
4174 case TRUTH_NOT_EXPR
:
4175 in_p
= ! in_p
, exp
= arg0
;
4178 case EQ_EXPR
: case NE_EXPR
:
4179 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
4180 /* We can only do something if the range is testing for zero
4181 and if the second operand is an integer constant. Note that
4182 saying something is "in" the range we make is done by
4183 complementing IN_P since it will set in the initial case of
4184 being not equal to zero; "out" is leaving it alone. */
4185 if (low
== 0 || high
== 0
4186 || ! integer_zerop (low
) || ! integer_zerop (high
)
4187 || TREE_CODE (arg1
) != INTEGER_CST
)
4192 case NE_EXPR
: /* - [c, c] */
4195 case EQ_EXPR
: /* + [c, c] */
4196 in_p
= ! in_p
, low
= high
= arg1
;
4198 case GT_EXPR
: /* - [-, c] */
4199 low
= 0, high
= arg1
;
4201 case GE_EXPR
: /* + [c, -] */
4202 in_p
= ! in_p
, low
= arg1
, high
= 0;
4204 case LT_EXPR
: /* - [c, -] */
4205 low
= arg1
, high
= 0;
4207 case LE_EXPR
: /* + [-, c] */
4208 in_p
= ! in_p
, low
= 0, high
= arg1
;
4214 /* If this is an unsigned comparison, we also know that EXP is
4215 greater than or equal to zero. We base the range tests we make
4216 on that fact, so we record it here so we can parse existing
4217 range tests. We test arg0_type since often the return type
4218 of, e.g. EQ_EXPR, is boolean. */
4219 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
4221 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4223 build_int_cst (arg0_type
, 0),
4227 in_p
= n_in_p
, low = n_low, high = n_high;

	      /* If the high bound is missing, but we have a nonzero low
		 bound, reverse the range so it goes from zero to the low bound
		 minus 1.  */
	      if (high == 0 && low && ! integer_zerop (low))
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = build_int_cst (arg0_type, 0);
		}
	    }

	  exp = arg0;
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, exp_type,
			       build_int_cst (exp_type, 0),
			       0, high, 1);
	  n_high = range_binop (MINUS_EXPR, exp_type,
				build_int_cst (exp_type, 0),
				0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1  */
	  exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
			build_int_cst (exp_type, 1));
	  continue;

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	     move a constant to the other side.  */
	  if (!TYPE_UNSIGNED (arg0_type)
	      && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       arg0_type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				arg0_type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    *strict_overflow_p = true;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	    break;

	  if (! INTEGRAL_TYPE_P (arg0_type)
	      || (low != 0 && ! int_fits_type_p (low, arg0_type))
	      || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = fold_convert (arg0_type, n_low);

	  if (n_high != 0)
	    n_high = fold_convert (arg0_type, n_high);

	  /* If we're converting arg0 from an unsigned type, to exp,
	     a signed type, we will be doing the comparison as unsigned.
	     The tests above have already verified that LOW and HIGH
	     are both positive.

	     So we have to ensure that we will handle large unsigned
	     values the same way that the current signed bounds treat
	     negative values.  */

	  if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	    {
	      tree high_positive;
	      tree equiv_type;
	      /* For fixed-point modes, we need to pass the saturating flag
		 as the 2nd parameter.  */
	      if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
		equiv_type = lang_hooks.types.type_for_mode
			     (TYPE_MODE (arg0_type),
			      TYPE_SATURATING (arg0_type));
	      else
		equiv_type = lang_hooks.types.type_for_mode
			     (TYPE_MODE (arg0_type), 1);

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (arg0_type);

	      if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
		high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
					     fold_convert (arg0_type,
							   high_positive),
					     build_int_cst (arg0_type, 1));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
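
/* Illustrative sketch (not from the original sources): the kind of
   source-level fact make_range computes.  Over unsigned arithmetic,
   "x + 10 < 30" describes a wrapped range for x once the constant is
   moved onto the bounds, exactly the normalization performed in the
   PLUS_EXPR/MINUS_EXPR case above.  */
static int
make_range_demo (unsigned int x)
{
  int a = x + 10 < 30;
  /* The addition wraps, so the matching range for x is
     [0, 19] union [0xFFFFFFF6, 0xFFFFFFFF] on a 32-bit unsigned.  */
  int b = (x <= 19) || (x >= (unsigned int) -10);
  return a == b;  /* Always 1.  */
}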
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue (value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2 (LE_EXPR, type, exp,
			fold_convert (etype, high));

  if (high == 0)
    return fold_build2 (GE_EXPR, type, exp,
			fold_convert (etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2 (EQ_EXPR, type, exp,
			fold_convert (etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert (etype, high);
	  exp = fold_convert (etype, exp);
	}
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      etype = signed_type_for (etype);
	      exp = fold_convert (etype, exp);
	    }
	  return fold_build2 (GT_EXPR, type, exp,
			      build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.  */
  switch (TREE_CODE (etype))
    {
    case INTEGER_TYPE:
      /* There is no requirement that LOW be within the range of ETYPE
	 if the latter is a subtype.  It must, however, be within the base
	 type of ETYPE.  So be sure we do the subtraction in that type.  */
      if (TREE_TYPE (etype))
	etype = TREE_TYPE (etype);
      break;

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					      TYPE_UNSIGNED (etype));
      break;

    default:
      break;
    }

  /* If we don't have wrap-around arithmetic upfront, try to force it.  */
  if (TREE_CODE (etype) == INTEGER_TYPE
      && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert (utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert (etype, high);
  low = fold_convert (etype, low);
  exp = fold_convert (etype, exp);

  value = const_binop (MINUS_EXPR, high, low, 0);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_convert (sizetype, low);
	  low = fold_build1 (NEGATE_EXPR, sizetype, low);
	  return build_range_check (type,
				    fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (type,
			      fold_build2 (MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
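
/* Illustrative sketch (not part of the original file): the wrap-around
   trick build_range_check relies on.  A two-sided test
   low <= c && c <= high becomes a single unsigned comparison of
   c - low against high - low, because any c below LOW wraps to a
   large unsigned value.  */
static int
range_check_demo (int c)
{
  int a = (c >= '0' && c <= '9');
  int b = ((unsigned int) c - '0' <= (unsigned int) ('9' - '0'));
  return a == b;  /* Always 1.  */
}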
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
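
/* Illustrative sketch (an assumption, not original code): merging two
   excluded ranges, the "adjacent" case handled above.  Excluding the
   adjacent ranges [0,0] and [1,1] yields the single excluded range
   [0,1], so the AND of two inequalities folds to one comparison.  */
static int
merge_ranges_demo (unsigned int c)
{
  int a = (c != 0) && (c != 1);
  int b = (c > 1);   /* - [0,0] merged with - [1,1] gives - [0,1].  */
  return a == b;     /* Always 1.  */
}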
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is also used to optimize
   A op B ? C : A by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
		have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert (arg1_type, arg1);
	return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (signed_type_for
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (signed_type_for
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert (type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert (type, arg01);
	return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;

      case NE_EXPR:
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
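
/* Illustrative sketch (not from the original sources): two of the
   rewrites tabulated above, expressed at the source level.  They are
   unconditional here only because integer types have no signed zeros
   or NaNs to honor.  */
static int
cond_expr_demo (int a, int b)
{
  int abs_a = a >= 0 ? a : -a;    /* A >= 0 ? A : -A  ==  abs (A)    */
  int min_ab = a < b ? a : b;     /* A < B  ? A : B   ==  min (B, A) */
  return abs_a + min_ab;
}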
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue (tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2 (code == TRUTH_ANDIF_EXPR
		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		       type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2 (code == TRUTH_ANDIF_EXPR
			     ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			     type, lhs, rhs);
	    }
	}
    }

  return 0;
}
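
/* Illustrative sketch (not part of the original file): the effect of
   fold_range_test on a short-circuit chain.  Both operands range-test
   the same object, so after inverting for the OR the ranges merge and
   the inverted range check is returned, eliminating one branch.  */
static int
range_test_demo (int ch)
{
  int a = (ch < '0') || (ch > '9');            /* two tests, two branches */
  int b = ! ((unsigned int) (ch - '0') <= 9u); /* one merged range check  */
  return a == b;  /* Always 1.  */
}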
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
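
/* Illustrative sketch (not original code): the sign-extension that
   unextend reasons about, for a hypothetical 4-bit field.  0x0b is
   1011 in binary, so its 4-bit sign bit is set and extending it to a
   full int yields -5; unextend arranges for the extra high bits to be
   zero exactly when such an extension happened.  */
static int
unextend_demo (void)
{
  int p = 4;
  int c = 0x0b;                    /* 4-bit value with sign bit set  */
  int sign = (c >> (p - 1)) & 1;   /* isolate the sign bit           */
  int ext = sign ? c | ~0xf : c;   /* sign-extend from 4 bits        */
  return ext == -5;                /* 1: 1011 as signed 4-bit is -5  */
}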
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xrl_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, result;
  int first_bit, end_bit;
  int volatilep;
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code orig_code = code;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2 (NE_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2 (EQ_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
	{
	  if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
	    return build2 (code, truth_type, lhs, rhs);
	  return NULL_TREE;
	}
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, ll_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, rl_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);

  return build2 (wanted_code, truth_type, result,
		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
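
/* Illustrative sketch (not from the original sources): the bit-field
   merge fold_truthop performs.  The union and its layout below are
   hypothetical and target-dependent; the point is that two narrow
   equality tests on fields sharing one storage unit become a single
   comparison of the whole unit against a merged constant.  */
union truthop_demo_u
{
  struct { unsigned char a : 4; unsigned char b : 4; } f;
  unsigned char whole;
};

static int
truthop_demo (union truthop_demo_u u)
{
  int slow = (u.f.a == 2 && u.f.b == 4);
  /* On an assumed little-endian layout the merged constant is
     (4 << 4) | 2 and the merged mask covers the whole byte, so no
     BIT_AND is needed at all.  */
  int fast = (u.whole == ((4 << 4) | 2));
  return slow == fast;  /* 1 under the assumed layout.  */
}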
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const, minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert (TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
					       type, op0, op1);
	if (tem)
	  return invert_truthvalue (tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2 (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (EQ_EXPR, type, arg0, comp_const),
		     optimize_minmax_comparison
		     (GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2 (LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2 (GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
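
/* Illustrative sketch (not original code): two of the rewrites listed
   above.  MAX (X, 0) > 5 can only hold via X itself, so the MAX drops
   out, and MAX (X, 0) == -1 can never hold.  */
static int
minmax_demo (int x)
{
  int mx = x > 0 ? x : 0;        /* MAX (X, 0)                     */
  int a = (mx > 5) == (x > 5);   /* MAX (X, 0) > 5   ->  X > 5     */
  int b = (mx == -1) == 0;       /* MAX (X, 0) == -1 ->  false     */
  return a && b;                 /* Always 1.  */
}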
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (GET_MODE_SIZE (TYPE_MODE (type))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
				     fold_convert (ctype, c), 0))
	  && !TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation or CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((TYPE_OVERFLOW_UNDEFINED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c, 0)));
	    }
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1, 0)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
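
/* Illustrative sketch (not from the original sources): the headline
   transformation in the comment above extract_muldiv.  Dividing
   (X * 8) + (Y * 16) by 4 distributes through the sum because both
   multipliers are multiples of 4.  Valid as written only when the
   intermediate products do not overflow, or when such overflow is
   undefined and may be assumed away, which is what *STRICT_OVERFLOW_P
   records.  */
static int
extract_muldiv_demo (int x, int y)
{
  int a = (x * 8 + y * 16) / 4;
  int b = x * 2 + y * 4;
  return a == b;  /* 1 in the absence of overflow.  */
}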
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert (arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert (cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2 (code, type, true_value, arg);
      else
	lhs = fold_build2 (code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert (cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2 (code, type, false_value, arg);
      else
	rhs = fold_build2 (code, type, arg, false_value);
    }

  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
  return fold_convert (type, test);
}
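
/* Illustrative sketch (not original code): the transform documented
   above.  With ARG constant, the operation is pushed into both arms of
   the conditional, where each arm may then fold further.  */
static int
cond_arg_demo (int b, int x, int y)
{
  int a1 = 1 + (b ? x : y);        /* a + (b ? x : y)       */
  int a2 = b ? (1 + x) : (1 + y);  /* b ? (a + x) : (a + y) */
  return a1 == a2;                 /* Always 1.  */
}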
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
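
/* Illustrative sketch (not from the original sources): why X + 0.0 and
   X - 0.0 differ once signed zeros are honored, assuming IEEE
   arithmetic in the default round-to-nearest mode.  -0.0 + 0.0 is
   +0.0, so addition loses the sign, while subtraction keeps it.  */
static int
zero_addition_demo (void)
{
  double neg_zero = -0.0;
  double added = neg_zero + 0.0;    /* +0.0: the sign was lost.  */
  double subbed = neg_zero - 0.0;   /* -0.0: the sign survives.  */
  /* Distinguish +0.0 from -0.0 by the sign of the reciprocal.  */
  return (1.0 / added > 0.0) && (1.0 / subbed < 0.0);  /* 1.  */
}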
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand (type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand (type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2 (GE_EXPR, type, arg,
			      build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2 (EQ_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand (type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2 (code, type, arg,
			      build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand (type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2 (NE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2 (GE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (NE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2 (code, type, arg,
				build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (code, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }
	}
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */
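/* Two illustrative instances of the folds below, for a double X:

       X < +Inf    becomes   X <= DBL_MAX
       X > +Inf    becomes   0 (once sNaNs can be ignored)

   both rewrites also hold for NaN X, because every ordered comparison
   against a NaN is false.  */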
static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
        return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
          && ! CONTAINS_PLACEHOLDER_P (arg0))
        {
          arg0 = save_expr (arg0);
          return fold_build2 (EQ_EXPR, type, arg0, arg0);
        }
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
        return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                            arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */
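/* Worked example of the range construction below, for unsigned X
   (values illustrative): X / 4 == 3 gives prod = 12 and tmp = 3,
   hence the bounds lo = 12 and hi = 15, and the comparison folds to
   the range check 12 <= X && X <= 15.  */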
static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
                                   TREE_INT_CST_HIGH (arg01),
                                   TREE_INT_CST_LOW (arg1),
                                   TREE_INT_CST_HIGH (arg1),
                                   &lpart, &hpart, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
                                -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
                                       TREE_INT_CST_HIGH (prod),
                                       TREE_INT_CST_LOW (tmp),
                                       TREE_INT_CST_HIGH (tmp),
                                       &lpart, &hpart, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
                                  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          neg_overflow = true;
          lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        case  0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case  1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        case  0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case  1:
          neg_overflow = true;
          lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */
static tree
fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = signed_type_for (TREE_TYPE (arg00));
          return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                              result_type, fold_convert (stype, arg00),
                              build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}
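/* Example of the sign test built above, assuming a 32-bit int A:

       (A & 0x80000000) != 0   folds to   A < 0
       (A & 0x80000000) == 0   folds to   A >= 0

   because the masked bit is exactly the sign bit of A's type.  */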
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */
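/* Example of the shift form produced below, for a hypothetical
   operand A:

       (A & 8) != 0   becomes   (A >> 3) & 1

   where 3 = log2 (8); for the == 0 variant a XOR with 1 is inserted
   before the final AND.  */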
tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
                      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
        inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */
bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */
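/* Illustrative case for the code below: for a short s, the comparison
   (int) s < 100 can be carried out in the narrower type as
   s < (short) 100, while a constant outside short's range decides the
   comparison outright, e.g. (int) s < 100000 is always 1.  */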
static tree
fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
          || (TYPE_PRECISION (shorter_type)
              >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
              && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2 (code, type, arg0_unw,
                        fold_convert (shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
        return omit_one_operand (type, integer_one_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
        return omit_one_operand (type, integer_zero_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */
static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
                              tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  /* If the conversion is from an integral subtype to its basetype
     leave it alone.  */
  if (TREE_TYPE (inner_type) == outer_type)
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
            || TREE_CODE (arg1) == CONVERT_EXPR)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
                                  TREE_INT_CST_HIGH (arg1), 0,
                                  TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s * delta
   being an integer constant (and thus already folded).
   ADDR is the address.  OP1 is the multiplicative expression.
   If the function succeeds, the new address expression is returned.  Otherwise
   NULL_TREE is returned.  */
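/* Example of the rewrite attempted below, assuming int a[10] with
   element size 4:

       &a[i] p+ 4 * d   becomes   &a[i + d]
       &a[i] p+ 8       becomes   &a[i + 2]

   the second form shows s = 4 and delta = 2 being reconstructed from
   an already-folded constant offset.  */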
static tree
try_move_mult_to_index (tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Treat op1 as delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        {
          /* Remember if this was a multi-dimensional array.  */
          if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
            mdim = true;

          itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
          if (! itype)
            continue;

          step = array_ref_element_size (ref);
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* Try if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          /* Only fold here if we can verify we do not overflow one
             dimension of a multi-dimensional array.  */
          if (mdim)
            {
              tree tmp;

              if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
                  || !INTEGRAL_TYPE_P (itype)
                  || !TYPE_MAX_VALUE (itype)
                  || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
                continue;

              tmp = fold_binary (PLUS_EXPR, itype,
                                 fold_convert (itype,
                                               TREE_OPERAND (ref, 1)),
                                 fold_convert (itype, delta));
              if (!tmp
                  || TREE_CODE (tmp) != INTEGER_CST
                  || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
                continue;
            }

          break;
        }
      else
        mdim = false;

      if (!handled_component_p (ref))
        return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
                                       fold_convert (itype,
                                                     TREE_OPERAND (pos, 1)),
                                       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
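/* Concrete instance of the transformation below: once A < X is known
   from BOUND, A + 1 cannot wrap, so A + 1 > Y simplifies to A >= Y
   and the conjunction A < X && A + 1 > Y becomes A < X && A >= Y.  */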
static tree
fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert (ssizetype, a);
      tree ta1 = fold_convert (ssizetype, a1);
      diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary (MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2 (GE_EXPR, type, a, y);
}
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */
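/* Two illustrative folds performed below (operand names hypothetical):

       a * c + b * c    becomes   (a + b) * c
       i * 12 + j * 4   becomes   (i * 3 + j) * 4

   the second extracts the common power-of-two factor 4 from the two
   constant multipliers.  */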
static tree
fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
           && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
          < (int11 >= 0 ? int11 : -int11))
        {
          tmp = int01, int01 = int11, int11 = tmp;
          alt0 = arg00, arg00 = arg10, arg10 = alt0;
          maybe_same = arg01;
          swap = true;
        }
      else
        maybe_same = arg11;

      if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
        {
          alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
                              build_int_cst (TREE_TYPE (arg00),
                                             int01 / int11));
          alt1 = arg10;
          same = maybe_same;
          if (swap)
            maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
        }
    }

  if (same)
    return fold_build2 (MULT_EXPR, type,
                        fold_build2 (code, type,
                                     fold_convert (type, alt0),
                                     fold_convert (type, alt1)),
                        fold_convert (type, same));

  return NULL_TREE;
}
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */
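/* Byte-level example for the loop below, assuming 8-bit bytes: on a
   little-endian target the 16-bit constant 0x0102 is emitted as

       ptr[0] = 0x02, ptr[1] = 0x01

   while a big-endian target stores 0x01 first.  */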
static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
        value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
        value = (unsigned char) (TREE_INT_CST_HIGH (expr)
                                 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */
static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = 32 / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */
static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */
static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem, elements;

  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (elements)
        {
          elem = TREE_VALUE (elements);
          elements = TREE_CHAIN (elements);
        }
      else
        elem = NULL_TREE;

      if (elem)
        {
          if (native_encode_expr (elem, ptr+offset, len-offset) != size)
            return 0;
        }
      else
        {
          if (offset + size > len)
            return 0;
          memset (ptr+offset, 0, size);
        }
      offset += size;
    }
  return offset;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */
int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */
static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  unsigned HOST_WIDE_INT lo = 0;
  HOST_WIDE_INT hi = 0;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
        lo |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
        hi |= (unsigned HOST_WIDE_INT) value
              << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return build_int_cst_wide_type (type, lo, hi);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */
static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = 32 / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */
static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */
static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem, elements;
  int i, size, count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = NULL_TREE;
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
        return NULL_TREE;
      elements = tree_cons (NULL_TREE, elem, elements);
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */
tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */
static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  When IN_FOLD is true
   avoid modifications of T.  */
static tree
build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else if (!in_fold)
    {
      tree base = t;

      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);

      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }
  else
    t = build1 (ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T with type PTRTYPE.  This
   function modifies the input parameter 'T' by sometimes setting the
   TREE_ADDRESSABLE flag.  */
tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
}
/* Build an expression for the address of T.  This function modifies
   the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
   flag.  When called from fold functions, use fold_addr_expr instead.  */
tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type_1 (t,
                                           build_pointer_type (TREE_TYPE (t)),
                                           false);
}
/* Same as build_fold_addr_expr, builds an expression for the address
   of T, but avoids touching the input node 't'.  Fold functions
   should use this version.  */
static tree
fold_addr_expr (tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
}
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */
tree
fold_unary (enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (code == NOP_EXPR || code == CONVERT_EXPR
          || code == FLOAT_EXPR || code == ABS_EXPR)
        {
          /* Don't use STRIP_NOPS, because signedness of argument type
             matters.  */
          STRIP_SIGN_NOPS (arg0);
        }
      else
        {
          /* Strip any conversions that don't change the mode.  This
             is safe for every expression, except for a comparison
             expression because its signedness is derived from its
             operands.

             Note that this is done as an internal manipulation within
             the constant folder, in order to find the simplest
             representation of the arguments so that their form can be
             studied.  In any cases, the appropriate type conversions
             should be put back in the tree that will get out of the
             constant folder.  */
          STRIP_NOPS (arg0);
        }
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold_build1 (code, type, arg01);
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold_build1 (code, type, arg02);
          tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
                             arg01, arg02);

          /* If this was a conversion, and all we did was to move into
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((code == NOP_EXPR || code == CONVERT_EXPR
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                     && (INTEGRAL_TYPE_P
                         (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
                  || flag_syntax_only))
            tem = build1 (code, type,
                          build3 (COND_EXPR,
                                  TREE_TYPE (TREE_OPERAND
                                             (TREE_OPERAND (tem, 1), 0)),
                                  TREE_OPERAND (tem, 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
          return tem;
        }
      else if (COMPARISON_CLASS_P (arg0))
        {
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            {
              arg0 = copy_node (arg0);
              TREE_TYPE (arg0) = type;
              return arg0;
            }
          else if (TREE_CODE (type) != INTEGER_TYPE)
            return fold_build3 (COND_EXPR, type, arg0,
                                fold_build1 (code, type,
                                             integer_one_node),
                                fold_build1 (code, type,
                                             integer_zero_node));
        }
    }
  switch (code)
    {
    case PAREN_EXPR:
      /* Re-association barriers around constants and other re-association
         barriers can be removed.  */
      if (CONSTANT_CLASS_P (op0)
          || TREE_CODE (op0) == PAREN_EXPR)
        return fold_convert (type, op0);
      return NULL_TREE;

    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;

      /* If we have (type) (a CMP b) and type is an integral type, return
         new expression involving the new type.  */
      if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
        return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
                            TREE_OPERAND (op0, 1));
      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (op0) == NOP_EXPR
          || TREE_CODE (op0) == CONVERT_EXPR)
        {
          tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inter_type = TREE_TYPE (op0);
          int inside_int = INTEGRAL_TYPE_P (inside_type);
          int inside_ptr = POINTER_TYPE_P (inside_type);
          int inside_float = FLOAT_TYPE_P (inside_type);
          int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
          unsigned int inside_prec = TYPE_PRECISION (inside_type);
          int inside_unsignedp = TYPE_UNSIGNED (inside_type);
          int inter_int = INTEGRAL_TYPE_P (inter_type);
          int inter_ptr = POINTER_TYPE_P (inter_type);
          int inter_float = FLOAT_TYPE_P (inter_type);
          int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
          unsigned int inter_prec = TYPE_PRECISION (inter_type);
          int inter_unsignedp = TYPE_UNSIGNED (inter_type);
          int final_int = INTEGRAL_TYPE_P (type);
          int final_ptr = POINTER_TYPE_P (type);
          int final_float = FLOAT_TYPE_P (type);
          int final_vec = TREE_CODE (type) == VECTOR_TYPE;
          unsigned int final_prec = TYPE_PRECISION (type);
          int final_unsignedp = TYPE_UNSIGNED (type);

          /* In addition to the cases of two conversions in a row
             handled below, if we are converting something to its own
             type via an object of identical or wider precision, neither
             conversion is needed.  */
          if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
              && (((inter_int || inter_ptr) && final_int)
                  || (inter_float && final_float))
              && inter_prec >= final_prec)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Likewise, if the intermediate and final types are either both
             float or both integer, we don't need the middle conversion if
             it is wider than the final type and doesn't change the signedness
             (for integers).  Avoid this if the final type is a pointer
             since then we sometimes need the inner conversion.  Likewise if
             the outer has a precision not equal to the size of its mode.  */
          if (((inter_int && inside_int)
               || (inter_float && inside_float)
               || (inter_vec && inside_vec))
              && inter_prec >= inside_prec
              && (inter_float || inter_vec
                  || inter_unsignedp == inside_unsignedp)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr
              && (! final_vec || inter_prec == inside_prec))
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* If we have a sign-extension of a zero-extended value, we can
             replace that by a single zero-extension.  */
          if (inside_int && inter_int && final_int
              && inside_prec < inter_prec && inter_prec < final_prec
              && inside_unsignedp && !inter_unsignedp)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Two conversions in a row are not needed unless:
             - some conversion is floating-point (overstrict for now), or
             - some conversion is a vector (overstrict for now), or
             - the intermediate type is narrower than both initial and
               final, or
             - the intermediate type and innermost type differ in signedness,
               and the outermost type is wider than the intermediate, or
             - the initial type is a pointer type and the precisions of the
               intermediate and final types differ, or
             - the final type is a pointer type and the precisions of the
               initial and intermediate types differ.  */
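          /* Example applications of these rules (types illustrative):

                 (int) (long) i  ->  i, for an int i, since the widening
                                     middle conversion is redundant;
                 (int) (char) i  ->  kept as-is, the narrowing matters.  */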
          if (! inside_float && ! inter_float && ! final_float
              && ! inside_vec && ! inter_vec && ! final_vec
              && (inter_prec >= inside_prec || inter_prec >= final_prec)
              && ! (inside_int && inter_int
                    && inter_unsignedp != inside_unsignedp
                    && inter_prec < final_prec)
              && ((inter_unsignedp && inter_prec > inside_prec)
                  == (final_unsignedp && final_prec > inter_prec))
              && ! (inside_ptr && inter_prec != final_prec)
              && ! (final_ptr && inside_prec != inter_prec)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));
        }
      /* Handle (T *)&A.B.C for A being of type T and B and C
         living at offset zero.  This occurs frequently in
         C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
          && POINTER_TYPE_P (type)
          && handled_component_p (TREE_OPERAND (op0, 0)))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;
          tree base = TREE_OPERAND (op0, 0);
          base = get_inner_reference (base, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          /* If the reference was to a (constant) zero offset, we can use
             the address of the base if it has the same base type
             as the result type.  */
          if (! offset && bitpos == 0
              && TYPE_MAIN_VARIANT (TREE_TYPE (type))
                 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
            return fold_convert (type, fold_addr_expr (base));
        }
      if ((TREE_CODE (op0) == MODIFY_EXPR
           || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
          && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD
                  (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
          TREE_USED (tem) = 1;
          return tem;
        }
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constant (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (type) != BOOLEAN_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        {
          tree and = op0;
          tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && host_integerp (and1, 1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_low_cst (and1, 1);
              cst &= (HOST_WIDE_INT) -1
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
#ifdef LOAD_EXTEND_OP
              if (change
                  && !flag_syntax_only
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = unsigned_type_for (TREE_TYPE (and0));
                  and0 = fold_convert (uns, and0);
                  and1 = fold_convert (uns, and1);
                }
#endif
            }
          if (change)
            {
              tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
                                           TREE_INT_CST_HIGH (and1), 0,
                                           TREE_OVERFLOW (and1));
              return fold_build2 (BIT_AND_EXPR, type,
                                  fold_convert (type, and0), tem);
            }
        }
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
         when one of the new casts will fold away.  Conservatively we assume
         that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
          && TREE_CODE (arg0) == POINTER_PLUS_EXPR
          && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
              || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);

          return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
                              fold_convert (sizetype, arg01));
        }
      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
         of the same precision, and X is an integer type not narrower than
         types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == BIT_NOT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
              || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        {
          tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
          if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
              && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
            return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
        }
      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
         type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == MULT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
        {
          /* Be careful not to introduce new overflows.  */
          tree mult_type;
          if (TYPE_OVERFLOW_WRAPS (type))
            mult_type = type;
          else
            mult_type = unsigned_type_for (type);

          tem = fold_build2 (MULT_EXPR, mult_type,
                             fold_convert (mult_type, TREE_OPERAND (op0, 0)),
                             fold_convert (mult_type, TREE_OPERAND (op0, 1)));
          return fold_convert (type, tem);
        }

      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;
    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
        return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));

      /* For integral conversions with the same precision or pointer
         conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
           || POINTER_TYPE_P (type))
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
              || POINTER_TYPE_P (TREE_TYPE (op0)))
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
          /* Do not muck with VIEW_CONVERT_EXPRs that convert from
             a sub-type to its base type as generated by the Ada FE.  */
          && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
               && TREE_TYPE (TREE_TYPE (op0))))
        return fold_convert (type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if ((TREE_CODE (op0) == NOP_EXPR
           || TREE_CODE (op0) == CONVERT_EXPR)
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
              || POINTER_TYPE_P (TREE_TYPE (op0)))
          && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
              || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
          && (TYPE_PRECISION (TREE_TYPE (op0))
              == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
        return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (arg0);
      if (tem)
        return fold_convert (type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
               && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert (type, fold_build1 (ABS_EXPR,
                                                    TREE_TYPE (targ0),
                                                    targ0));
        }
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
        return arg0;
      else if (tree_expr_nonnegative_p (arg0))
        return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = fold_strip_sign_ops (arg0);
          if (tem)
            return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
        }
      break;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
          tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
          return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert (itype, TREE_REALPART (arg0));
          tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
          return build_complex (type, rpart, negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        return fold_convert (type, TREE_OPERAND (arg0, 0));
      break;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
        return fold_convert (type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
                            build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
               && ((TREE_CODE (arg0) == MINUS_EXPR
                    && integer_onep (TREE_OPERAND (arg0, 1)))
                   || (TREE_CODE (arg0) == PLUS_EXPR
                       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
        return fold_build1 (NEGATE_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 0)))))
        return fold_build2 (BIT_XOR_EXPR, type, tem,
                            fold_convert (type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 1)))))
        return fold_build2 (BIT_XOR_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
          int count = TYPE_VECTOR_SUBPARTS (type), i;

          for (i = 0; i < count; i++)
            {
              if (elements)
                {
                  elem = TREE_VALUE (elements);
                  elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
                  if (elem == NULL_TREE)
                    break;
                  elements = TREE_CHAIN (elements);
                }
              else
                elem = build_int_cst (TREE_TYPE (type), -1);
              list = tree_cons (NULL_TREE, elem, list);
            }
          if (i == count)
            return build_vector (type, nreverse (list));
        }

      break;
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
        arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (arg0);
      if (!tem)
        return NULL_TREE;
      return fold_convert (type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2 (TREE_CODE (arg0), itype,
                             fold_build1 (REALPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 0)),
                             fold_build1 (REALPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 1)));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_COS);
                if (fn)
                  return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      break;
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, integer_zero_node);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2 (TREE_CODE (arg0), itype,
                             fold_build1 (IMAGPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 0)),
                             fold_build1 (IMAGPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 1)));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
          return fold_convert (type, negate_expr (tem));
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_SIN);
                if (fn)
                  return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      break;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */
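/* Instance of the identities folded below: since MAX (a, b) >= b
   always holds, MIN (MAX (a, b), b) is simply b; the remaining three
   cases are the symmetric variants of the same fact.  */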
8215 fold_minmax (enum tree_code code
, tree type
, tree op0
, tree op1
)
8217 enum tree_code compl_code
;
8219 if (code
== MIN_EXPR
)
8220 compl_code
= MAX_EXPR
;
8221 else if (code
== MAX_EXPR
)
8222 compl_code
= MIN_EXPR
;
8226 /* MIN (MAX (a, b), b) == b. */
8227 if (TREE_CODE (op0
) == compl_code
8228 && operand_equal_p (TREE_OPERAND (op0
, 1), op1
, 0))
8229 return omit_one_operand (type
, op1
, TREE_OPERAND (op0
, 0));
8231 /* MIN (MAX (b, a), b) == b. */
8232 if (TREE_CODE (op0
) == compl_code
8233 && operand_equal_p (TREE_OPERAND (op0
, 0), op1
, 0)
8234 && reorder_operands_p (TREE_OPERAND (op0
, 1), op1
))
8235 return omit_one_operand (type
, op1
, TREE_OPERAND (op0
, 1));
8237 /* MIN (a, MAX (a, b)) == a. */
8238 if (TREE_CODE (op1
) == compl_code
8239 && operand_equal_p (op0
, TREE_OPERAND (op1
, 0), 0)
8240 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 1)))
8241 return omit_one_operand (type
, op0
, TREE_OPERAND (op1
, 1));
8243 /* MIN (a, MAX (b, a)) == a. */
8244 if (TREE_CODE (op1
) == compl_code
8245 && operand_equal_p (op0
, TREE_OPERAND (op1
, 1), 0)
8246 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 0)))
8247 return omit_one_operand (type
, op0
, TREE_OPERAND (op1
, 0));
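/* Worked example for fold_minmax (illustrative only): for integers
   a and b,

     MIN (MAX (a, b), b)  ==  b

   holds because MAX (a, b) >= b, so the outer MIN always selects b;
   omit_one_operand keeps the discarded operand a around when it has
   side effects.  The remaining three patterns are the same identity
   modulo commutativity and MIN/MAX duality.  */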
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  */
  if (!(((code0 == MINUS_EXPR
	  || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  */
  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
  if (code0 != INTEGER_CST)
    t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);

  /* If swapping might yield to a more canonical form, do so.  */
  if (swap)
    return fold_build2 (swap_tree_comparison (code), type, arg1, t);

  return fold_build2 (code, type, t, arg1);
}
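/* Worked example (illustrative only): for signed x,

     x + 2 > y    becomes    x + 1 >= y

   (the A + CST > arg1 pattern above), which is only valid when
   signed overflow is undefined, hence *strict_overflow_p.  For a
   sole constant, 3 <= y first becomes 2 < y and is then swapped to
   the more canonical y > 2, with no overflow assumption needed.  */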
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* In principle pointers also have undefined overflow behavior,
     but that causes problems elsewhere.  */
  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      || POINTER_TYPE_P (TREE_TYPE (arg0)))
    return NULL_TREE;

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
			 TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand (type, boolean_false_node, variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand (type, boolean_true_node, variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  fold_overflow_warning (("assuming signed overflow does not occur "
				  "when changing X +- C1 cmp C2 to "
				  "X cmp C1 +- C2"),
				 WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2 (code, type, variable, lhs);
	}
    }
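  /* Worked example (illustrative only): for 32-bit signed int x,

       x - 1 > INT_MAX - 1   becomes   x > INT_MAX

     by moving the constant across (lhs = INT_MAX - 1 + 1, no
     overflow).  When the moved constant does overflow the result is
     decided outright: e.g. x + 1 < INT_MIN is always false, since
     with undefined overflow x + 1 can never drop below INT_MIN; every
     overflow case is canonicalized to this VARIABLE + 1 CODE2 INT_MIN
     form, and x is kept only for its side effects.  */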
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  offset0 = TREE_OPERAND (arg0, 1);
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  offset1 = TREE_OPERAND (arg1, 1);
	}

      /* If we have equivalent bases we might be able to simplify.  */
      if (indirect_base0 == indirect_base1
	  && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if (offset0 == offset1
	      || (offset0 && offset1
		  && operand_equal_p (offset0, offset1, 0)))
	    {
	      switch (code)
		{
		case EQ_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
		case NE_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
		case LT_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
		case LE_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
		case GE_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
		case GT_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed size type here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1)
	    {
	      tree signed_size_type_node;
	      signed_size_type_node = signed_type_for (size_type_node);

	      /* By converting to signed size type we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 size type.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (signed_size_type_node, 0);
	      else
		offset0 = fold_convert (signed_size_type_node, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (signed_size_type_node, 0);
	      else
		offset1 = fold_convert (signed_size_type_node, offset1);

	      return fold_build2 (code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands (type, boolean_false_node, arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands (type, boolean_true_node, arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = fold_addr_expr (base0);
	  if (indirect_base1)
	    base1 = fold_addr_expr (base1);
	  return fold_build2 (code, type, base0, base1);
	}
    }
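  /* Worked example (illustrative only): given

       struct { int f; int g; } s;

     the comparison &s.f == &s.g decomposes both sides to the common
     base s with equal (absent) variable offsets, so it folds to the
     constant bitpos0 == bitpos1, here 0 == 32 on a 32-bit-int target,
     i.e. false.  Likewise PTR +p 4 < PTR +p 8 with a common base
     reduces to a comparison of the offsets, 4 < 8.  */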
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2 (code, type,
			      variable1,
			      fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
					   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2 (code, type,
			      fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
					   variable1, cst),
			      variable2);
	}
    }

  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      gcc_assert (!integer_zerop (const1));

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2 (cmp_code, type, variable1, const2);
    }
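  /* Worked example (illustrative only): with signed x and undefined
     overflow,

       x * 4 < 0     becomes    x < 0
       x * -4 < 0    becomes    x > 0

     (the sense of the comparison flips for a negative multiplier),
     and

       x + 20 < y + 10    becomes    x < y - 10

     since the combined constant -10 is smaller in absolute value
     than the original 20.  */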
  tem = maybe_canonicalize_comparison (code, type, op0, op1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2 (code, type, fold_convert (newtype, targ0),
			    fold_convert (newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
			    TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2 (swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 0),
				build_real (TREE_TYPE (arg1),
					    REAL_VALUE_NEGATE (cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2 (code, type, arg0,
				build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand (type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst))
	    {
	      tem = fold_inf_compare (code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (swap_tree_comparison (code), type,
			    TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && (TREE_CODE (arg0) == NOP_EXPR
	  || TREE_CODE (arg0) == CONVERT_EXPR))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (code, type, arg0, arg1);
      if (tem)
	return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (code, type, op0, op1);
      if (tem)
	return tem;
    }

  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2 (EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }

  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */
  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
	    = fold_build2 (code, type,
			   eval_subst (arg0, cval1, maxval,
				       cval2, minval),
			   arg1);
	  tree equal_result
	    = fold_build2 (code, type,
			   eval_subst (arg0, cval1, maxval,
				       cval2, maxval),
			   arg1);
	  tree low_result
	    = fold_build2 (code, type,
			   eval_subst (arg0, cval1, minval,
				       cval2, maxval),
			   arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand (type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand (type, integer_one_node, arg0);
		}

	      if (save_p)
		return save_expr (build2 (code, type, cval1, cval2));
	      return fold_build2 (code, type, cval1, cval2);
	    }
	}
    }
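  /* Worked example of the 3-bit mask above (illustrative only): for
     (x > y) == 0, cval1 = x and cval2 = y.  Substituting (max,min),
     (max,max) and (min,max) for (x,y) gives high_result = (1 == 0)
     = 0, equal_result = (0 == 0) = 1 and low_result = (0 == 0) = 1,
     so the mask is 0*4 + 1*2 + 1 = 3, selecting LE_EXPR: the whole
     expression folds to x <= y.  */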
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2 (code, type,
			  fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
			  TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2 (swap_tree_comparison (code), type,
			  TREE_OPERAND (arg0, 0),
			  fold_build1 (BIT_NOT_EXPR, cmp_type,
				       fold_convert (cmp_type, arg1)));
    }

  return NULL_TREE;
}
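/* Worked examples for the BIT_NOT_EXPR folds above (illustrative
   only): since ~X == -X - 1 reverses order,

     ~x < ~y    becomes    y < x
     ~x < 5     becomes    x > ~5   (i.e. x > -6 for signed x)

   using only bitwise identities, so no signed-overflow assumption
   is needed.  */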
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1 (REALPART_EXPR, itype, expr);
      ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2 (PLUS_EXPR, itype,
		     fold_build2 (MULT_EXPR, itype, rpart, rpart),
		     fold_build2 (MULT_EXPR, itype, ipart, ipart));
  return fold_build2 (COMPLEX_EXPR, type, tem,
		      fold_convert (itype, integer_zero_node));
}
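/* Worked example (illustrative only): for _Complex int z with
   r = __real__ z and i = __imag__ z,

     z * conj(z) = (r + i*I) * (r - i*I) = r*r + i*i

   with a zero imaginary part; the save_exprs above keep r and i
   from being evaluated twice.  */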
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      if (handled_component_p (expr))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;

	  expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  *residue = bitpos / BITS_PER_UNIT;
	  if (offset)
	    {
	      if (TREE_CODE (offset) == INTEGER_CST)
		*residue += TREE_INT_CST_LOW (offset);
	      else
		/* We don't handle more complicated offset expressions.  */
		return 1;
	    }
	}

      if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
	return DECL_ALIGN_UNIT (expr);
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
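/* Worked example (illustrative only): for

     char buf[16] __attribute__ ((aligned (8)));
     char *p = &buf[6] + 4 * i;

   the ADDR_EXPR arm yields modulus 8 and residue 6 for &buf[6]
   (DECL_ALIGN_UNIT of buf), and the POINTER_PLUS_EXPR arm caps the
   modulus at the greatest power-of-2 divisor of the constant step,
   giving M = 4, N = 6: whatever i is, p and 6 agree in their low
   log2(4) = 2 bits.  */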
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert ((IS_EXPR_CODE_CLASS (kind)
	       || IS_GIMPLE_STMT_CODE_CLASS (kind))
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     cases, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
	{
	  /* Make sure type and arg0 have the same saturating flag.  */
	  gcc_assert (TYPE_SATURATING (type)
		      == TYPE_SATURATING (TREE_TYPE (arg0)));
	  tem = const_binop (code, arg0, arg1, 0);
	}
      else if (kind == tcc_comparison)
	tem = fold_relational_const (code, type, arg0, arg1);
      else
	tem = NULL_TREE;

      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert (type, tem);
	  return tem;
	}
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (code, type, op1, op0);
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			 : TRUTH_XOR_EXPR,
			 boolean_type_node,
			 fold_convert (boolean_type_node, arg0),
			 fold_convert (boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue (tem);

      return fold_convert (type, tem);
    }

  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build2 (code, type,
				    fold_convert (TREE_TYPE (op0),
						  TREE_OPERAND (arg0, 1)),
				    op1));
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		       fold_build2 (code, type, op0,
				    fold_convert (TREE_TYPE (op1),
						  TREE_OPERAND (arg1, 1))));

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
						fold_convert (sizetype, arg1),
						fold_convert (sizetype, arg0)));

      /* index +p PTR -> PTR +p index */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_build2 (POINTER_PLUS_EXPR, type,
			    fold_convert (type, arg1),
			    fold_convert (sizetype, arg0));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  tree inner;
	  tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  inner = fold_build2 (PLUS_EXPR, sizetype,
			       arg01, fold_convert (sizetype, arg1));
	  return fold_convert (type,
			       fold_build2 (POINTER_PLUS_EXPR,
					    TREE_TYPE (arg00), arg00, inner));
	}

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
	 of the array.  The loop optimizer sometimes produces this type
	 of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
	  if (tem)
	    return fold_convert (type, tem);
	}

      return NULL_TREE;
    case PLUS_EXPR:
      /* PTR + INT -> (INT)(PTR p+ INT) */
      if (POINTER_TYPE_P (TREE_TYPE (arg0))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
	return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
						TREE_TYPE (arg0),
						arg0,
						fold_convert (sizetype, arg1)));
      /* INT + PTR -> (INT)(PTR p+ INT) */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
						TREE_TYPE (arg1),
						arg1,
						fold_convert (sizetype, arg0)));
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, arg0),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, arg1),
			    fold_convert (type, TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
	{
	  /* Convert ~A + 1 to -A.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && integer_onep (arg1))
	    return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));

	  /* ~X + X is -1.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg0, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (tem, arg1, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand (type, t1, arg1);
		}
	    }

	  /* X + ~X is -1.  */
	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg1, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (arg0, tem, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand (type, t1, arg0);
		}
	    }

	  /* X + (X / CST) * -CST is X % CST.  */
	  if (TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	      && operand_equal_p (arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
	    {
	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
	      tree cst1 = TREE_OPERAND (arg1, 1);
	      tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
	      if (sum && integer_zerop (sum))
		return fold_convert (type,
				     fold_build2 (TRUNC_MOD_EXPR,
						  TREE_TYPE (arg0),
						  arg0, cst0));
	    }
	}
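      /* Worked example (illustrative only): X + (X / 8) * -8 folds
	 to X % 8, because truncating division guarantees
	 X == (X / 8) * 8 + X % 8; the sum check above verifies that
	 the two constants (here 8 and -8) cancel before rewriting to
	 TRUNC_MOD_EXPR.  */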
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
	 same or one.  Make sure type is not saturating.
	 fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1), 0)))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }

	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (((TREE_CODE (arg0) == PLUS_EXPR
		|| TREE_CODE (arg0) == MINUS_EXPR)
	       && TREE_CODE (arg1) == MULT_EXPR)
	      || ((TREE_CODE (arg1) == PLUS_EXPR
		   || TREE_CODE (arg1) == MINUS_EXPR)
		  && TREE_CODE (arg0) == MULT_EXPR))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2 (pcode, type,
				    fold_build2 (PLUS_EXPR, type,
						 fold_convert (type, parg0),
						 fold_convert (type, marg)),
				    fold_convert (type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return fold_build2 (PLUS_EXPR, type,
				    fold_convert (type, parg0),
				    fold_build2 (pcode, type,
						 fold_convert (type, marg),
						 fold_convert (type, parg1)));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue (fold_convert (type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold_build2 (MINUS_EXPR, type,
				    fold_convert (type, arg0),
				    fold_convert (type, tem));
	    }

	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
	     to __complex__ ( x, y ).  This is not the same for SNaNs or
	     if signed zeros are involved.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = arg1r ? arg1r
			: build1 (REALPART_EXPR, rtype, arg1);
		      tree ip = arg0i ? arg0i
			: build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2 (COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
			: build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = arg1i ? arg1i
			: build1 (IMAGPART_EXPR, rtype, arg1);
		      return fold_build2 (COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }

	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (code, type, arg0, arg1)))
	    return tem;

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2 (MULT_EXPR, type, arg0,
				build_real (type, dconst2));

	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
		  return fold_build2 (PLUS_EXPR, type, tree0, tree11);
		}
	    }

	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
		  return fold_build2 (PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	tree rtype;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
		TYPE_UNSIGNED (rtype))
	    /* Only create rotates in complete modes.  Other cases are not
	       expanded properly.  */
	    && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
			     code0 == LSHIFT_EXPR ? tree01 : tree11);
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return build2 ((code0 == LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree01);
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return build2 ((code0 != LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree11);
	      }
	  }
      }
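      /* Worked example (illustrative only): for a 32-bit unsigned x,

	   (x << 3) + (x >> 29)

	 matches the first pattern (3 + 29 == precision) and becomes a
	 left-rotate of x by 3 (LROTATE_EXPR), and
	 (x << b) + (x >> (32 - b)) matches the MINUS_EXPR pattern and
	 becomes a rotate by b.  The + can act as | here because the
	 two shifted fields cannot overlap.  */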
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	  && !TYPE_SATURATING (type))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* With undefined overflow we can only associate constants
	     with one variable.  */
	  if ((POINTER_TYPE_P (type)
	       || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
	      && var0 && var1)
	    {
	      tree tmp0 = var0;
	      tree tmp1 = var1;

	      if (TREE_CODE (tmp0) == NEGATE_EXPR)
		tmp0 = TREE_OPERAND (tmp0, 0);
	      if (TREE_CODE (tmp1) == NEGATE_EXPR)
		tmp1 = TREE_OPERAND (tmp1, 0);
	      /* The only case we can still associate with two variables
		 is if they are the same, modulo negation.  */
	      if (!operand_equal_p (tmp0, tmp1, 0))
		ok = false;
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && (2 < ((var0 != 0) + (var1 != 0)
		       + (con0 != 0) + (con1 != 0)
		       + (lit0 != 0) + (lit1 != 0)
		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
	    {
	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	      if (code == MINUS_EXPR)
		code = PLUS_EXPR;

	      var0 = associate_trees (var0, var1, code, type);
	      con0 = associate_trees (con0, con1, code, type);
	      lit0 = associate_trees (lit0, lit1, code, type);
	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e. extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return fold_convert (type,
					 associate_trees (var0, minus_lit0,
							  MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (con0, minus_lit0,
					      MINUS_EXPR, type);
		      return fold_convert (type,
					   associate_trees (var0, con0,
							    PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (con0, lit0, code, type);
	      return fold_convert (type, associate_trees (var0, con0,
							   code, type));
	    }
	}

      return NULL_TREE;
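      /* Worked example (illustrative only): for unsigned x and y,
	 where overflow wraps, split_tree decomposes (x + 1) + (y + 2)
	 into variables x, y and literals 1, 2, and the pieces
	 reassociate to (x + y) + 3.  With undefined signed overflow
	 the var0/var1 check above refuses two distinct variables,
	 since x + y on its own might overflow even though each
	 original sum did not.  */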
    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
	{
	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
	      tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
	      tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
	      return fold_build2 (PLUS_EXPR, type,
				  fold_build2 (MINUS_EXPR, type, arg00, arg10),
				  fold_build2 (MINUS_EXPR, type, arg01, arg11));
	    }
	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
	     simplifies.  */
	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
	      tree tmp = fold_binary (MINUS_EXPR, type, arg00,
				      fold_convert (type, arg1));
	      if (tmp)
		return fold_build2 (PLUS_EXPR, type, tmp, arg01);
	    }
	}
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (PLUS_EXPR, type, op0,
			    fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || INTEGRAL_TYPE_P (type))
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, negate_expr (arg1)),
			    fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && integer_onep (arg1)
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build1 (BIT_NOT_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && integer_all_onesp (arg0))
	return fold_build1 (BIT_NOT_EXPR, type, op1);

      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	  && operand_equal_p (arg0,
			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
			      TREE_OPERAND (arg1, 1), 0))
	return fold_convert (type,
			     fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
					  arg0, TREE_OPERAND (arg1, 1)));
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg0))
	    return negate_expr (fold_convert (type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		{
		  tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
		  return fold_build2 (BIT_AND_EXPR, type,
				      fold_build1 (BIT_NOT_EXPR, type, arg10),
				      fold_convert (type, arg0));
		}
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		{
		  tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
		  return fold_build2 (BIT_AND_EXPR, type,
				      fold_build1 (BIT_NOT_EXPR, type, arg11),
				      fold_convert (type, arg0));
		}
	    }

	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold_build2 (BIT_XOR_EXPR, type,
				     TREE_OPERAND (arg0, 0), mask1);
		  return fold_build2 (MINUS_EXPR, type, tem, mask1);
		}
	    }
	}
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert (type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
	 signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
	  tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
	  bool arg0rz = false, arg0iz = false;
	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
	      || (arg0i && (arg0iz = real_zerop (arg0i))))
	    {
	      tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
	      tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
	      if (arg0rz && arg1i && real_zerop (arg1i))
		{
		  tree rp = fold_build1 (NEGATE_EXPR, rtype,
					 arg1r ? arg1r
					 : build1 (REALPART_EXPR, rtype, arg1));
		  tree ip = arg0i ? arg0i
		    : build1 (IMAGPART_EXPR, rtype, arg0);
		  return fold_build2 (COMPLEX_EXPR, type, rp, ip);
		}
	      else if (arg0iz && arg1r && real_zerop (arg1r))
		{
		  tree rp = arg0r ? arg0r
		    : build1 (REALPART_EXPR, rtype, arg0);
		  tree ip = fold_build1 (NEGATE_EXPR, rtype,
					 arg1i ? arg1i
					 : build1 (IMAGPART_EXPR, rtype, arg1));
		  return fold_build2 (COMPLEX_EXPR, type, rp, ip);
		}
	    }
	}
      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
	  && operand_equal_p (arg0, arg1, 0))
	return fold_convert (type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (arg1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
	      || INTEGRAL_TYPE_P (type)))
	return fold_build2 (PLUS_EXPR, type,
			    fold_convert (type, arg0),
			    fold_convert (type, negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
	HOST_WIDE_INT diff;

	if ((TREE_CODE (arg0) == ADDR_EXPR
	     || TREE_CODE (arg1) == ADDR_EXPR)
	    && ptr_difference_const (arg0, arg1, &diff))
	  return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
	{
	  tree aref0 = TREE_OPERAND (arg0, 0);
	  tree aref1 = TREE_OPERAND (arg1, 0);
	  if (operand_equal_p (TREE_OPERAND (aref0, 0),
			       TREE_OPERAND (aref1, 0), 0))
	    {
	      tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
	      tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
	      tree esz = array_ref_element_size (aref0);
	      tree diff = build2 (MINUS_EXPR, type, op0, op1);
	      return fold_build2 (MULT_EXPR, type, diff,
				  fold_convert (type, esz));
	    }
	}
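      /* Worked example (illustrative only): with int a[10] and a
	 4-byte int, &a[i] - &a[j] folds to (i - j) * 4, i.e. the
	 index difference times array_ref_element_size, provided both
	 ARRAY_REFs index the same array.  */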
      if (flag_unsafe_math_optimizations
	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	  && (tem = distribute_real_division (code, type, arg0, arg1)))
	return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
	 same or one.  Make sure type is not saturating.
	 fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      goto associate;
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2 (MULT_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)),
			    fold_convert (type, negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2 (MULT_EXPR, type,
			    fold_convert (type, negate_expr (arg0)),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));
	  /* Transform x * -1 into -x.  Make sure to do the negation
	     on the original operand with conversions not stripped
	     because we can only strip non-sign-changing conversions.  */
	  if (integer_all_onesp (arg1))
	    return fold_convert (type, negate_expr (op0));
	  /* Transform x * -C into -x * C if x is easily negatable.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && tree_int_cst_sgn (arg1) == -1
	      && negate_expr_p (arg0)
	      && (tem = negate_expr (arg1)) != arg1
	      && !TREE_OVERFLOW (tem))
	    return fold_build2 (MULT_EXPR, type,
				fold_convert (type, negate_expr (arg0)), tem);

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold_build2 (LSHIFT_EXPR, type, op0,
				TREE_OPERAND (arg1, 1));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold_build2 (LSHIFT_EXPR, type, op1,
				TREE_OPERAND (arg0, 1));
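	  /* Illustrative note (added commentary, not from the original
	     file): the two LSHIFT_EXPR rewrites above turn

	       unsigned f (unsigned a, unsigned b) { return a * (1u << b); }

	     into the equivalent of "return a << b;" (and likewise with the
	     operands swapped), replacing a multiply by a power of two with
	     a shift.  */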
	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"multiplication"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert (type, tem);
	    }
	  /* Optimize z * conj(z) for integer complex numbers.  */
	  if (TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (type, arg1);
	  if (TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (type, arg0);
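	  /* Illustrative note (added commentary, not from the original
	     file): fold_mult_zconjz rewrites z * ~z (GNU C conjugation),
	     e.g.

	       _Complex int f (_Complex int z) { return z * ~z; }

	     into a COMPLEX_EXPR whose real part is
	     __real z * __real z + __imag z * __imag z and whose imaginary
	     part is zero, avoiding the general complex multiply.  */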
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert (type, negate_expr (arg0));
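	  /* Illustrative note (added commentary, not from the original
	     file): under the HONOR_* guards above,

	       double f (double x) { return x * -1.0; }

	     becomes "return -x;", while x * 0.0 only folds to 0.0 when
	     neither NaNs (x might be NaN) nor signed zeros (x might be
	     negative) have to be honored for the mode.  */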
	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may
	     change the result for floating point types due to rounding so
	     it is applied only if -fassociative-math was specified.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0);
	      if (tem)
		return fold_build2 (RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg0, 1));
	    }
	  /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    {
	      tree tem = fold_strip_sign_ops (arg0);
	      if (tem != NULL_TREE)
		{
		  tem = fold_convert (type, tem);
		  return fold_build2 (MULT_EXPR, type, tem, tem);
		}
	    }
	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
	     This is not the same for NaNs or if signed zeros are
	     involved.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && TREE_CODE (arg1) == COMPLEX_CST
	      && real_zerop (TREE_REALPART (arg1)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      if (real_onep (TREE_IMAGPART (arg1)))
		return fold_build2 (COMPLEX_EXPR, type,
				    negate_expr (fold_build1 (IMAGPART_EXPR,
							      rtype, arg0)),
				    fold_build1 (REALPART_EXPR, rtype, arg0));
	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
		return fold_build2 (COMPLEX_EXPR, type,
				    fold_build1 (IMAGPART_EXPR, rtype, arg0),
				    negate_expr (fold_build1 (REALPART_EXPR,
							      rtype, arg0)));
	    }
	  /* Optimize z * conj(z) for floating point complex numbers.
	     Guarded by flag_unsafe_math_optimizations as non-finite
	     imaginary components don't produce scalar results.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (type, arg1);
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (type, arg0);
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg;
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
		  return build_call_expr (rootfn, 1, arg);
		}
	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  tree arg = fold_build2 (PLUS_EXPR, type,
					  CALL_EXPR_ARG (arg0, 0),
					  CALL_EXPR_ARG (arg1, 0));
		  return build_call_expr (expfn, 1, arg);
		}
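	      /* Illustrative note (added commentary, not from the original
		 file): with -funsafe-math-optimizations,

		   double f (double x, double y)
		   { return exp (x) * exp (y); }   /* needs <math.h> */

		 is rewritten as the equivalent of "return exp (x + y);";
		 the same rewrite applies to the other BUILTIN_EXPONENT_P
		 functions and their float/long double variants.  */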
	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
		      return build_call_expr (powfn, 2, arg, arg01);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
		      return build_call_expr (powfn, 2, arg00, arg);
		    }
		}
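	      /* Illustrative note (added commentary, not from the original
		 file): the two pow rewrites above correspond to

		   pow (x, y) * pow (z, y)  ->  pow (x * z, y)
		   pow (x, y) * pow (x, z)  ->  pow (x, y + z)

		 so e.g. pow (a, 2.5) * pow (b, 2.5) becomes
		 pow (a * b, 2.5) when both calls use the same variant.  */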
	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				      CALL_EXPR_ARG (arg1, 0), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_call_expr (sinfn, 1,
					    CALL_EXPR_ARG (arg0, 0));
		}
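	      /* Illustrative note (added commentary, not from the original
		 file): with -funsafe-math-optimizations,

		   double f (double x) { return tan (x) * cos (x); }

		 folds to the equivalent of "return sin (x);" in either
		 operand order, which is why both the TAN*COS and COS*TAN
		 combinations are listed above.  */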
	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
		  if (TREE_CODE (arg11) == REAL_CST
		      && !TREE_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr (powfn, 2, arg0, arg);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  if (TREE_CODE (arg01) == REAL_CST
		      && !TREE_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr (powfn, 2, arg1, arg);
		    }
		}
	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (! optimize_size
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      return build_call_expr (powfn, 2, arg0, arg);
		    }
		}
	    }
	}
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = fold_convert (type, integer_zero_node);
	  t1 = fold_unary (BIT_NOT_EXPR, type, t1);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = fold_convert (type, integer_zero_node);
	  t1 = fold_unary (BIT_NOT_EXPR, type, t1);
	  return omit_one_operand (type, t1, arg0);
	}
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
	  int width = TYPE_PRECISION (type), w;
	  hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
	  lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  hi2 = TREE_INT_CST_HIGH (arg1);
	  lo2 = TREE_INT_CST_LOW (arg1);

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	  if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
	    return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
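	  /* Illustrative note (added commentary, not from the original
	     file): the (C1&C2) == C1 test above covers rewrites such as

	       unsigned f (unsigned x) { return (x & 0x0f) | 0xff; }

	     where every bit kept by the AND is forced on by the OR, so the
	     result is simply 0xff and x is kept only for side effects.  */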
	  if (width > HOST_BITS_PER_WIDE_INT)
	    {
	      mhi = (unsigned HOST_WIDE_INT) -1
		    >> (2 * HOST_BITS_PER_WIDE_INT - width);
	      mlo = -1;
	    }
	  else
	    {
	      mhi = 0;
	      mlo = (unsigned HOST_WIDE_INT) -1
		    >> (HOST_BITS_PER_WIDE_INT - width);
	    }

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	  if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
	    return fold_build2 (BIT_IOR_EXPR, type,
				TREE_OPERAND (arg0, 0), arg1);

	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
	     mode which allows further optimizations.  */
	  hi1 &= mhi;
	  lo1 &= mlo;
	  hi2 &= mhi;
	  lo2 &= mlo;
	  hi3 = hi1 & ~hi2;
	  lo3 = lo1 & ~lo2;
	  for (w = BITS_PER_UNIT;
	       w <= width && w <= HOST_BITS_PER_WIDE_INT;
	       w <<= 1)
	    {
	      unsigned HOST_WIDE_INT mask
		= (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
	      if (((lo1 | lo2) & mask) == mask
		  && (lo1 & ~mask) == 0 && hi1 == 0)
		{
		  hi3 = 0;
		  lo3 = mask;
		  break;
		}
	    }
	  if (hi3 != hi1 || lo3 != lo1)
	    return fold_build2 (BIT_IOR_EXPR, type,
				fold_build2 (BIT_AND_EXPR, type,
					     TREE_OPERAND (arg0, 0),
					     build_int_cst_wide (type,
								 lo3, hi3)),
				arg1);
	}
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1 (BIT_NOT_EXPR, type,
			      build2 (BIT_AND_EXPR, type,
				      fold_convert (type,
						    TREE_OPERAND (arg0, 0)),
				      fold_convert (type,
						    TREE_OPERAND (arg1, 0))));
	}
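      /* Illustrative note (added commentary, not from the original file):
	 the rewrite above turns

	   int f (int a, int b) { return ~a | ~b; }

	 into the equivalent of "return ~(a & b);" (De Morgan), which needs
	 one NOT instead of two and maps directly onto a NAND where the
	 target provides one.  */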
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
	return fold_build1 (BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = fold_convert (type, integer_zero_node);
	  t1 = fold_unary (BIT_NOT_EXPR, type, t1);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = fold_convert (type, integer_zero_node);
	  t1 = fold_unary (BIT_NOT_EXPR, type, t1);
	  return omit_one_operand (type, t1, arg0);
	}
      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1), 0)))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}
      /* (X | Y) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 1);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
			    arg1);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* (Y | X) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 0);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
			    arg1);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* X ^ (X | Y) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 1);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
			    arg0);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* X ^ (Y | X) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 0);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
			    arg0);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return fold_build2 (code, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2 (code, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)),
			    fold_build1 (BIT_NOT_EXPR, type, arg1));
      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	return fold_build2 (EQ_EXPR, type, arg0,
			    build_int_cst (TREE_TYPE (arg0), 0));
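      /* Illustrative note (added commentary, not from the original file):
	 the rewrite above turns

	   int f (int x) { return (x & 1) ^ 1; }

	 into the equivalent of "return (x & 1) == 0;", exposing the
	 low-bit test as a comparison that later passes recognize.  */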
      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg0, 0));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg1));
	}
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg0, 1));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg1));
	}
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg1, 1));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_convert (type, arg0),
			      fold_build1 (BIT_NOT_EXPR, type, tem));
	}
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg1, 0));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg0));
	}
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));
      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_zero_node, arg0);
      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
	  tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				   TREE_OPERAND (arg0, 0), tmp1);
	  tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				   TREE_OPERAND (arg0, 1), tmp1);
	  return fold_convert (type,
			       fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
					    tmp2, tmp3));
	}
      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2 (EQ_EXPR, type,
			      fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
					   build_int_cst (TREE_TYPE (tem), 1)),
			      build_int_cst (TREE_TYPE (tem), 0));
	}
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2 (EQ_EXPR, type,
			      fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
					   build_int_cst (TREE_TYPE (tem), 1)),
			      build_int_cst (TREE_TYPE (tem), 0));
	}
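      /* Illustrative note (added commentary, not from the original file):
	 both rewrites above normalize low-bit tests, e.g.

	   int f (int x) { return ~x & 1; }

	 becomes the equivalent of "return (x & 1) == 0;", so flag-style
	 computations funnel into one canonical comparison form.  */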
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg0, 0));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg1));
	}
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg0, 1));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg1));
	}
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg1, 1));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_convert (type, arg0),
			      fold_build1 (BIT_NOT_EXPR, type, tem));
	}
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg1, 0));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg0));
	}
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return fold_convert (type, TREE_OPERAND (arg0, 0));
	}
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1 (BIT_NOT_EXPR, type,
			      build2 (BIT_IOR_EXPR, type,
				      fold_convert (type,
						    TREE_OPERAND (arg0, 0)),
				      fold_convert (type,
						    TREE_OPERAND (arg1, 0))));
	}
      /* If arg0 is derived from the address of an object or function, we may
	 be able to fold this expression using the object or function's
	 alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
	{
	  unsigned HOST_WIDE_INT modulus, residue;
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

	  modulus = get_pointer_modulus_and_residue (arg0, &residue);

	  /* This works because modulus is a power of 2.  If this weren't the
	     case, we'd have to replace it by its greatest power-of-2
	     divisor: modulus & -modulus.  */
	  if (low < modulus)
	    return build_int_cst (type, residue & low);
	}
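      /* Illustrative note (added commentary, not from the original file):
	 conceptually, given a sufficiently aligned object such as

	   int buf[4] __attribute__ ((aligned (16)));

	 an AND of buf's address with 0xf can be folded to the constant 0,
	 because the modulus/residue query above proves the low four bits
	 of the address are zero.  */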
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	 if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
	   || TREE_CODE (arg0) == RSHIFT_EXPR)
	  && host_integerp (TREE_OPERAND (arg0, 1), 1)
	  && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
	     < TYPE_PRECISION (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
	{
	  unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
	  unsigned HOST_WIDE_INT mask
	    = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
	  tree shift_type = TREE_TYPE (arg0);

	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
		   && TYPE_PRECISION (TREE_TYPE (arg0))
		      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
	    {
	      unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      /* See if more bits can be proven as zero because of
		 zero extension.  */
	      if (TREE_CODE (arg00) == NOP_EXPR
		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
		{
		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
		  if (TYPE_PRECISION (inner_type)
		      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
		      && TYPE_PRECISION (inner_type) < prec)
		    {
		      prec = TYPE_PRECISION (inner_type);
		      /* See if we can shorten the right shift.  */
		      if (shiftc < prec)
			shift_type = inner_type;
		    }
		}
	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
	      zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
	      zerobits <<= prec - shiftc;
	      /* For arithmetic shift if sign bit could be set, zerobits
		 can contain actually sign bits, so no transformation is
		 possible, unless MASK masks them all away.  In that
		 case the shift needs to be converted into logical shift.  */
	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
		{
		  if ((mask & zerobits) == 0)
		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
		  else
		    zerobits = 0;
		}
	    }

	  /* ((X << 16) & 0xff00) is (X, 0).  */
	  if ((mask & zerobits) == mask)
	    return omit_one_operand (type, build_int_cst (type, 0), arg0);

	  newmask = mask | zerobits;
	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
	    {
	      unsigned int prec;

	      /* Only do the transformation if NEWMASK is some integer
		 mode's mask.  */
	      for (prec = BITS_PER_UNIT;
		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
		  break;
	      if (prec < HOST_BITS_PER_WIDE_INT
		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
		{
		  if (shift_type != TREE_TYPE (arg0))
		    {
		      tem = fold_build2 (TREE_CODE (arg0), shift_type,
					 fold_convert (shift_type,
						       TREE_OPERAND (arg0, 0)),
					 TREE_OPERAND (arg0, 1));
		      tem = fold_convert (type, tem);
		    }
		  else
		    tem = op0;
		  return fold_build2 (BIT_AND_EXPR, type, tem,
				      build_int_cst_type (TREE_TYPE (op1),
							  newmask));
		}
	    }
	}

      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
	 NaNs or Infinities.  Skip the transformation
	 for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree r = build_real (TREE_TYPE (arg0), dconst1);

	  return omit_two_operands (type, r, arg0, arg1);
	}

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	  if (! HONOR_NANS (TYPE_MODE (elem_type))
	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
	    {
	      tree r = build_real (elem_type, dconst1);
	      /* omit_two_operands will call fold_convert for us.  */
	      return omit_two_operands (type, r, arg0, arg1);
	    }
	}
      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2 (RDIV_EXPR, type,
			    TREE_OPERAND (arg0, 0),
			    negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2 (RDIV_EXPR, type,
			    negate_expr (arg0),
			    TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue (fold_convert (type, negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -freciprocal-math.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_reciprocal_math
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold_build2 (MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold_build2 (MULT_EXPR, type,
				      fold_convert (type, arg0), tem);
		}
	    }
	}
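      /* Illustrative note (added commentary, not from the original file):
	 with -freciprocal-math,

	   double f (double x) { return x / 2.0; }

	 becomes the equivalent of "return x * 0.5;"; the
	 exact_real_inverse path performs the same rewrite without the flag
	 whenever the reciprocal is exactly representable, as it is for
	 powers of two.  */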
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			    fold_build2 (MULT_EXPR, type,
					 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2 (MULT_EXPR, type,
			    fold_build2 (RDIV_EXPR, type, arg0,
					 TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1), 0);
	  if (tem)
	    return fold_build2 (RDIV_EXPR, type, tem,
				TREE_OPERAND (arg1, 0));
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = build_call_expr (tanfn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		  return fold_build2 (RDIV_EXPR, type,
				      build_real (type, dconst1), tmp);
		}
	    }
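	  /* Illustrative note (added commentary, not from the original
	     file): with -funsafe-math-optimizations,

	       double f (double x) { return sin (x) / cos (x); }

	     becomes the equivalent of "return tan (x);", and
	     cos (x) / sin (x) becomes 1.0 / tan (x), trading two libm
	     calls for one.  */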
	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    return build_call_expr (cosfn, 1, arg00);
		}
	    }

	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    {
		      tree tmp = build_call_expr (cosfn, 1, arg00);
		      return fold_build2 (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  tmp);
		    }
		}
	    }
	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      if (TREE_CODE (arg01) == REAL_CST
		  && !TREE_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  return build_call_expr (powfn, 2, arg1, arg);
		}
	    }
	  /* Optimize a/root(b/c) into a*root(c/b).  */
	  if (BUILTIN_ROOT_P (fcode1))
	    {
	      tree rootarg = CALL_EXPR_ARG (arg1, 0);

	      if (TREE_CODE (rootarg) == RDIV_EXPR)
		{
		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree b = TREE_OPERAND (rootarg, 0);
		  tree c = TREE_OPERAND (rootarg, 1);

		  tree tmp = fold_build2 (RDIV_EXPR, type, c, b);

		  tmp = build_call_expr (rootfn, 1, tmp);
		  return fold_build2 (MULT_EXPR, type, arg0, tmp);
		}
	    }
	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
	      arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
	      tree neg11 = fold_convert (type, negate_expr (arg11));
	      arg1 = build_call_expr (powfn, 2, arg10, neg11);
	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
	    }
	}
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
				    sh_cnt, build_int_cst (NULL_TREE, pow2));
	      return fold_build2 (RSHIFT_EXPR, type,
				  fold_convert (type, arg0), sh_cnt);
	    }
	}

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_UNSIGNED (type)
	  && code == FLOOR_DIV_EXPR)
	return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
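      /* Illustrative note (added commentary, not from the original file):
	 the rewrite above turns e.g.

	   unsigned f (unsigned a, unsigned n) { return a / (4u << n); }

	 into the equivalent of "return a >> (n + 2);", since 4 << n is a
	 power of two whose log2 is n + 2; the unsigned FLOOR_DIV case is
	 first canonicalized to TRUNC_DIV because the two agree when no
	 negative operands are possible.  */

      /* Fall thru */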
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert (type, negate_expr (arg0));
      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2 (code, type,
			      fold_convert (type, TREE_OPERAND (arg0, 0)),
			      negate_expr (arg1));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2 (code, type, negate_expr (arg0),
			      TREE_OPERAND (arg1, 0));
	}
      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert (type, tem);
	}

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand (type, integer_zero_node, arg0);
      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      strict_overflow_p = false;
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N)  where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
				       build_int_cst (TREE_TYPE (arg1), 1));
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"X % (power of two)"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (BIT_AND_EXPR, type,
				  fold_convert (type, arg0),
				  fold_convert (type, mask));
	    }
	}
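      /* Illustrative note (added commentary, not from the original file):
	 the power-of-two rewrite above turns

	   unsigned f (unsigned x) { return x % 8; }

	 into the equivalent of "return x & 7;", and A % (C << N) likewise
	 becomes A & ((C << N) - 1) when C is a power of two, with a
	 strict-overflow warning if signedness analysis was needed.  */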
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !TYPE_OVERFLOW_TRAPS (type)
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2 (code, type, fold_convert (type, arg0),
			    fold_convert (type, negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build2 (code, type, fold_convert (type, arg0),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulos"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert (type, tem);
	}

      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
	return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);
      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
			       + TREE_INT_CST_LOW (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= TYPE_PRECISION (type))
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % TYPE_PRECISION (type);
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return build_int_cst (type, 0);
	      else
		low = TYPE_PRECISION (type) - 1;
	    }

	  return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			      build_int_cst (type, low));
	}
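      /* Illustrative note (added commentary, not from the original file):
	 the rewrite above merges shift counts, e.g.

	   unsigned f (unsigned a) { return (a << 3) << 2; }

	 becomes "return a << 5;"; when the combined count reaches the type
	 precision the result is folded to 0 for logical shifts, reduced
	 modulo the precision for rotates, and clamped to precision - 1 for
	 arithmetic right shifts.  */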
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	 into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));

	      lshift = build_int_cst (type, -1);
	      lshift = int_const_binop (code, lshift, arg1, 0);

	      return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
	    }
	}
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (TREE_TYPE (arg1),
				    TYPE_PRECISION (type));
	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
	  return fold_build2 (RROTATE_EXPR, type, op0, tem);
	}
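      /* Illustrative note (added commentary, not from the original file):
	 because rotating left by C is the same as rotating right by
	 precision - C (e.g. a 32-bit left rotate by 8 equals a right
	 rotate by 24), only RROTATE_EXPR needs to be handled by the
	 simplifications that follow.  */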
      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (TREE_CODE (arg0), type,
			    fold_build2 (code, type,
					 TREE_OPERAND (arg0, 0), arg1),
			    fold_build2 (code, type,
					 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
	 type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) TYPE_PRECISION (type)))
	return TREE_OPERAND (arg0, 0);
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	 if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree mask = fold_build2 (code, type,
				   fold_convert (type, TREE_OPERAND (arg0, 1)),
				   arg1);
	  tree shift = fold_build2 (code, type,
				    fold_convert (type, TREE_OPERAND (arg0, 0)),
				    arg1);
	  tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
	  if (tem)
	    return tem;
	}

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, arg1, arg0);
      tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, arg1, arg0);
      tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2 (code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2 (code, type, arg0, tem);
	}
    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.  Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a00,
				fold_build2 (code, type, a01, a11));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a00,
				fold_build2 (code, type, a01, a10));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a01,
				fold_build2 (code, type, a00, a11));

	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold_build2 (TREE_CODE (arg0), type,
				fold_build2 (code, type, a00, a10),
				a01);
	}

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (code, type, op0, op1)))
	return tem;

      /* Check for the possibility of merging component references.  If our
	 lhs is another similar operation, try to merge its rhs with our
	 rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
	  && 0 != (tem = fold_truthop (code, type,
				       TREE_OPERAND (arg0, 1), arg1)))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
	return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_one_node, arg0);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  /* Only call invert_truthvalue if operand is a truth value.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	    tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
	  else
	    tem = invert_truthvalue (arg0);
	  return non_lvalue (fold_convert (type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue (fold_convert (type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue (fold_convert (type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      fold_convert (TREE_TYPE (arg0), arg1),
				      TREE_OPERAND (arg0, 1), 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (arg1))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
					 fold_convert (TREE_TYPE (arg0), arg1),
					 TREE_OPERAND (arg0, 1)));
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree cst = TREE_OPERAND (arg0, 1);

	  if (code == EQ_EXPR
	      && !integer_zerop (cst))
	    return omit_two_operands (type, boolean_false_node,
				      TREE_OPERAND (arg0, 0), arg1);
	  else
	    return omit_two_operands (type, boolean_true_node,
				      TREE_OPERAND (arg0, 0), arg1);
	}
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && integer_zerop (arg1))
	return fold_build2 (code, type,
			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
				      arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				 build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2 (code, type,
				  fold_convert (TREE_TYPE (arg1), tem), arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
				      arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				 build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2 (code, type,
				  fold_convert (TREE_TYPE (arg1), tem), arg1);
	    }
	}
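      /* Illustration (hypothetical "int x, n"): "((1 << n) & x) != 0" is
	 rewritten above as "((x >> n) & 1) != 0", so the variable shift
	 is applied to X rather than to the constant 1.  */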
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
				     fold_convert (newtype,
						   TREE_OPERAND (arg0, 0)),
				     fold_convert (newtype,
						   TREE_OPERAND (arg0, 1)));

	  return fold_build2 (code, type, newmod,
			      fold_convert (newtype, arg1));
	}
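      /* Illustration (hypothetical "int x"): "x % 4 == 0" becomes
	 "(unsigned int) x % 4U == 0"; only the zeroness of the remainder
	 is tested, so the unsigned MOD is equivalent and cheaper.  */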
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

	  /* Check for a valid shift count.  */
	  if (TREE_INT_CST_HIGH (arg001) == 0
	      && TREE_INT_CST_LOW (arg001) < prec)
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
		  tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
		  return fold_build2 (code, type, tem, arg1);
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
				    arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand (type,
					 code == EQ_EXPR ? integer_one_node
							 : integer_zero_node,
					 arg000);
	    }
	}
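      /* Illustration (hypothetical 32-bit "int x"): "((x >> 3) & 1) != 0"
	 takes the first branch and becomes "(x & 8) != 0", while
	 "((x >> 31) & 2) != 0" would overflow the shifted mask and so
	 takes the signed-shift branch, yielding "x < 0".  */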
      /* If this is an NE comparison of zero with an AND of one, remove the
	 comparison since the AND will give the correct value.  */
      if (code == NE_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1)))
	return fold_convert (type, arg0);
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			    arg0, fold_convert (TREE_TYPE (arg0),
						integer_zero_node));
      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
      if (tem)
	return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1 (BIT_NOT_EXPR,
				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
				   TREE_OPERAND (arg0, 1));
	  tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       arg1, notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand (type, rslt, arg0);
	}
      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
	  tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       TREE_OPERAND (arg0, 1), notd);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand (type, rslt, arg0);
	}
      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
	      return fold_build2 (code, type, iref,
				  build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (TREE_INT_CST_HIGH (arg01) == 0
	      && TREE_INT_CST_LOW (arg01)
		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert (itype, arg00);
		}
	      return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  type, arg00, build_int_cst (itype, 0));
	    }
	}
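      /* Illustration (hypothetical 32-bit "int x"): "(x >> 31) != 0"
	 becomes "x < 0"; an unsigned operand is first converted to the
	 corresponding signed type so that the sign test is meaningful.  */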
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    build_int_cst (TREE_TYPE (arg1), 0));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
			    build_int_cst (TREE_TYPE (arg1), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
					 TREE_OPERAND (arg0, 1), arg1));
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
			     TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
			     TREE_OPERAND (arg0, 1));
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
			      type, tem, arg1);
	}
      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
	}
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
			     arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			      tem, build_int_cst (TREE_TYPE (tem), 0));
	}
      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand (type, res, arg0);
	}
      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (code, type,
			    TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg1, 0));
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg00, arg10),
					     arg01),
				build_int_cst (itype, 0));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg00, arg11),
					     arg01),
				build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg01, arg10),
					     arg00),
				build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg01, arg11),
					     arg00),
				build_int_cst (itype, 0));
	}
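      /* Illustration (hypothetical "int x, y"): "(x & 7) == (y & 7)"
	 becomes "((x ^ y) & 7) == 0", saving one AND operation.  */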
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2 (code, type, arg00, arg10);
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2 (code, type, arg00, arg11);
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2 (code, type, arg01, arg10);
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2 (code, type, arg01, arg11);

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    return fold_build2 (code, type,
				fold_build2 (BIT_XOR_EXPR, itype, arg00,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg01, arg11)),
				arg10);
	}
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary (code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands (type, boolean_false_node,
					      imag0, imag1);
		  return fold_build2 (NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands (type, boolean_true_node,
					      imag0, imag1);
		  return fold_build2 (EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary (code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands (type, boolean_false_node,
					      real0, real1);
		  return fold_build2 (NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands (type, boolean_true_node,
					      real0, real1);
		  return fold_build2 (EQ_EXPR, type, real0, real1);
		}
	    }
	}

      return NULL_TREE;
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);
	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
	 This transformation affects the cases which are handled in later
	 optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) != INTEGER_CST
	  && tree_int_cst_sgn (arg1) > 0)
	{
	  if (code == GE_EXPR)
	    {
	      arg1 = const_binop (MINUS_EXPR, arg1,
				  build_int_cst (TREE_TYPE (arg1), 1), 0);
	      return fold_build2 (GT_EXPR, type, arg0,
				  fold_convert (TREE_TYPE (arg0), arg1));
	    }
	  if (code == LT_EXPR)
	    {
	      arg1 = const_binop (MINUS_EXPR, arg1,
				  build_int_cst (TREE_TYPE (arg1), 1), 0);
	      return fold_build2 (LE_EXPR, type, arg0,
				  fold_convert (TREE_TYPE (arg0), arg1));
	    }
	}
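      /* Illustration (hypothetical "int x"): "x >= 5" becomes "x > 4"
	 and "x < 5" becomes "x <= 4", canonicalizing towards the forms
	 that the max/min tests below recognize.  */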
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int width = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && !TREE_OVERFLOW (arg1)
	    && width <= 2 * HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2 (EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2 (NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1), 0);
		  return fold_build2 (EQ_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1), 0);
		  return fold_build2 (NE_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2 (EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2 (NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (NE_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (EQ_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because previous transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st;
		    st = signed_type_for (TREE_TYPE (arg1));
		    return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
					type, fold_convert (st, arg0),
					build_int_cst (st, 0));
		  }
	      }
	  }
      }
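      /* Illustration: for a hypothetical "unsigned char c" compared in
	 its own 8-bit precision, "c <= 255" folds to 1 and "c > 255"
	 folds to 0; for "unsigned int x", "x > 2147483647" flips
	 signedness and becomes the sign test "(int) x < 0".  */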
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    build2 (GE_EXPR, type,
				    TREE_OPERAND (arg0, 0), tem),
			    build2 (LE_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1));
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand (type, integer_one_node, arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand (type, integer_zero_node, arg0);
	}
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			       TREE_OPERAND (arg1, 1)),
		       build_int_cst (TREE_TYPE (arg0), 0));

      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg1) == NOP_EXPR
	      || TREE_CODE (arg1) == CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	return
	  build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		  fold_convert (TREE_TYPE (arg0),
				build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
					TREE_OPERAND (TREE_OPERAND (arg1, 0),
						      1))),
		  build_int_cst (TREE_TYPE (arg0), 0));

      return NULL_TREE;
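      /* Illustration (hypothetical "unsigned x, y"): "x < (1U << y)"
	 becomes "(x >> y) == 0" and "x >= (1U << y)" becomes
	 "(x >> y) != 0", so the constant 1 need not be materialized
	 and shifted at run time.  */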
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2 (code, type, fold_convert (newtype, targ0),
			      fold_convert (newtype, targ1));

	return NULL_TREE;
      }
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert (type, arg1);
      return pedantic_non_lvalue (tem);
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.
   Returns tree TP if it is LABEL_EXPR.  Otherwise it returns NULL_TREE.
   Do not check the sub-tree of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp,
		  int *walk_subtrees,
		  void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;
    case GOTO_EXPR:
      *walk_subtrees = 0;
      /* no break */
    default:
      return NULL_TREE;
    }
}

/* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
   accessible from outside the sub-tree.  Returns false if no
   addressable label is found.  */

static bool
contains_label_p (tree st)
{
  return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

static tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue (tem);
	  return NULL_TREE;
	}

      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand (type, arg1, arg0);
      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  tem = fold_truth_not_expr (arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}
      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build3 (code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue (fold_convert (type,
						  invert_truthvalue (arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (TYPE_PRECISION (TREE_TYPE (tem))
	      < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = ((unsigned HOST_WIDE_INT) -1
			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert (tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert (tem_type, tem);
		}
	      else
		tem = NULL_TREE;
	    }

	  if (tem)
	    return fold_convert (type,
				 fold_build2 (BIT_AND_EXPR,
					      TREE_TYPE (tem), tem,
					      fold_convert (TREE_TYPE (tem),
							    arg1)));
	}
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2 (BIT_AND_EXPR, type,
				TREE_OPERAND (tem, 0), arg1);
	}
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue (fold_convert (type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    fold_convert (type, arg0),
			    arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build2 (TRUTH_ORIF_EXPR, type,
				fold_convert (type, tem),
				arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build2 (TRUTH_ANDIF_EXPR, type,
				fold_convert (type, tem),
				op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2 (TRUTH_ORIF_EXPR, type,
			    fold_convert (type, arg0),
			    op2);

      return NULL_TREE;
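      /* Illustration (hypothetical truth values "a", "b"): "a ? b : 0"
	 becomes "a && b" and "a ? 1 : b" becomes "a || b"; the inverted
	 forms are built only when fold_truth_not_expr can invert the
	 condition cheaply.  */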
    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
	  && type == TREE_TYPE (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
	      && (idx % width) == 0
	      && (idx = idx / width)
		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      tree elements = NULL_TREE;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		elements = TREE_VECTOR_CST_ELTS (arg0);
	      else
		{
		  unsigned HOST_WIDE_INT idx;
		  tree value;

		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
		    elements = tree_cons (NULL_TREE, value, elements);
		}
	      while (idx-- > 0 && elements)
		elements = TREE_CHAIN (elements);
	      if (elements)
		return TREE_VALUE (elements);
	      else
		return fold_convert (type, integer_zero_node);
	    }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind)
      || IS_GIMPLE_STMT_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary (code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary (code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary (code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
= TREE_OPERAND (t
, 0);
13092 tree op1
= TREE_OPERAND (t
, 1);
13094 if (TREE_CODE (op1
) == INTEGER_CST
13095 && TREE_CODE (op0
) == CONSTRUCTOR
13096 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
13098 VEC(constructor_elt
,gc
) *elts
= CONSTRUCTOR_ELTS (op0
);
13099 unsigned HOST_WIDE_INT end
= VEC_length (constructor_elt
, elts
);
13100 unsigned HOST_WIDE_INT begin
= 0;
13102 /* Find a matching index by means of a binary search. */
13103 while (begin
!= end
)
13105 unsigned HOST_WIDE_INT middle
= (begin
+ end
) / 2;
13106 tree index
= VEC_index (constructor_elt
, elts
, middle
)->index
;
13108 if (TREE_CODE (index
) == INTEGER_CST
13109 && tree_int_cst_lt (index
, op1
))
13110 begin
= middle
+ 1;
13111 else if (TREE_CODE (index
) == INTEGER_CST
13112 && tree_int_cst_lt (op1
, index
))
13114 else if (TREE_CODE (index
) == RANGE_EXPR
13115 && tree_int_cst_lt (TREE_OPERAND (index
, 1), op1
))
13116 begin
= middle
+ 1;
13117 else if (TREE_CODE (index
) == RANGE_EXPR
13118 && tree_int_cst_lt (op1
, TREE_OPERAND (index
, 0)))
13121 return VEC_index (constructor_elt
, elts
, middle
)->value
;
13129 return fold (DECL_INITIAL (t
));
13133 } /* switch (code) */
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */
tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
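/* Note that the digest is computed over the operand tree EXPR itself:
   if fold_1 modified EXPR in place instead of building a new tree, the
   before and after checksums above would differ and fold_check_failed
   would report an internal compiler error.  */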
void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
{
  const void **slot;
  enum tree_code code;
  struct tree_function_decl buf;
  int i, len;

recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_function_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  slot = (const void **) htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_empty (ht);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with an
   operand OP0.  */

tree
fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary (code, type, op0);
  if (!tem)
    tem = build1_stat (code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  Return a folded expression if successful.
   Otherwise, return a tree expression with code CODE of type TYPE
   with operands OP0 and OP1.  */

tree
fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary (code, type, op0, op1);
  if (!tem)
    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary (code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  htab_t ht;
  int i;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  htab_empty (ht);
#endif

  tem = fold_builtin_call_array (type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  htab_empty (ht);

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  htab_delete (ht);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1 (code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2 (code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
                         tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3 (code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer (tree type, tree fn,
                                   int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array (type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
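/* Illustrative sketch (not part of GCC): START_FOLD_INIT/END_FOLD_INIT
   is an ordinary save-clear-restore of global flags around a call, so
   that initializer folding ignores trapping semantics.  A minimal model
   with one hypothetical flag and folder:  */
#if 0
static int flag_trapping_math_model = 1;

static int
fold_initializer_model (int (*fold_fn) (int), int arg)
{
  int saved = flag_trapping_math_model;   /* START_FOLD_INIT */
  int result;
  flag_trapping_math_model = 0;

  result = fold_fn (arg);                 /* fold with traps ignored */

  flag_trapping_math_model = saved;       /* END_FOLD_INIT */
  return result;
}
#endif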
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1, 0)))
              && !TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || integer_zerop (bottom)
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
                                             top, bottom, 0));

    default:
      return 0;
    }
}
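/* Illustrative sketch (not part of GCC): the recursion above mirrors
   ordinary divisibility rules.  For instance, (X << 3) + 16 is a
   multiple of 8 because 1 << 3 == 8 is (the LSHIFT_EXPR case) and 16
   is (the INTEGER_CST case), so the PLUS_EXPR case succeeds; checked
   here with plain integers:  */
#if 0
#include <assert.h>

static void
multiple_of_p_model (void)
{
  int x;
  for (x = 0; x < 1000; x++)
    assert (((x << 3) + 16) % 8 == 0);  /* PLUS of two multiples of 8 */
}
#endif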
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
        return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          *strict_overflow_p = true;
          return true;
        }
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return true;
                return tree_expr_nonnegative_warnv_p (op0,
                                                      strict_overflow_p);
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
        return (tree_expr_nonnegative_warnv_p (op0,
                                               strict_overflow_p)
                && tree_expr_nonnegative_warnv_p (op1,
                                                  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == NOP_EXPR
          && TREE_CODE (op1) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (type);
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (op0, op1, 0))
            return true;
          return (tree_expr_nonnegative_warnv_p (op0,
                                                 strict_overflow_p)
                  && tree_expr_nonnegative_warnv_p (op1,
                                                    strict_overflow_p));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and the sum of their widths is less than the
         width of the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == NOP_EXPR
          && TREE_CODE (op1) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
                   < TYPE_PRECISION (type);
        }
      break;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              || tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
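/* Illustrative sketch (not part of GCC): the zero_extend(x) +
   zero_extend(y) rule above.  Two unsigned 8-bit values widened into a
   32-bit signed type need at most MAX(8,8) + 1 == 9 bits for their
   sum, so the sign bit stays clear and the result is nonnegative;
   checked exhaustively:  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
zero_extend_sum_model (void)
{
  unsigned x, y;
  for (x = 0; x <= 255; x++)
    for (y = 0; y <= 255; y++)
      {
        int32_t sum = (int32_t) (uint8_t) x + (int32_t) (uint8_t) y;
        assert (sum >= 0);  /* 9 bits needed, 32 available */
      }
}
#endif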
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
         the range of this object.  */
      return ssa_name_nonnegative_p (t);

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
                                                strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if ((TREE_CODE (t) == MODIFY_EXPR
             || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
            && GENERIC_TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
                                                strict_overflow_p);

        return false;
      }

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_ACOS):
            CASE_FLT_FN (BUILT_IN_ACOSH):
            CASE_FLT_FN (BUILT_IN_CABS):
            CASE_FLT_FN (BUILT_IN_COSH):
            CASE_FLT_FN (BUILT_IN_ERFC):
            CASE_FLT_FN (BUILT_IN_EXP):
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_EXP2):
            CASE_FLT_FN (BUILT_IN_FABS):
            CASE_FLT_FN (BUILT_IN_FDIM):
            CASE_FLT_FN (BUILT_IN_HYPOT):
            CASE_FLT_FN (BUILT_IN_POW10):
            CASE_INT_FN (BUILT_IN_FFS):
            CASE_INT_FN (BUILT_IN_PARITY):
            CASE_INT_FN (BUILT_IN_POPCOUNT):
            case BUILT_IN_BSWAP32:
            case BUILT_IN_BSWAP64:
              /* Always true.  */
              return true;

            CASE_FLT_FN (BUILT_IN_SQRT):
              /* sqrt(-0.0) is -0.0.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
                return true;
              return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
                                                    strict_overflow_p);

            CASE_FLT_FN (BUILT_IN_ASINH):
            CASE_FLT_FN (BUILT_IN_ATAN):
            CASE_FLT_FN (BUILT_IN_ATANH):
            CASE_FLT_FN (BUILT_IN_CBRT):
            CASE_FLT_FN (BUILT_IN_CEIL):
            CASE_FLT_FN (BUILT_IN_ERF):
            CASE_FLT_FN (BUILT_IN_EXPM1):
            CASE_FLT_FN (BUILT_IN_FLOOR):
            CASE_FLT_FN (BUILT_IN_FMOD):
            CASE_FLT_FN (BUILT_IN_FREXP):
            CASE_FLT_FN (BUILT_IN_LCEIL):
            CASE_FLT_FN (BUILT_IN_LDEXP):
            CASE_FLT_FN (BUILT_IN_LFLOOR):
            CASE_FLT_FN (BUILT_IN_LLCEIL):
            CASE_FLT_FN (BUILT_IN_LLFLOOR):
            CASE_FLT_FN (BUILT_IN_LLRINT):
            CASE_FLT_FN (BUILT_IN_LLROUND):
            CASE_FLT_FN (BUILT_IN_LRINT):
            CASE_FLT_FN (BUILT_IN_LROUND):
            CASE_FLT_FN (BUILT_IN_MODF):
            CASE_FLT_FN (BUILT_IN_NEARBYINT):
            CASE_FLT_FN (BUILT_IN_RINT):
            CASE_FLT_FN (BUILT_IN_ROUND):
            CASE_FLT_FN (BUILT_IN_SCALB):
            CASE_FLT_FN (BUILT_IN_SCALBLN):
            CASE_FLT_FN (BUILT_IN_SCALBN):
            CASE_FLT_FN (BUILT_IN_SIGNBIT):
            CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
            CASE_FLT_FN (BUILT_IN_SINH):
            CASE_FLT_FN (BUILT_IN_TANH):
            CASE_FLT_FN (BUILT_IN_TRUNC):
              /* True if the 1st argument is nonnegative.  */
              return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
                                                    strict_overflow_p);

            CASE_FLT_FN (BUILT_IN_FMAX):
              /* True if the 1st OR 2nd arguments are nonnegative.  */
              return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
                                                     strict_overflow_p)
                      || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
                                                         strict_overflow_p)));

            CASE_FLT_FN (BUILT_IN_FMIN):
              /* True if the 1st AND 2nd arguments are nonnegative.  */
              return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
                                                     strict_overflow_p)
                      && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
                                                         strict_overflow_p)));

            CASE_FLT_FN (BUILT_IN_COPYSIGN):
              /* True if the 2nd argument is nonnegative.  */
              return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
                                                    strict_overflow_p);

            CASE_FLT_FN (BUILT_IN_POWI):
              /* True if the 1st argument is nonnegative or the second
                 argument is an even integer.  */
              if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
                {
                  tree arg1 = CALL_EXPR_ARG (t, 1);
                  if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
                    return true;
                }
              return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
                                                    strict_overflow_p);

            CASE_FLT_FN (BUILT_IN_POW):
              /* True if the 1st argument is nonnegative or the second
                 argument is an even integer valued real.  */
              if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
                {
                  REAL_VALUE_TYPE c;
                  HOST_WIDE_INT n;

                  c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
                  n = real_to_integer (&c);
                  if ((n & 1) == 0)
                    {
                      REAL_VALUE_TYPE cint;
                      real_from_integer (&cint, VOIDmode, n,
                                         n < 0 ? -1 : 0, 0);
                      if (real_identical (&c, &cint))
                        return true;
                    }
                }
              return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
                                                    strict_overflow_p);

            default:
              break;
            }
        return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                                TREE_TYPE (t));
      }

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
      return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
                                            strict_overflow_p);

    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
                                            strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case EXC_PTR_EXPR:
    case SSA_NAME:
    case FILTER_EXPR:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Return true when (CODE OP0) is known to be nonzero.
   For floating point we further ensure that the value is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0,
                                              strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is known to be nonzero.
   For floating point we further ensure that the value is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* With the presence of negative values it is hard
             to say something.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0,
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0,
                                            strict_overflow_p));

    default:
      break;
    }

  return false;
}
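/* Illustrative sketch (not part of GCC): the PLUS_EXPR comment above.
   The sum of two nonnegative N-bit signed values is at most
   2**N - 2 < 2**N, so even with wraparound it can only be zero when
   both operands are; checked exhaustively for 8-bit values:  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
nonneg_sum_nonzero_model (void)
{
  int a, b;
  for (a = 0; a <= 127; a++)
    for (b = 0; b <= 127; b++)
      if (a != 0 || b != 0)
        /* Wrapping 8-bit sum: a + b <= 254 < 256, so never zero.  */
        assert ((uint8_t) (a + b) != 0);
}
#endif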
/* Return true when T is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
         the range of this object.  */
      return ssa_name_nonzero_p (t);

    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = get_base_address (TREE_OPERAND (t, 0));

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  */
        if (VAR_OR_FUNCTION_DECL_P (base))
          return !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Return true when T is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case EXC_PTR_EXPR:
    case SSA_NAME:
    case FILTER_EXPR:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
/* Return true when T is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion,
             (ARRAY+(INDEX-(unsigned char)1)) becomes
             ((ARRAY+(-(unsigned char)1))+INDEX), which becomes
             (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop (index, fold_convert (sizetype, low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                   TREE_INT_CST_HIGH (arg0),
                                   &low, &high);
        t = force_fit_type_double (type, low, high, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          {
            TREE_OVERFLOW (t) = 1;
            TREE_CONSTANT_OVERFLOW (t) = 1;
          }
        else if (TREE_CONSTANT_OVERFLOW (arg0))
          TREE_CONSTANT_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
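/* Illustrative sketch (not part of GCC): neg_double, used above,
   negates a value held in two word-sized halves.  The identity is
   -x == ~x + 1, with a carry from the low word into the high word
   exactly when the low word is zero.  A hypothetical 2 x 32-bit model
   checked against native 64-bit arithmetic:  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
neg_double_model (uint32_t low, uint32_t high,
                  uint32_t *plow, uint32_t *phigh)
{
  *plow = ~low + 1;               /* -x == ~x + 1 on the low word */
  *phigh = ~high + (low == 0);    /* carry when the low word was zero */
}

static void
neg_double_model_check (uint64_t x)
{
  uint32_t rlow, rhigh;
  neg_double_model ((uint32_t) x, (uint32_t) (x >> 32), &rlow, &rhigh);
  assert ((((uint64_t) rhigh << 32) | rlow) == 0 - x);
}
#endif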
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        t = arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = force_fit_type_double (type, low, high, -1,
                                     overflow | TREE_OVERFLOW (arg0));
        }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
                             ~TREE_INT_CST_HIGH (arg0), 0,
                             TREE_OVERFLOW (arg0));

  return t;
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;

  return constant_boolean_node (result, type);
}
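/* Illustrative sketch (not part of GCC): the reduction above rests on
   four identities over total orders, so the folder only needs LT and
   EQ; checked here exhaustively for small integers:
   GT(a,b) = LT(b,a), GE(a,b) = !LT(a,b), LE(a,b) = !LT(b,a),
   NE(a,b) = !EQ(a,b).  */
#if 0
#include <assert.h>

static void
relational_reduction_model (void)
{
  int a, b;
  for (a = -8; a <= 8; a++)
    for (b = -8; b <= 8; b++)
      {
        assert ((a > b) == (b < a));     /* swap arguments */
        assert ((a >= b) == !(a < b));   /* invert result */
        assert ((a <= b) == !(b < a));   /* swap and invert */
        assert ((a != b) == !(a == b));  /* invert EQ */
      }
}
#endif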
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the modify expression inside the
     return, has side effects.  If either does not, we don't need to wrap
     the expression in a cleanup point expression.  Note we don't check the
     left-hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          HOST_WIDE_INT offset = tree_low_cst (op01, 0);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);

          if (offset / part_widthi
              <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
                                part_width, index);
        }
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
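/* Illustrative sketch (not part of GCC): the source-level shapes the
   simplifications above correspond to.  Each assertion pairs an
   indirection with the object it can be folded to (the complex-number
   punning relies on GCC's layout guarantee that the real part is
   stored first):  */
#if 0
#include <assert.h>

static void
fold_indirect_ref_model (void)
{
  int fooarray[4] = { 1, 2, 3, 4 };
  int (*fooarrptr)[4] = &fooarray;
  _Complex double complexfoo = 3.0;
  __imag__ complexfoo = 4.0;            /* GNU extension */

  /* *(foo *)&fooarray => fooarray[0] */
  assert (*(int *) &fooarray == fooarray[0]);
  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  assert (*(int *) fooarrptr == (*fooarrptr)[0]);
  /* *(foo *)&complexfoo => __real__ complexfoo */
  assert (*(double *) &complexfoo == __real__ complexfoo);
}
#endif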
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
          unsigned HOST_WIDE_INT high;
          bool overflow_p;

          if ((low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          high = TREE_INT_CST_HIGH (value);
          low &= ~(divisor - 1);
          low += divisor;
          if (low == 0)
            {
              high++;
              if (high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), low, high,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop (PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop (BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
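/* Illustrative sketch (not part of GCC): the power-of-two fast paths
   above.  For a power-of-two DIVISOR (divisor == (divisor & -divisor)),
   rounding up is (VALUE + DIVISOR - 1) & -DIVISOR and rounding down is
   VALUE & -DIVISOR, which round_up/round_down build as two size_binops
   instead of a division.  Checked against the division definition:  */
#if 0
#include <assert.h>

static void
round_up_down_model (void)
{
  unsigned v;
  const unsigned d = 8;  /* power of two: d == (d & -d) */
  for (v = 0; v < 1000; v++)
    {
      unsigned up = (v + d - 1) & -d;   /* round_up fast path */
      unsigned down = v & -d;           /* round_down fast path */
      assert (up % d == 0 && up >= v && up < v + d);
      assert (down % d == 0 && down <= v && v < down + d);
    }
}
#endif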
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
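/* Illustrative sketch (not part of GCC): what ptr_difference_const
   computes at the source level.  Two addresses based on the same core
   object differ by a compile-time constant byte offset:  */
#if 0
#include <assert.h>
#include <stddef.h>

static void
ptr_difference_model (void)
{
  double a[10];
  /* Same core (a); offsets 3*sizeof(double) and 1*sizeof(double),
     so the difference is the constant 2*sizeof(double) bytes.  */
  ptrdiff_t diff = (char *) &a[3] - (char *) &a[1];
  assert (diff == 2 * (ptrdiff_t) sizeof (double));
}
#endif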
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
                            arg0 ? arg0 : TREE_OPERAND (exp, 0),
                            arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                            arg0 ? arg0 : TREE_OPERAND (exp, 1),
                            arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand (TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr (get_callee_fndecl (exp), 1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}