/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *,
		    HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
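/* Illustrative sketch (not part of the original file): with 8-bit values
   for brevity, 0x70 + 0x70 = 0xE0 sets the sign bit even though both
   addends are positive, and OVERFLOW_SUM_SIGN reports it:

     a = 0x70, b = 0x70, sum = 0xE0
     a ^ b   = 0x00  ->  ~(a ^ b) = 0xFF   (signs of A and B agree)
     a ^ sum = 0x90                        (signs of A and SUM differ)
     0xFF & 0x90 = 0x90  ->  sign bit set  ->  overflow detected.  */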
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
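/* Illustrative sketch (not part of the original file): on a host where
   HOST_WIDE_INT is 64 bits, BASE is 1 << 32 and every word decomposes as

     x == LOWPART (x) + HIGHPART (x) * BASE

   e.g. x = 0x123456789abcdef0 gives LOWPART (x) == 0x9abcdef0 and
   HIGHPART (x) == 0x12345678.  */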
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
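/* A minimal self-check sketch of the encode/decode round trip.  The
   FOLD_CONST_EXAMPLES guard is hypothetical (not a real GCC macro); the
   snippet exercises only the two routines above.  */
#ifdef FOLD_CONST_EXAMPLES
static void
example_encode_decode_roundtrip (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low = 0x12345678, low2;
  HOST_WIDE_INT hi = -42, hi2;

  encode (words, low, hi);	/* Split into four half-words.  */
  decode (words, &low2, &hi2);	/* Reassemble the two-word integer.  */
  gcc_assert (low2 == low && hi2 == hi);
}
#endif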
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		 const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
		<< (prec - HOST_BITS_PER_WIDE_INT - 1)))
	h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
	h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  h1 = -1;
	  l1 |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
		       HOST_WIDE_INT high, int overflowable,
		       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  tree t = make_node (INTEGER_CST);
	  TREE_INT_CST_LOW (t) = low;
	  TREE_INT_CST_HIGH (t) = high;
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
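/* Usage sketch (illustrative, not from the original file): truncating the
   value 300 into an 8-bit unsigned type keeps only the low 8 bits (44) and,
   with OVERFLOWABLE < 0, flags the overflow on the returned node:

     tree t = force_fit_type_double (unsigned_char_type_node, 300, 0,
				     -1, false);
     // TREE_INT_CST_LOW (t) == 44 and TREE_OVERFLOW (t) == 1.  */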
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
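/* Illustrative sketch (not part of the original file): the carry out of the
   low word is exactly (l < l1), because an unsigned wrap-around makes the
   low-word sum smaller than either addend.  E.g. with 4-bit words,
   0xE + 0x4 wraps to 0x2, and 0x2 < 0xE signals the carry into H.  */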
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
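/* Illustrative sketch (not part of the original file): the nested loop above
   is schoolbook long multiplication in base B = 2^(HOST_BITS_PER_WIDE_INT/2).
   Each input is four base-B digits, prod[k] accumulates the partial products
   arg1[i] * arg2[j] with i + j == k, and LOWPART/HIGHPART split each running
   sum into the digit that stays and the carry that propagates, exactly as in
   pencil-and-paper multiplication.  */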
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num[num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)), adjust the quotient.  */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den <= ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
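/* Illustrative sketch (not part of the original file): the rounding cases
   above differ only in how a nonzero remainder adjusts the quotient.
   For -7 / 2:

     TRUNC_DIV_EXPR  ->  quo = -3, rem = -1   (round toward zero)
     FLOOR_DIV_EXPR  ->  quo = -4, rem =  1   (round toward -infinity)
     CEIL_DIV_EXPR   ->  quo = -3, rem = -1   (round toward +infinity)
     ROUND_DIV_EXPR  ->  quo = -4, rem =  1   (round to nearest; the .5
						case rounds away from zero,
						since 2 * |rem| >= |den|).  */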
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h,
		       type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
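/* Usage sketch (illustrative): with ARG1 = 12 and ARG2 = 4,
   div_if_zero_remainder (EXACT_DIV_EXPR, arg1, arg2) returns the constant 3;
   with ARG2 = 5 the remainder is nonzero and NULL_TREE is returned.  */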
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
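/* Usage sketch (illustrative, mirroring how callers elsewhere in GCC use
   this facility): wrap speculative folding in a defer/undefer pair and only
   issue the queued warning when the folded result is actually used.

     fold_defer_overflow_warnings ();
     tem = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (tem != NULL_TREE, stmt,
				     WARN_STRICT_OVERFLOW_MISC);  */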
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
			    build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2 (COMPLEX_EXPR, type,
			    fold_negate_expr (TREE_OPERAND (t, 0)),
			    fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1 (CONJ_EXPR, type,
			    fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2 (MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr (fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
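/* Usage sketch (illustrative): negate_expr on a MINUS_EXPR such as a - b
   yields b - a via fold_negate_expr when signed zeros and sign-dependent
   rounding are not honored; a plain INTEGER_CST such as 5 simply folds to
   -5 through fold_negate_const.  Anything it cannot simplify comes back
   wrapped in an explicit NEGATE_EXPR.  */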
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
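/* Illustrative sketch (not part of the original file): splitting
   IN = a + 4 with CODE == PLUS_EXPR stores the literal 4 in *LITP, leaves
   *CONP null (there is no TREE_CONSTANT-but-not-literal part), and returns
   the variable part a.  With NEGATE_P set, the literal instead lands in
   *MINUS_LITP and the returned variable part is negate_expr (a).  */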
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
		      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
			       ((!uns || is_sizetype) && overflow)
			       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	TREE_CONSTANT_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2, notrunc);
	  imag = const_binop (code, i1, i2, notrunc);
	  break;

	case MULT_EXPR:
	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2, notrunc),
			      const_binop (MULT_EXPR, i1, i2, notrunc),
			      notrunc);
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2, notrunc),
			      const_binop (MULT_EXPR, i1, r2, notrunc),
			      notrunc);
	  break;

	case RDIV_EXPR:
	  {
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2, notrunc),
			     const_binop (MULT_EXPR, i1, i2, notrunc),
			     notrunc);
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2, notrunc),
			     const_binop (MULT_EXPR, r1, i2, notrunc),
			     notrunc);

	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
	      code = TRUNC_DIV_EXPR;

	    real = const_binop (code, t1, magsquared, notrunc);
	    imag = const_binop (code, t2, magsquared, notrunc);
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
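/* Usage sketch (illustrative): size_binop (PLUS_EXPR, size_int (4),
   size_int (8)) folds directly to the sizetype constant 12 through
   int_const_binop; with a non-constant operand it falls back to
   fold_build2 and returns an expression instead.  */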
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
			     TREE_INT_CST_HIGH (arg1),
			     /* Don't set the overflow when
				converting from a pointer,  */
			     !POINTER_TYPE_P (TREE_TYPE (arg1))
			     /* or to a sizetype with same signedness
				and the precision is unchanged.
				???  sizetype is always sign-extended,
				but its signedness depends on the
				frontend.  Thus we see spurious overflows
				here if we do not check this.  */
			     && !((TYPE_PRECISION (TREE_TYPE (arg1))
				   == TYPE_PRECISION (type))
				  && (TYPE_UNSIGNED (TREE_TYPE (arg1))
				      == TYPE_UNSIGNED (type))
				  && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
				       && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
				      || (TREE_CODE (type) == INTEGER_TYPE
					  && TYPE_IS_SIZETYPE (type)))),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type,
				  const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = force_fit_type_double (type, low, high, -1,
			     overflow | TREE_OVERFLOW (arg1));
  return t;
}
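
/* Concrete instances of the saturating semantics described above
   (illustrative): (int) NaN folds to 0 with the overflow flag set, and a
   REAL_CST such as 1e30 converted to a 32-bit int folds to the type's
   TYPE_MAX_VALUE, i.e. INT_MAX, again with the overflow flag set.  */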

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      lshift_double (temp.low, temp.high,
		     - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
		     &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      lshift_double (temp.low, temp.high,
		     GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
		     &temp_trunc.low, &temp_trunc.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp.low = 0;
      temp.high = 0;
      temp_trunc.low = 0;
      temp_trunc.high = 0;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     If the fractional bits are not zero, add 1 to temp to do so.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    {
      double_int one;
      one.low = 1;
      one.high = 0;
      temp = double_int_add (temp, one);
    }

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp.low, temp.high, -1,
			     (double_int_negative_p (temp)
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
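
/* Illustrative example of the rounding above: for a signed fixed-point
   constant -1.5, the arithmetic right shift by FBIT yields -2; because the
   discarded fractional bits were nonzero, 1 is added back, giving -1, so
   the value is rounded toward zero as the comment requires.  */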

/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    {
      TREE_OVERFLOW (t) = 1;
      TREE_CONSTANT_OVERFLOW (t) = 1;
    }
  else if (TREE_CONSTANT_OVERFLOW (arg1))
    TREE_CONSTANT_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    {
      TREE_OVERFLOW (t) = 1;
      TREE_CONSTANT_OVERFLOW (t) = 1;
    }
  else if (TREE_CONSTANT_OVERFLOW (arg1))
    TREE_CONSTANT_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    {
      TREE_OVERFLOW (t) = 1;
      TREE_CONSTANT_OVERFLOW (t) = 1;
    }
  else if (TREE_CONSTANT_OVERFLOW (arg1))
    TREE_CONSTANT_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
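
/* Usage sketch (illustrative): callers such as fold_convert below first try
   fold_convert_const (NOP_EXPR, type, arg); a NULL_TREE result simply means
   "not a foldable constant conversion" and the caller falls back to
   building the conversion expression explicitly.  */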

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);
	}
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1 (FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1 (NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1 (FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1 (FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return build2 (COMPLEX_EXPR, type,
			 fold_convert (TREE_TYPE (type), arg),
			 fold_convert (TREE_TYPE (type), integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
		return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert (TREE_TYPE (type), rpart);
	    ipart = fold_convert (TREE_TYPE (type), ipart);
	    return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      if (TREE_CODE (tem) == MODIFY_EXPR)
	return tem;
      return fold_build1 (NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
}

/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}

/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
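
/* Example (illustrative): when NaNs need not be honored, the inverse of
   a < b is a >= b.  When NaNs are honored (and trapping math is off), the
   inverse of LT_EXPR is UNGE_EXPR instead, since !(a < b) must be true for
   unordered operands while a >= b would be false for them.  */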

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
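
/* Sketch of the encoding these two functions translate (illustrative,
   relying on the comparison_code values defined earlier in this file):
   LT, EQ and GT occupy distinct bits, so COMPCODE_LE == COMPCODE_LT
   | COMPCODE_EQ and COMPCODE_LTGT == COMPCODE_LT | COMPCODE_GT.  That is
   what lets combine_comparisons below merge two comparisons with plain
   bitwise AND/OR.  */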

/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
			truth_type, ll_arg, lr_arg);
}
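
/* Worked example (illustrative): for (a < b) && (a == b) on identical
   operands, lcompcode & rcompcode is COMPCODE_LT & COMPCODE_EQ, i.e.
   COMPCODE_FALSE, so the conjunction folds to constant false, while
   (a < b) || (a == b) yields COMPCODE_LE and folds to a <= b.  */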

/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us. In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
				   TREE_REAL_CST (arg1)))
	  return 1;

	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  tree v1, v2;

	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    flags))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return v1 == v2;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, then both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	CASE_CONVERT:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case MISALIGNED_INDIRECT_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.
	     Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  return (OP_SAME (0)
		  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
					  TREE_OPERAND (arg1, 1))
		      || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  return OP_SAME_WITH_NULL (0)
		 && OP_SAME (1)
		 && OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case COND_EXPR:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				 flags))
	    return 0;

	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return 0;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }
	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
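
/* Example (illustrative): x + y compares equal to y + x through the
   commutative retry above, and -0.0 is considered equal to 0.0 only when
   the mode does not honor signed zeros, exactly as the REAL_CST case
   spells out.  */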

/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}

/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}

/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold_build3 (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2 (code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void. */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void. */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
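
/* Example (illustrative): when folding f () * 0 the call cannot simply be
   dropped, so omit_one_operand (type, integer_zero_node, call) builds the
   COMPOUND_EXPR (f (), 0), evaluating the side effect while still yielding
   the constant result.  */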

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code,
				     HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      return build2 (code, type,
		     TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      return build2 (TRUTH_OR_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2 (TRUTH_XOR_EXPR, type,
		       invert_truthvalue (TREE_OPERAND (arg, 0)),
		       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);
	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
		       VOID_TYPE_P (TREE_TYPE (arg1))
		       ? arg1 : invert_truthvalue (arg1),
		       VOID_TYPE_P (TREE_TYPE (arg2))
		       ? arg2 : invert_truthvalue (arg2));
      }

    case COMPOUND_EXPR:
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1 (TRUTH_NOT_EXPR, type, arg);

    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      return build2 (EQ_EXPR, type, arg,
		     build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }

  return NULL_TREE;
}

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue (tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (arg);
  if (!tem)
    tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
}

/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert (type, common);
  left = fold_convert (type, left);
  right = fold_convert (type, right);
  return fold_build2 (TREE_CODE (arg0), type, common,
		      fold_build2 (code, type, left, right));
}
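
/* Illustrative follow-up to the example in the comment above: with B and C
   the constants 1 and 2, (A | 1) & (A | 2) is rewritten to A | (1 & 2);
   constant folding then reduces the inner operation to 0, and further
   simplification leaves just A.  */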

/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
			fold_build2 (code, type,
				     TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0)),
			TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2 (MULT_EXPR, type,
			  TREE_OPERAND (arg0, 0),
			  build_real (type, r0));
    }

  return NULL_TREE;
}

/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
			fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}

/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}

/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}

/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}

/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_unary
	      || TREE_CODE_CLASS (code) == tcc_binary)
	    arg0_type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  We test arg0_type since often the return type
	     of, e.g. EQ_EXPR, is boolean.  */
	  if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high,
				  in_p, low, high, 1,
				  build_int_cst (arg0_type, 0),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we have a nonzero low
		 bound, reverse the range so it goes from zero to the low bound
		 minus 1.  */
	      if (high == 0 && low && ! integer_zerop (low))
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = build_int_cst (arg0_type, 0);
		}
	    }

	  exp = arg0;
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, exp_type,
			       build_int_cst (exp_type, 0),
			       0, high, 1);
	  n_high = range_binop (MINUS_EXPR, exp_type,
				build_int_cst (exp_type, 0),
				0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1  */
	  exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
			build_int_cst (exp_type, 1));
	  continue;

	case PLUS_EXPR: case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	     move a constant to the other side.  */
	  if (!TYPE_UNSIGNED (arg0_type)
	      && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       arg0_type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				arg0_type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    *strict_overflow_p = true;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	CASE_CONVERT: case NON_LVALUE_EXPR:
	  if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	    break;

	  if (! INTEGRAL_TYPE_P (arg0_type)
	      || (low != 0 && ! int_fits_type_p (low, arg0_type))
	      || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = fold_convert (arg0_type, n_low);

	  if (n_high != 0)
	    n_high = fold_convert (arg0_type, n_high);

	  /* If we're converting arg0 from an unsigned type, to exp,
	     a signed type, we will be doing the comparison as unsigned.
	     The tests above have already verified that LOW and HIGH
	     are both positive.

	     So we have to ensure that we will handle large unsigned
	     values the same way that the current signed bounds treat
	     negative values.  */

	  if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	    {
	      tree high_positive;
	      tree equiv_type;
	      /* For fixed-point modes, we need to pass the saturating flag
		 as the 2nd parameter.  */
	      if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
		equiv_type = lang_hooks.types.type_for_mode
			     (TYPE_MODE (arg0_type),
			      TYPE_SATURATING (arg0_type));
	      else
		equiv_type = lang_hooks.types.type_for_mode
			     (TYPE_MODE (arg0_type), 1);

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (arg0_type);

	      if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
		high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
					     fold_convert (arg0_type,
							   high_positive),
					     build_int_cst (arg0_type, 1));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
4420 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4421 type, TYPE, return an expression to test if EXP is in (or out of, depending
4422 on IN_P) the range. Return 0 if the test couldn't be created. */
4425 build_range_check (tree type
, tree exp
, int in_p
, tree low
, tree high
)
4427 tree etype
= TREE_TYPE (exp
);
4430 #ifdef HAVE_canonicalize_funcptr_for_compare
4431 /* Disable this optimization for function pointer expressions
4432 on targets that require function pointer canonicalization. */
4433 if (HAVE_canonicalize_funcptr_for_compare
4434 && TREE_CODE (etype
) == POINTER_TYPE
4435 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4441 value
= build_range_check (type
, exp
, 1, low
, high
);
4443 return invert_truthvalue (value
);
4448 if (low
== 0 && high
== 0)
4449 return build_int_cst (type
, 1);
4452 return fold_build2 (LE_EXPR
, type
, exp
,
4453 fold_convert (etype
, high
));
4456 return fold_build2 (GE_EXPR
, type
, exp
,
4457 fold_convert (etype
, low
));
4459 if (operand_equal_p (low
, high
, 0))
4460 return fold_build2 (EQ_EXPR
, type
, exp
,
4461 fold_convert (etype
, low
));
4463 if (integer_zerop (low
))
4465 if (! TYPE_UNSIGNED (etype
))
4467 etype
= unsigned_type_for (etype
);
4468 high
= fold_convert (etype
, high
);
4469 exp
= fold_convert (etype
, exp
);
4471 return build_range_check (type
, exp
, 1, 0, high
);
4474 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4475 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4477 unsigned HOST_WIDE_INT lo
;
4481 prec
= TYPE_PRECISION (etype
);
4482 if (prec
<= HOST_BITS_PER_WIDE_INT
)
4485 lo
= ((unsigned HOST_WIDE_INT
) 1 << (prec
- 1)) - 1;
4489 hi
= ((HOST_WIDE_INT
) 1 << (prec
- HOST_BITS_PER_WIDE_INT
- 1)) - 1;
4490 lo
= (unsigned HOST_WIDE_INT
) -1;
4493 if (TREE_INT_CST_HIGH (high
) == hi
&& TREE_INT_CST_LOW (high
) == lo
)
4495 if (TYPE_UNSIGNED (etype
))
4497 tree signed_etype
= signed_type_for (etype
);
4498 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
4500 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
4502 etype
= signed_etype
;
4503 exp
= fold_convert (etype
, exp
);
4505 return fold_build2 (GT_EXPR
, type
, exp
,
4506 build_int_cst (etype
, 0));
4510 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4511 This requires wrap-around arithmetics for the type of the expression. */
4512 switch (TREE_CODE (etype
))
4515 /* There is no requirement that LOW be within the range of ETYPE
4516 if the latter is a subtype. It must, however, be within the base
4517 type of ETYPE. So be sure we do the subtraction in that type. */
4518 if (TREE_TYPE (etype
))
4519 etype
= TREE_TYPE (etype
);
4524 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
),
4525 TYPE_UNSIGNED (etype
));
4532 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4533 if (TREE_CODE (etype
) == INTEGER_TYPE
4534 && !TYPE_OVERFLOW_WRAPS (etype
))
4536 tree utype
, minv
, maxv
;
4538 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4539 for the type in question, as we rely on this here. */
4540 utype
= unsigned_type_for (etype
);
4541 maxv
= fold_convert (utype
, TYPE_MAX_VALUE (etype
));
4542 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4543 integer_one_node
, 1);
4544 minv
= fold_convert (utype
, TYPE_MIN_VALUE (etype
));
4546 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4553 high
= fold_convert (etype
, high
);
4554 low
= fold_convert (etype
, low
);
4555 exp
= fold_convert (etype
, exp
);
4557 value
= const_binop (MINUS_EXPR
, high
, low
, 0);
4560 if (POINTER_TYPE_P (etype
))
4562 if (value
!= 0 && !TREE_OVERFLOW (value
))
4564 low
= fold_convert (sizetype
, low
);
4565 low
= fold_build1 (NEGATE_EXPR
, sizetype
, low
);
4566 return build_range_check (type
,
4567 fold_build2 (POINTER_PLUS_EXPR
, etype
, exp
, low
),
4568 1, build_int_cst (etype
, 0), value
);
4573 if (value
!= 0 && !TREE_OVERFLOW (value
))
4574 return build_range_check (type
,
4575 fold_build2 (MINUS_EXPR
, etype
, exp
, low
),
4576 1, build_int_cst (etype
, 0), value
);
4581 /* Return the predecessor of VAL in its type, handling the infinite case. */
4584 range_predecessor (tree val
)
4586 tree type
= TREE_TYPE (val
);
4588 if (INTEGRAL_TYPE_P (type
)
4589 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
4592 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0, integer_one_node
, 0);
4595 /* Return the successor of VAL in its type, handling the infinite case. */
4598 range_successor (tree val
)
4600 tree type
= TREE_TYPE (val
);
4602 if (INTEGRAL_TYPE_P (type
)
4603 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
4606 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0, integer_one_node
, 0);
4609 /* Given two ranges, see if we can merge them into one. Return 1 if we
4610 can, 0 if we can't. Set the output range into the specified parameters. */
4613 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4614 tree high0
, int in1_p
, tree low1
, tree high1
)
4622 int lowequal
= ((low0
== 0 && low1
== 0)
4623 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4624 low0
, 0, low1
, 0)));
4625 int highequal
= ((high0
== 0 && high1
== 0)
4626 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4627 high0
, 1, high1
, 1)));
4629 /* Make range 0 be the range that starts first, or ends last if they
4630 start at the same value. Swap them if it isn't. */
4631 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4634 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4635 high1
, 1, high0
, 1))))
4637 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4638 tem
= low0
, low0
= low1
, low1
= tem
;
4639 tem
= high0
, high0
= high1
, high1
= tem
;
4642 /* Now flag two cases, whether the ranges are disjoint or whether the
4643 second range is totally subsumed in the first. Note that the tests
4644 below are simplified by the ones above. */
4645 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4646 high0
, 1, low1
, 0));
4647 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4648 high1
, 1, high0
, 1));
4650 /* We now have four cases, depending on whether we are including or
4651 excluding the two ranges. */
4654 /* If they don't overlap, the result is false. If the second range
4655 is a subset it is the result. Otherwise, the range is from the start
4656 of the second to the end of the first. */
4658 in_p
= 0, low
= high
= 0;
4660 in_p
= 1, low
= low1
, high
= high1
;
4662 in_p
= 1, low
= low1
, high
= high0
;
4665 else if (in0_p
&& ! in1_p
)
4667 /* If they don't overlap, the result is the first range. If they are
4668 equal, the result is false. If the second range is a subset of the
4669 first, and the ranges begin at the same place, we go from just after
4670 the end of the second range to the end of the first. If the second
4671 range is not a subset of the first, or if it is a subset and both
4672 ranges end at the same place, the range starts at the start of the
4673 first range and ends just before the second range.
4674 Otherwise, we can't describe this as a single range. */
4676 in_p
= 1, low
= low0
, high
= high0
;
4677 else if (lowequal
&& highequal
)
4678 in_p
= 0, low
= high
= 0;
4679 else if (subset
&& lowequal
)
4681 low
= range_successor (high1
);
4686 /* We are in the weird situation where high0 > high1 but
4687 high1 has no successor. Punt. */
4691 else if (! subset
|| highequal
)
4694 high
= range_predecessor (low1
);
4698 /* low0 < low1 but low1 has no predecessor. Punt. */
4706 else if (! in0_p
&& in1_p
)
4708 /* If they don't overlap, the result is the second range. If the second
4709 is a subset of the first, the result is false. Otherwise,
4710 the range starts just after the first range and ends at the
4711 end of the second. */
4713 in_p
= 1, low
= low1
, high
= high1
;
4714 else if (subset
|| highequal
)
4715 in_p
= 0, low
= high
= 0;
4718 low
= range_successor (high0
);
4723 /* high1 > high0 but high0 has no successor. Punt. */
4731 /* The case where we are excluding both ranges. Here the complex case
4732 is if they don't overlap. In that case, the only time we have a
4733 range is if they are adjacent. If the second is a subset of the
4734 first, the result is the first. Otherwise, the range to exclude
4735 starts at the beginning of the first range and ends at the end of the
4739 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4740 range_successor (high0
),
4742 in_p
= 0, low
= low0
, high
= high1
;
4745 /* Canonicalize - [min, x] into - [-, x]. */
4746 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4747 switch (TREE_CODE (TREE_TYPE (low0
)))
4750 if (TYPE_PRECISION (TREE_TYPE (low0
))
4751 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4755 if (tree_int_cst_equal (low0
,
4756 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4760 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4761 && integer_zerop (low0
))
4768 /* Canonicalize - [x, max] into - [x, -]. */
4769 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4770 switch (TREE_CODE (TREE_TYPE (high1
)))
4773 if (TYPE_PRECISION (TREE_TYPE (high1
))
4774 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4778 if (tree_int_cst_equal (high1
,
4779 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4783 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4784 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4786 integer_one_node
, 1)))
4793 /* The ranges might be also adjacent between the maximum and
4794 minimum values of the given type. For
4795 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4796 return + [x + 1, y - 1]. */
4797 if (low0
== 0 && high1
== 0)
4799 low
= range_successor (high0
);
4800 high
= range_predecessor (low1
);
4801 if (low
== 0 || high
== 0)
4811 in_p
= 0, low
= low0
, high
= high0
;
4813 in_p
= 0, low
= low0
, high
= high1
;
4816 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4821 /* Subroutine of fold, looking inside expressions of the form
4822 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4823 of the COND_EXPR. This function is being used also to optimize
4824 A op B ? C : A, by reversing the comparison first.
4826 Return a folded expression whose code is not a COND_EXPR
4827 anymore, or NULL_TREE if no folding opportunity is found. */
4830 fold_cond_expr_with_comparison (tree type
, tree arg0
, tree arg1
, tree arg2
)
4832 enum tree_code comp_code
= TREE_CODE (arg0
);
4833 tree arg00
= TREE_OPERAND (arg0
, 0);
4834 tree arg01
= TREE_OPERAND (arg0
, 1);
4835 tree arg1_type
= TREE_TYPE (arg1
);
4841 /* If we have A op 0 ? A : -A, consider applying the following
4844 A == 0? A : -A same as -A
4845 A != 0? A : -A same as A
4846 A >= 0? A : -A same as abs (A)
4847 A > 0? A : -A same as abs (A)
4848 A <= 0? A : -A same as -abs (A)
4849 A < 0? A : -A same as -abs (A)
4851 None of these transformations work for modes with signed
4852 zeros. If A is +/-0, the first two transformations will
4853 change the sign of the result (from +0 to -0, or vice
4854 versa). The last four will fix the sign of the result,
4855 even though the original expressions could be positive or
4856 negative, depending on the sign of A.
4858 Note that all these transformations are correct if A is
4859 NaN, since the two alternatives (A and -A) are also NaNs. */
4860 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4861 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
4862 ? real_zerop (arg01
)
4863 : integer_zerop (arg01
))
4864 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4865 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4866 /* In the case that A is of the form X-Y, '-A' (arg2) may
4867 have already been folded to Y-X, check for that. */
4868 || (TREE_CODE (arg1
) == MINUS_EXPR
4869 && TREE_CODE (arg2
) == MINUS_EXPR
4870 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4871 TREE_OPERAND (arg2
, 1), 0)
4872 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4873 TREE_OPERAND (arg2
, 0), 0))))
4878 tem
= fold_convert (arg1_type
, arg1
);
4879 return pedantic_non_lvalue (fold_convert (type
, negate_expr (tem
)));
4882 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4885 if (flag_trapping_math
)
4890 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4891 arg1
= fold_convert (signed_type_for
4892 (TREE_TYPE (arg1
)), arg1
);
4893 tem
= fold_build1 (ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4894 return pedantic_non_lvalue (fold_convert (type
, tem
));
4897 if (flag_trapping_math
)
4901 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4902 arg1
= fold_convert (signed_type_for
4903 (TREE_TYPE (arg1
)), arg1
);
4904 tem
= fold_build1 (ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4905 return negate_expr (fold_convert (type
, tem
));
4907 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4911 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4912 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4913 both transformations are correct when A is NaN: A != 0
4914 is then true, and A == 0 is false. */
4916 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4917 && integer_zerop (arg01
) && integer_zerop (arg2
))
4919 if (comp_code
== NE_EXPR
)
4920 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4921 else if (comp_code
== EQ_EXPR
)
4922 return build_int_cst (type
, 0);
4925 /* Try some transformations of A op B ? A : B.
4927 A == B? A : B same as B
4928 A != B? A : B same as A
4929 A >= B? A : B same as max (A, B)
4930 A > B? A : B same as max (B, A)
4931 A <= B? A : B same as min (A, B)
4932 A < B? A : B same as min (B, A)
4934 As above, these transformations don't work in the presence
4935 of signed zeros. For example, if A and B are zeros of
4936 opposite sign, the first two transformations will change
4937 the sign of the result. In the last four, the original
4938 expressions give different results for (A=+0, B=-0) and
4939 (A=-0, B=+0), but the transformed expressions do not.
4941 The first two transformations are correct if either A or B
4942 is a NaN. In the first transformation, the condition will
4943 be false, and B will indeed be chosen. In the case of the
4944 second transformation, the condition A != B will be true,
4945 and A will be chosen.
4947 The conversions to max() and min() are not correct if B is
4948 a number and A is not. The conditions in the original
4949 expressions will be false, so all four give B. The min()
4950 and max() versions would give a NaN instead. */
4951 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4952 && operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
4953 /* Avoid these transformations if the COND_EXPR may be used
4954 as an lvalue in the C++ front-end. PR c++/19199. */
4956 || (strcmp (lang_hooks
.name
, "GNU C++") != 0
4957 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
4958 || ! maybe_lvalue_p (arg1
)
4959 || ! maybe_lvalue_p (arg2
)))
4961 tree comp_op0
= arg00
;
4962 tree comp_op1
= arg01
;
4963 tree comp_type
= TREE_TYPE (comp_op0
);
4965 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4966 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
4976 return pedantic_non_lvalue (fold_convert (type
, arg2
));
4978 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4983 /* In C++ a ?: expression can be an lvalue, so put the
4984 operand which will be used if they are equal first
4985 so that we can convert this back to the
4986 corresponding COND_EXPR. */
4987 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4989 comp_op0
= fold_convert (comp_type
, comp_op0
);
4990 comp_op1
= fold_convert (comp_type
, comp_op1
);
4991 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
4992 ? fold_build2 (MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
4993 : fold_build2 (MIN_EXPR
, comp_type
, comp_op1
, comp_op0
);
4994 return pedantic_non_lvalue (fold_convert (type
, tem
));
5001 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
5003 comp_op0
= fold_convert (comp_type
, comp_op0
);
5004 comp_op1
= fold_convert (comp_type
, comp_op1
);
5005 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
5006 ? fold_build2 (MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
5007 : fold_build2 (MAX_EXPR
, comp_type
, comp_op1
, comp_op0
);
5008 return pedantic_non_lvalue (fold_convert (type
, tem
));
5012 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
5013 return pedantic_non_lvalue (fold_convert (type
, arg2
));
5016 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
5017 return pedantic_non_lvalue (fold_convert (type
, arg1
));
5020 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
5025 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5026 we might still be able to simplify this. For example,
5027 if C1 is one less or one more than C2, this might have started
5028 out as a MIN or MAX and been transformed by this function.
5029 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5031 if (INTEGRAL_TYPE_P (type
)
5032 && TREE_CODE (arg01
) == INTEGER_CST
5033 && TREE_CODE (arg2
) == INTEGER_CST
)
5037 /* We can replace A with C1 in this case. */
5038 arg1
= fold_convert (type
, arg01
);
5039 return fold_build3 (COND_EXPR
, type
, arg0
, arg1
, arg2
);
5042 /* If C1 is C2 + 1, this is min(A, C2). */
5043 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
5045 && operand_equal_p (arg01
,
5046 const_binop (PLUS_EXPR
, arg2
,
5047 build_int_cst (type
, 1), 0),
5049 return pedantic_non_lvalue (fold_build2 (MIN_EXPR
,
5051 fold_convert (type
, arg1
),
5056 /* If C1 is C2 - 1, this is min(A, C2). */
5057 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
5059 && operand_equal_p (arg01
,
5060 const_binop (MINUS_EXPR
, arg2
,
5061 build_int_cst (type
, 1), 0),
5063 return pedantic_non_lvalue (fold_build2 (MIN_EXPR
,
5065 fold_convert (type
, arg1
),
5070 /* If C1 is C2 - 1, this is max(A, C2). */
5071 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
5073 && operand_equal_p (arg01
,
5074 const_binop (MINUS_EXPR
, arg2
,
5075 build_int_cst (type
, 1), 0),
5077 return pedantic_non_lvalue (fold_build2 (MAX_EXPR
,
5079 fold_convert (type
, arg1
),
5084 /* If C1 is C2 + 1, this is max(A, C2). */
5085 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
5087 && operand_equal_p (arg01
,
5088 const_binop (PLUS_EXPR
, arg2
,
5089 build_int_cst (type
, 1), 0),
5091 return pedantic_non_lvalue (fold_build2 (MAX_EXPR
,
5093 fold_convert (type
, arg1
),
5107 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5108 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5109 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5113 /* EXP is some logical combination of boolean tests. See if we can
5114 merge it into some range test. Return the new tree if so. */
5117 fold_range_test (enum tree_code code
, tree type
, tree op0
, tree op1
)
5119 int or_op
= (code
== TRUTH_ORIF_EXPR
5120 || code
== TRUTH_OR_EXPR
);
5121 int in0_p
, in1_p
, in_p
;
5122 tree low0
, low1
, low
, high0
, high1
, high
;
5123 bool strict_overflow_p
= false;
5124 tree lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
5125 tree rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
5127 const char * const warnmsg
= G_("assuming signed overflow does not occur "
5128 "when simplifying range test");
5130 /* If this is an OR operation, invert both sides; we will invert
5131 again at the end. */
5133 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
5135 /* If both expressions are the same, if we can merge the ranges, and we
5136 can build the range test, return it or it inverted. If one of the
5137 ranges is always true or always false, consider it to be the same
5138 expression as the other. */
5139 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
5140 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
5142 && 0 != (tem
= (build_range_check (type
,
5144 : rhs
!= 0 ? rhs
: integer_zero_node
,
5147 if (strict_overflow_p
)
5148 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
5149 return or_op
? invert_truthvalue (tem
) : tem
;
5152 /* On machines where the branch cost is expensive, if this is a
5153 short-circuited branch and the underlying object on both sides
5154 is the same, make a non-short-circuit operation. */
5155 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5156 && lhs
!= 0 && rhs
!= 0
5157 && (code
== TRUTH_ANDIF_EXPR
5158 || code
== TRUTH_ORIF_EXPR
)
5159 && operand_equal_p (lhs
, rhs
, 0))
5161 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5162 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5163 which cases we can't do this. */
5164 if (simple_operand_p (lhs
))
5165 return build2 (code
== TRUTH_ANDIF_EXPR
5166 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5169 else if (lang_hooks
.decls
.global_bindings_p () == 0
5170 && ! CONTAINS_PLACEHOLDER_P (lhs
))
5172 tree common
= save_expr (lhs
);
5174 if (0 != (lhs
= build_range_check (type
, common
,
5175 or_op
? ! in0_p
: in0_p
,
5177 && (0 != (rhs
= build_range_check (type
, common
,
5178 or_op
? ! in1_p
: in1_p
,
5181 if (strict_overflow_p
)
5182 fold_overflow_warning (warnmsg
,
5183 WARN_STRICT_OVERFLOW_COMPARISON
);
5184 return build2 (code
== TRUTH_ANDIF_EXPR
5185 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5194 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5195 bit value. Arrange things so the extra bits will be set to zero if and
5196 only if C is signed-extended to its full width. If MASK is nonzero,
5197 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5200 unextend (tree c
, int p
, int unsignedp
, tree mask
)
5202 tree type
= TREE_TYPE (c
);
5203 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
5206 if (p
== modesize
|| unsignedp
)
5209 /* We work by getting just the sign bit into the low-order bit, then
5210 into the high-order bit, then sign-extend. We then XOR that value
5212 temp
= const_binop (RSHIFT_EXPR
, c
, size_int (p
- 1), 0);
5213 temp
= const_binop (BIT_AND_EXPR
, temp
, size_int (1), 0);
5215 /* We must use a signed type in order to get an arithmetic right shift.
5216 However, we must also avoid introducing accidental overflows, so that
5217 a subsequent call to integer_zerop will work. Hence we must
5218 do the type conversion here. At this point, the constant is either
5219 zero or one, and the conversion to a signed type can never overflow.
5220 We could get an overflow if this conversion is done anywhere else. */
5221 if (TYPE_UNSIGNED (type
))
5222 temp
= fold_convert (signed_type_for (type
), temp
);
5224 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1), 0);
5225 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1), 0);
5227 temp
= const_binop (BIT_AND_EXPR
, temp
,
5228 fold_convert (TREE_TYPE (c
), mask
), 0);
5229 /* If necessary, convert the type back to match the type of C. */
5230 if (TYPE_UNSIGNED (type
))
5231 temp
= fold_convert (type
, temp
);
5233 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
, 0));
5236 /* Find ways of folding logical expressions of LHS and RHS:
5237 Try to merge two comparisons to the same innermost item.
5238 Look for range tests like "ch >= '0' && ch <= '9'".
5239 Look for combinations of simple terms on machines with expensive branches
5240 and evaluate the RHS unconditionally.
5242 For example, if we have p->a == 2 && p->b == 4 and we can make an
5243 object large enough to span both A and B, we can do this with a comparison
5244 against the object ANDed with the a mask.
5246 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5247 operations to do this with one comparison.
5249 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5250 function and the one above.
5252 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5253 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5255 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5258 We return the simplified tree or 0 if no optimization is possible. */
5261 fold_truthop (enum tree_code code
, tree truth_type
, tree lhs
, tree rhs
)
5263 /* If this is the "or" of two comparisons, we can do something if
5264 the comparisons are NE_EXPR. If this is the "and", we can do something
5265 if the comparisons are EQ_EXPR. I.e.,
5266 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5268 WANTED_CODE is this operation code. For single bit fields, we can
5269 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5270 comparison for one-bit fields. */
5272 enum tree_code wanted_code
;
5273 enum tree_code lcode
, rcode
;
5274 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
5275 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
5276 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
5277 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
5278 HOST_WIDE_INT xll_bitpos
, xrl_bitpos
;
5279 HOST_WIDE_INT lnbitsize
, lnbitpos
;
5280 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
5281 enum machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
5282 enum machine_mode lnmode
;
5283 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
5284 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
5285 tree l_const
, r_const
;
5286 tree lntype
, result
;
5287 int first_bit
, end_bit
;
5289 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
5290 enum tree_code orig_code
= code
;
5292 /* Start by getting the comparison codes. Fail if anything is volatile.
5293 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5294 it were surrounded with a NE_EXPR. */
5296 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
5299 lcode
= TREE_CODE (lhs
);
5300 rcode
= TREE_CODE (rhs
);
5302 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
5304 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
5305 build_int_cst (TREE_TYPE (lhs
), 0));
5309 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
5311 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
5312 build_int_cst (TREE_TYPE (rhs
), 0));
5316 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
5317 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
5320 ll_arg
= TREE_OPERAND (lhs
, 0);
5321 lr_arg
= TREE_OPERAND (lhs
, 1);
5322 rl_arg
= TREE_OPERAND (rhs
, 0);
5323 rr_arg
= TREE_OPERAND (rhs
, 1);
5325 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5326 if (simple_operand_p (ll_arg
)
5327 && simple_operand_p (lr_arg
))
5330 if (operand_equal_p (ll_arg
, rl_arg
, 0)
5331 && operand_equal_p (lr_arg
, rr_arg
, 0))
5333 result
= combine_comparisons (code
, lcode
, rcode
,
5334 truth_type
, ll_arg
, lr_arg
);
5338 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
5339 && operand_equal_p (lr_arg
, rl_arg
, 0))
5341 result
= combine_comparisons (code
, lcode
,
5342 swap_tree_comparison (rcode
),
5343 truth_type
, ll_arg
, lr_arg
);
5349 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
5350 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
5352 /* If the RHS can be evaluated unconditionally and its operands are
5353 simple, it wins to evaluate the RHS unconditionally on machines
5354 with expensive branches. In this case, this isn't a comparison
5355 that can be merged. Avoid doing this if the RHS is a floating-point
5356 comparison since those can trap. */
5358 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
5360 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
5361 && simple_operand_p (rl_arg
)
5362 && simple_operand_p (rr_arg
))
5364 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5365 if (code
== TRUTH_OR_EXPR
5366 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
5367 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
5368 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5369 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5370 return build2 (NE_EXPR
, truth_type
,
5371 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5373 build_int_cst (TREE_TYPE (ll_arg
), 0));
5375 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5376 if (code
== TRUTH_AND_EXPR
5377 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5378 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5379 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5380 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5381 return build2 (EQ_EXPR
, truth_type
,
5382 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5384 build_int_cst (TREE_TYPE (ll_arg
), 0));
5386 if (LOGICAL_OP_NON_SHORT_CIRCUIT
)
5388 if (code
!= orig_code
|| lhs
!= orig_lhs
|| rhs
!= orig_rhs
)
5389 return build2 (code
, truth_type
, lhs
, rhs
);
5394 /* See if the comparisons can be merged. Then get all the parameters for
5397 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5398 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
5402 ll_inner
= decode_field_reference (ll_arg
,
5403 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5404 &ll_unsignedp
, &volatilep
, &ll_mask
,
5406 lr_inner
= decode_field_reference (lr_arg
,
5407 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5408 &lr_unsignedp
, &volatilep
, &lr_mask
,
5410 rl_inner
= decode_field_reference (rl_arg
,
5411 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5412 &rl_unsignedp
, &volatilep
, &rl_mask
,
5414 rr_inner
= decode_field_reference (rr_arg
,
5415 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5416 &rr_unsignedp
, &volatilep
, &rr_mask
,
5419 /* It must be true that the inner operation on the lhs of each
5420 comparison must be the same if we are to be able to do anything.
5421 Then see if we have constants. If not, the same must be true for
5423 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
5424 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5427 if (TREE_CODE (lr_arg
) == INTEGER_CST
5428 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5429 l_const
= lr_arg
, r_const
= rr_arg
;
5430 else if (lr_inner
== 0 || rr_inner
== 0
5431 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5434 l_const
= r_const
= 0;
5436 /* If either comparison code is not correct for our logical operation,
5437 fail. However, we can convert a one-bit comparison against zero into
5438 the opposite comparison against that bit being set in the field. */
5440 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5441 if (lcode
!= wanted_code
)
5443 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5445 /* Make the left operand unsigned, since we are only interested
5446 in the value of one bit. Otherwise we are doing the wrong
5455 /* This is analogous to the code for l_const above. */
5456 if (rcode
!= wanted_code
)
5458 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5467 /* See if we can find a mode that contains both fields being compared on
5468 the left. If we can't, fail. Otherwise, update all constants and masks
5469 to be relative to a field of that size. */
5470 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5471 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5472 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
5473 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5475 if (lnmode
== VOIDmode
)
5478 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5479 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5480 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5481 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5483 if (BYTES_BIG_ENDIAN
)
5485 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5486 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5489 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert (lntype
, ll_mask
),
5490 size_int (xll_bitpos
), 0);
5491 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert (lntype
, rl_mask
),
5492 size_int (xrl_bitpos
), 0);
5496 l_const
= fold_convert (lntype
, l_const
);
5497 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5498 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
), 0);
5499 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5500 fold_build1 (BIT_NOT_EXPR
,
5504 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5506 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5511 r_const
= fold_convert (lntype
, r_const
);
5512 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5513 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
), 0);
5514 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5515 fold_build1 (BIT_NOT_EXPR
,
5519 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5521 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5525 /* Handle the case of comparisons with constants. If there is something in
5526 common between the masks, those bits of the constants must be the same.
5527 If not, the condition is always false. Test for this to avoid generating
5528 incorrect code below. */
5529 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
, 0);
5530 if (! integer_zerop (result
)
5531 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
, 0),
5532 const_binop (BIT_AND_EXPR
, result
, r_const
, 0)) != 1)
5534 if (wanted_code
== NE_EXPR
)
5536 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5537 return constant_boolean_node (true, truth_type
);
5541 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5542 return constant_boolean_node (false, truth_type
);
5549 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5553 optimize_minmax_comparison (enum tree_code code
, tree type
, tree op0
, tree op1
)
5556 enum tree_code op_code
;
5559 int consts_equal
, consts_lt
;
5562 STRIP_SIGN_NOPS (arg0
);
5564 op_code
= TREE_CODE (arg0
);
5565 minmax_const
= TREE_OPERAND (arg0
, 1);
5566 comp_const
= fold_convert (TREE_TYPE (arg0
), op1
);
5567 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5568 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5569 inner
= TREE_OPERAND (arg0
, 0);
5571 /* If something does not permit us to optimize, return the original tree. */
5572 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5573 || TREE_CODE (comp_const
) != INTEGER_CST
5574 || TREE_OVERFLOW (comp_const
)
5575 || TREE_CODE (minmax_const
) != INTEGER_CST
5576 || TREE_OVERFLOW (minmax_const
))
5579 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5580 and GT_EXPR, doing the rest with recursive calls using logical
5584 case NE_EXPR
: case LT_EXPR
: case LE_EXPR
:
5586 tree tem
= optimize_minmax_comparison (invert_tree_comparison (code
, false),
5589 return invert_truthvalue (tem
);
5595 fold_build2 (TRUTH_ORIF_EXPR
, type
,
5596 optimize_minmax_comparison
5597 (EQ_EXPR
, type
, arg0
, comp_const
),
5598 optimize_minmax_comparison
5599 (GT_EXPR
, type
, arg0
, comp_const
));
5602 if (op_code
== MAX_EXPR
&& consts_equal
)
5603 /* MAX (X, 0) == 0 -> X <= 0 */
5604 return fold_build2 (LE_EXPR
, type
, inner
, comp_const
);
5606 else if (op_code
== MAX_EXPR
&& consts_lt
)
5607 /* MAX (X, 0) == 5 -> X == 5 */
5608 return fold_build2 (EQ_EXPR
, type
, inner
, comp_const
);
5610 else if (op_code
== MAX_EXPR
)
5611 /* MAX (X, 0) == -1 -> false */
5612 return omit_one_operand (type
, integer_zero_node
, inner
);
5614 else if (consts_equal
)
5615 /* MIN (X, 0) == 0 -> X >= 0 */
5616 return fold_build2 (GE_EXPR
, type
, inner
, comp_const
);
5619 /* MIN (X, 0) == 5 -> false */
5620 return omit_one_operand (type
, integer_zero_node
, inner
);
5623 /* MIN (X, 0) == -1 -> X == -1 */
5624 return fold_build2 (EQ_EXPR
, type
, inner
, comp_const
);
5627 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5628 /* MAX (X, 0) > 0 -> X > 0
5629 MAX (X, 0) > 5 -> X > 5 */
5630 return fold_build2 (GT_EXPR
, type
, inner
, comp_const
);
5632 else if (op_code
== MAX_EXPR
)
5633 /* MAX (X, 0) > -1 -> true */
5634 return omit_one_operand (type
, integer_one_node
, inner
);
5636 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5637 /* MIN (X, 0) > 0 -> false
5638 MIN (X, 0) > 5 -> false */
5639 return omit_one_operand (type
, integer_zero_node
, inner
);
5642 /* MIN (X, 0) > -1 -> X > -1 */
5643 return fold_build2 (GT_EXPR
, type
, inner
, comp_const
);
5650 /* T is an integer expression that is being multiplied, divided, or taken a
5651 modulus (CODE says which and what kind of divide or modulus) by a
5652 constant C. See if we can eliminate that operation by folding it with
5653 other operations already in T. WIDE_TYPE, if non-null, is a type that
5654 should be used for the computation if wider than our type.
5656 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5657 (X * 2) + (Y * 4). We must, however, be assured that either the original
5658 expression would not overflow or that overflow is undefined for the type
5659 in the language in question.
5661 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5662 the machine has a multiply-accumulate insn or that this is part of an
5663 addressing calculation.
5665 If we return a non-null expression, it is an equivalent form of the
5666 original computation, but need not be in the original type.
5668 We set *STRICT_OVERFLOW_P to true if the return values depends on
5669 signed overflow being undefined. Otherwise we do not change
5670 *STRICT_OVERFLOW_P. */
5673 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5674 bool *strict_overflow_p
)
5676 /* To avoid exponential search depth, refuse to allow recursion past
5677 three levels. Beyond that (1) it's highly unlikely that we'll find
5678 something interesting and (2) we've probably processed it before
5679 when we built the inner expression. */
5688 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
5695 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5696 bool *strict_overflow_p
)
5698 tree type
= TREE_TYPE (t
);
5699 enum tree_code tcode
= TREE_CODE (t
);
5700 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5701 > GET_MODE_SIZE (TYPE_MODE (type
)))
5702 ? wide_type
: type
);
5704 int same_p
= tcode
== code
;
5705 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5706 bool sub_strict_overflow_p
;
5708 /* Don't deal with constants of zero here; they confuse the code below. */
5709 if (integer_zerop (c
))
5712 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5713 op0
= TREE_OPERAND (t
, 0);
5715 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5716 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5718 /* Note that we need not handle conditional operations here since fold
5719 already handles those cases. So just do arithmetic here. */
5723 /* For a constant, we can always simplify if we are a multiply
5724 or (for divide and modulus) if it is a multiple of our constant. */
5725 if (code
== MULT_EXPR
5726 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, t
, c
, 0)))
5727 return const_binop (code
, fold_convert (ctype
, t
),
5728 fold_convert (ctype
, c
), 0);
5731 CASE_CONVERT
: case NON_LVALUE_EXPR
:
5732 /* If op0 is an expression ... */
5733 if ((COMPARISON_CLASS_P (op0
)
5734 || UNARY_CLASS_P (op0
)
5735 || BINARY_CLASS_P (op0
)
5736 || VL_EXP_CLASS_P (op0
)
5737 || EXPRESSION_CLASS_P (op0
))
5738 /* ... and has wrapping overflow, and its type is smaller
5739 than ctype, then we cannot pass through as widening. */
5740 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
))
5741 && ! (TREE_CODE (TREE_TYPE (op0
)) == INTEGER_TYPE
5742 && TYPE_IS_SIZETYPE (TREE_TYPE (op0
)))
5743 && (TYPE_PRECISION (ctype
)
5744 > TYPE_PRECISION (TREE_TYPE (op0
))))
5745 /* ... or this is a truncation (t is narrower than op0),
5746 then we cannot pass through this narrowing. */
5747 || (TYPE_PRECISION (type
)
5748 < TYPE_PRECISION (TREE_TYPE (op0
)))
5749 /* ... or signedness changes for division or modulus,
5750 then we cannot pass through this conversion. */
5751 || (code
!= MULT_EXPR
5752 && (TYPE_UNSIGNED (ctype
)
5753 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
5754 /* ... or has undefined overflow while the converted to
5755 type has not, we cannot do the operation in the inner type
5756 as that would introduce undefined overflow. */
5757 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
))
5758 && !TYPE_OVERFLOW_UNDEFINED (type
))))
5761 /* Pass the constant down and see if we can make a simplification. If
5762 we can, replace this expression with the inner simplification for
5763 possible later conversion to our or some other type. */
5764 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
5765 && TREE_CODE (t2
) == INTEGER_CST
5766 && !TREE_OVERFLOW (t2
)
5767 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
5769 ? ctype
: NULL_TREE
,
5770 strict_overflow_p
))))
5775 /* If widening the type changes it from signed to unsigned, then we
5776 must avoid building ABS_EXPR itself as unsigned. */
5777 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
5779 tree cstype
= (*signed_type_for
) (ctype
);
5780 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
5783 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
5784 return fold_convert (ctype
, t1
);
5788 /* If the constant is negative, we cannot simplify this. */
5789 if (tree_int_cst_sgn (c
) == -1)
5793 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
5795 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
5798 case MIN_EXPR
: case MAX_EXPR
:
5799 /* If widening the type changes the signedness, then we can't perform
5800 this optimization as that changes the result. */
5801 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
5804 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5805 sub_strict_overflow_p
= false;
5806 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5807 &sub_strict_overflow_p
)) != 0
5808 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
5809 &sub_strict_overflow_p
)) != 0)
5811 if (tree_int_cst_sgn (c
) < 0)
5812 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
5813 if (sub_strict_overflow_p
)
5814 *strict_overflow_p
= true;
5815 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5816 fold_convert (ctype
, t2
));
5820 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
5821 /* If the second operand is constant, this is a multiplication
5822 or floor division, by a power of two, so we can treat it that
5823 way unless the multiplier or divisor overflows. Signed
5824 left-shift overflow is implementation-defined rather than
5825 undefined in C90, so do not convert signed left shift into
5827 if (TREE_CODE (op1
) == INTEGER_CST
5828 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
5829 /* const_binop may not detect overflow correctly,
5830 so check for it explicitly here. */
5831 && TYPE_PRECISION (TREE_TYPE (size_one_node
)) > TREE_INT_CST_LOW (op1
)
5832 && TREE_INT_CST_HIGH (op1
) == 0
5833 && 0 != (t1
= fold_convert (ctype
,
5834 const_binop (LSHIFT_EXPR
,
5837 && !TREE_OVERFLOW (t1
))
5838 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
5839 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
5840 ctype
, fold_convert (ctype
, op0
), t1
),
5841 c
, code
, wide_type
, strict_overflow_p
);
5844 case PLUS_EXPR
: case MINUS_EXPR
:
5845 /* See if we can eliminate the operation on both sides. If we can, we
5846 can return a new PLUS or MINUS. If we can't, the only remaining
5847 cases where we can do anything are if the second operand is a
5849 sub_strict_overflow_p
= false;
5850 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5851 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5852 if (t1
!= 0 && t2
!= 0
5853 && (code
== MULT_EXPR
5854 /* If not multiplication, we can only do this if both operands
5855 are divisible by c. */
5856 || (multiple_of_p (ctype
, op0
, c
)
5857 && multiple_of_p (ctype
, op1
, c
))))
5859 if (sub_strict_overflow_p
)
5860 *strict_overflow_p
= true;
5861 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5862 fold_convert (ctype
, t2
));
5865 /* If this was a subtraction, negate OP1 and set it to be an addition.
5866 This simplifies the logic below. */
5867 if (tcode
== MINUS_EXPR
)
5868 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
5870 if (TREE_CODE (op1
) != INTEGER_CST
)
5873 /* If either OP1 or C are negative, this optimization is not safe for
5874 some of the division and remainder types while for others we need
5875 to change the code. */
5876 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
5878 if (code
== CEIL_DIV_EXPR
)
5879 code
= FLOOR_DIV_EXPR
;
5880 else if (code
== FLOOR_DIV_EXPR
)
5881 code
= CEIL_DIV_EXPR
;
5882 else if (code
!= MULT_EXPR
5883 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
5887 /* If it's a multiply or a division/modulus operation of a multiple
5888 of our constant, do the operation and verify it doesn't overflow. */
5889 if (code
== MULT_EXPR
5890 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
5892 op1
= const_binop (code
, fold_convert (ctype
, op1
),
5893 fold_convert (ctype
, c
), 0);
5894 /* We allow the constant to overflow with wrapping semantics. */
5896 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
5902 /* If we have an unsigned type is not a sizetype, we cannot widen
5903 the operation since it will change the result if the original
5904 computation overflowed. */
5905 if (TYPE_UNSIGNED (ctype
)
5906 && ! (TREE_CODE (ctype
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (ctype
))
5910 /* If we were able to eliminate our operation from the first side,
5911 apply our operation to the second side and reform the PLUS. */
5912 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
5913 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
5915 /* The last case is if we are a multiply. In that case, we can
5916 apply the distributive law to commute the multiply and addition
5917 if the multiplication of the constants doesn't overflow. */
5918 if (code
== MULT_EXPR
)
5919 return fold_build2 (tcode
, ctype
,
5920 fold_build2 (code
, ctype
,
5921 fold_convert (ctype
, op0
),
5922 fold_convert (ctype
, c
)),
5928 /* We have a special case here if we are doing something like
5929 (C * 8) % 4 since we know that's zero. */
5930 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
5931 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
5932 /* If the multiplication can overflow we cannot optimize this.
5933 ??? Until we can properly mark individual operations as
5934 not overflowing we need to treat sizetype special here as
5935 stor-layout relies on this opimization to make
5936 DECL_FIELD_BIT_OFFSET always a constant. */
5937 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
5938 || (TREE_CODE (TREE_TYPE (t
)) == INTEGER_TYPE
5939 && TYPE_IS_SIZETYPE (TREE_TYPE (t
))))
5940 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
5941 && integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
5943 *strict_overflow_p
= true;
5944 return omit_one_operand (type
, integer_zero_node
, op0
);
5947 /* ... fall through ... */
5949 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
5950 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
5951 /* If we can extract our operation from the LHS, do so and return a
5952 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5953 do something only if the second operand is a constant. */
5955 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5956 strict_overflow_p
)) != 0)
5957 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5958 fold_convert (ctype
, op1
));
5959 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
5960 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
5961 strict_overflow_p
)) != 0)
5962 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5963 fold_convert (ctype
, t1
));
5964 else if (TREE_CODE (op1
) != INTEGER_CST
)
5967 /* If these are the same operation types, we can associate them
5968 assuming no overflow. */
5970 && 0 != (t1
= int_const_binop (MULT_EXPR
, fold_convert (ctype
, op1
),
5971 fold_convert (ctype
, c
), 1))
5972 && 0 != (t1
= force_fit_type_double (ctype
, TREE_INT_CST_LOW (t1
),
5973 TREE_INT_CST_HIGH (t1
),
5974 (TYPE_UNSIGNED (ctype
)
5975 && tcode
!= MULT_EXPR
) ? -1 : 1,
5976 TREE_OVERFLOW (t1
)))
5977 && !TREE_OVERFLOW (t1
))
5978 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
), t1
);
5980 /* If these operations "cancel" each other, we have the main
5981 optimizations of this pass, which occur when either constant is a
5982 multiple of the other, in which case we replace this with either an
5983 operation or CODE or TCODE.
5985 If we have an unsigned type that is not a sizetype, we cannot do
5986 this since it will change the result if the original computation
5988 if ((TYPE_OVERFLOW_UNDEFINED (ctype
)
5989 || (TREE_CODE (ctype
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (ctype
)))
5990 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
5991 || (tcode
== MULT_EXPR
5992 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
5993 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
5994 && code
!= MULT_EXPR
)))
5996 if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
5998 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
5999 *strict_overflow_p
= true;
6000 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6001 fold_convert (ctype
,
6002 const_binop (TRUNC_DIV_EXPR
,
6005 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, c
, op1
, 0)))
6007 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6008 *strict_overflow_p
= true;
6009 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
6010 fold_convert (ctype
,
6011 const_binop (TRUNC_DIV_EXPR
,
6024 /* Return a node which has the indicated constant VALUE (either 0 or
6025 1), and is of the indicated TYPE. */
6028 constant_boolean_node (int value
, tree type
)
6030 if (type
== integer_type_node
)
6031 return value
? integer_one_node
: integer_zero_node
;
6032 else if (type
== boolean_type_node
)
6033 return value
? boolean_true_node
: boolean_false_node
;
6035 return build_int_cst (type
, value
);
6039 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6040 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6041 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6042 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6043 COND is the first argument to CODE; otherwise (as in the example
6044 given here), it is the second argument. TYPE is the type of the
6045 original expression. Return NULL_TREE if no simplification is
6049 fold_binary_op_with_conditional_arg (enum tree_code code
,
6050 tree type
, tree op0
, tree op1
,
6051 tree cond
, tree arg
, int cond_first_p
)
6053 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
6054 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
6055 tree test
, true_value
, false_value
;
6056 tree lhs
= NULL_TREE
;
6057 tree rhs
= NULL_TREE
;
6059 /* This transformation is only worthwhile if we don't have to wrap
6060 arg in a SAVE_EXPR, and the operation can be simplified on at least
6061 one of the branches once its pushed inside the COND_EXPR. */
6062 if (!TREE_CONSTANT (arg
))
6065 if (TREE_CODE (cond
) == COND_EXPR
)
6067 test
= TREE_OPERAND (cond
, 0);
6068 true_value
= TREE_OPERAND (cond
, 1);
6069 false_value
= TREE_OPERAND (cond
, 2);
6070 /* If this operand throws an expression, then it does not make
6071 sense to try to perform a logical or arithmetic operation
6073 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
6075 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
6080 tree testtype
= TREE_TYPE (cond
);
6082 true_value
= constant_boolean_node (true, testtype
);
6083 false_value
= constant_boolean_node (false, testtype
);
6086 arg
= fold_convert (arg_type
, arg
);
6089 true_value
= fold_convert (cond_type
, true_value
);
6091 lhs
= fold_build2 (code
, type
, true_value
, arg
);
6093 lhs
= fold_build2 (code
, type
, arg
, true_value
);
6097 false_value
= fold_convert (cond_type
, false_value
);
6099 rhs
= fold_build2 (code
, type
, false_value
, arg
);
6101 rhs
= fold_build2 (code
, type
, arg
, false_value
);
6104 test
= fold_build3 (COND_EXPR
, type
, test
, lhs
, rhs
);
6105 return fold_convert (type
, test
);
6109 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6111 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6112 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6113 ADDEND is the same as X.
6115 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6116 and finite. The problematic cases are when X is zero, and its mode
6117 has signed zeros. In the case of rounding towards -infinity,
6118 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6119 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6122 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
6124 if (!real_zerop (addend
))
6127 /* Don't allow the fold with -fsignaling-nans. */
6128 if (HONOR_SNANS (TYPE_MODE (type
)))
6131 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6132 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
6135 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6136 if (TREE_CODE (addend
) == REAL_CST
6137 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
6140 /* The mode has signed zeros, and we have to honor their sign.
6141 In this situation, there is only one case we can return true for.
6142 X - 0 is the same as X unless rounding towards -infinity is
6144 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
));
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
                     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
        {
          /* sqrt(x) < y is always false, if y is negative.  */
          if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
            return omit_one_operand (type, integer_zero_node, arg);

          /* sqrt(x) > y is always true, if y is negative and we
             don't care about NaNs, i.e. negative values of x.  */
          if (code == NE_EXPR || !HONOR_NANS (mode))
            return omit_one_operand (type, integer_one_node, arg);

          /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
          return fold_build2 (GE_EXPR, type, arg,
                              build_real (TREE_TYPE (arg), dconst0));
        }
      else if (code == GT_EXPR || code == GE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) > y is x == +Inf, when y is very large.  */
              if (HONOR_INFINITIES (mode))
                return fold_build2 (EQ_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) > y is always false, when y is very large
                 and we don't care about infinities.  */
              return omit_one_operand (type, integer_zero_node, arg);
            }

          /* sqrt(x) > c is the same as x > c*c.  */
          return fold_build2 (code, type, arg,
                              build_real (TREE_TYPE (arg), c2));
        }
      else if (code == LT_EXPR || code == LE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) < y is always true, when y is a very large
                 value and we don't care about NaNs or Infinities.  */
              if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
                return omit_one_operand (type, integer_one_node, arg);

              /* sqrt(x) < y is x != +Inf when y is very large and we
                 don't care about NaNs.  */
              if (! HONOR_NANS (mode))
                return fold_build2 (NE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) < y is x >= 0 when y is very large and we
                 don't care about Infinities.  */
              if (! HONOR_INFINITIES (mode))
                return fold_build2 (GE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), dconst0));

              /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
              if (lang_hooks.decls.global_bindings_p () != 0
                  || CONTAINS_PLACEHOLDER_P (arg))
                return NULL_TREE;

              arg = save_expr (arg);
              return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                  fold_build2 (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2 (NE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }

          /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
          if (! HONOR_NANS (mode))
            return fold_build2 (code, type, arg,
                                build_real (TREE_TYPE (arg), c2));

          /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
          if (lang_hooks.decls.global_bindings_p () == 0
              && ! CONTAINS_PLACEHOLDER_P (arg))
            {
              arg = save_expr (arg);
              return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                  fold_build2 (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2 (code, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }
        }
    }

  return NULL_TREE;
}
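
/* Editor's illustration (a sketch, not part of the GCC sources): the
   unconditional rewrites above give, for instance,

       sqrt (x) > 2.0    ->    x > 4.0    (4.0 is c*c rounded in MODE)
       sqrt (x) < -1.0   ->    0          (always false)

   while the NaN- and infinity-sensitive variants are guarded by
   HONOR_NANS and HONOR_INFINITIES as shown.  */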
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
        return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
          && ! CONTAINS_PLACEHOLDER_P (arg0))
        {
          arg0 = save_expr (arg0);
          return fold_build2 (EQ_EXPR, type, arg0, arg0);
        }
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
        return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                            arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
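
/* Editor's illustration (a sketch, not part of the GCC sources): for
   double these cases rewrite, e.g.,

       x <  __builtin_inf ()   ->   x <= DBL_MAX
       x != __builtin_inf ()   ->   x <= DBL_MAX      (no NaNs), or
                                    !(x > DBL_MAX)    (otherwise)

   with real_maxval supplying DBL_MAX (or -DBL_MAX for -Inf) for the
   operand's mode.  */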
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
                                   TREE_INT_CST_HIGH (arg01),
                                   TREE_INT_CST_LOW (arg1),
                                   TREE_INT_CST_HIGH (arg1),
                                   &lpart, &hpart, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
                                -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
                                       TREE_INT_CST_HIGH (prod),
                                       TREE_INT_CST_LOW (tmp),
                                       TREE_INT_CST_HIGH (tmp),
                                       &lpart, &hpart, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
                                  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          neg_overflow = true;
          lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        case  0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case  1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        case  0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case  1:
          neg_overflow = true;
          lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
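
/* Editor's illustration (a sketch, not part of the GCC sources): for
   signed x with truncating division, the [lo, hi] range built above
   gives, for instance,

       x / 4 == 5    <=>    20 <= x && x <= 23    (lo = 20, hi = 23)
       x / 4 >  5    <=>    x > 23

   and the TREE_OVERFLOW checks handle the cases where lo or hi would
   fall outside the range of the type.  */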
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = signed_type_for (TREE_TYPE (arg00));
          return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                              result_type, fold_convert (stype, arg00),
                              build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}
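
/* Editor's illustration (a sketch, not part of the GCC sources): for a
   32-bit int a, the sign test above rewrites

       (a & 0x80000000) != 0    ->    a < 0
       (a & 0x80000000) == 0    ->    a >= 0

   after converting to the corresponding signed type if needed.  */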
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
                      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
        inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
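
/* Editor's illustration (a sketch, not part of the GCC sources): for a
   single-bit mask that is not the sign bit, the code above produces

       (a & 8) != 0    ->    ((unsigned) a >> 3) & 1
       (a & 8) == 0    ->    (((unsigned) a >> 3) ^ 1) & 1

   with the intermediate signedness chosen via LOAD_EXTEND_OP so the
   final AND can often be combined away.  */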
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
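
/* Editor's illustration (a sketch, not part of the GCC sources): this
   ordering makes commutative expressions canonical, e.g. fold rewrites

       5 + a    as    a + 5

   so later optimizers only ever have to recognize one operand order.  */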
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
          || ((TYPE_PRECISION (shorter_type)
               >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
              && (TYPE_UNSIGNED (shorter_type)
                  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
              && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2 (code, type, arg0_unw,
                        fold_convert (shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
        return omit_one_operand (type, integer_one_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
        return omit_one_operand (type, integer_zero_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
                              tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  /* If the conversion is from an integral subtype to its basetype
     leave it alone.  */
  if (TREE_TYPE (inner_type) == outer_type)
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
       || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
                                  TREE_INT_CST_HIGH (arg1), 0,
                                  TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
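
/* Editor's illustration (a sketch, not part of the GCC sources): with
   an unsigned int u, the fold above rewrites

       (int) u == 5    ->    u == 5u

   dropping the sign-changing cast; when the signedness differs, only
   the equality codes EQ_EXPR/NE_EXPR are allowed, as checked above.  */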
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s * delta
   being an integer constant (and thus already folded).
   ADDR is the address.  MULT is the multiplicative expression.
   If the function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  */

static tree
try_move_mult_to_index (tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        {
          /* Remember if this was a multi-dimensional array.  */
          if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
            mdim = true;

          itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
          if (! itype)
            continue;

          step = array_ref_element_size (ref);
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* Try if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          /* Only fold here if we can verify we do not overflow one
             dimension of a multi-dimensional array.  */
          if (mdim)
            {
              tree tmp;

              if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
                  || !INTEGRAL_TYPE_P (itype)
                  || !TYPE_MAX_VALUE (itype)
                  || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
                continue;

              tmp = fold_binary (PLUS_EXPR, itype,
                                 fold_convert (itype,
                                               TREE_OPERAND (ref, 1)),
                                 fold_convert (itype, delta));
              if (!tmp
                  || TREE_CODE (tmp) != INTEGER_CST
                  || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
                continue;
            }

          break;
        }
      else
        mdim = false;

      if (!handled_component_p (ref))
        return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
                                       fold_convert (itype,
                                                     TREE_OPERAND (pos, 1)),
                                       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
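
/* Editor's illustration (a sketch, not part of the GCC sources): for
   int a[10], with step 4 (sizeof (int)), the transformation above
   rewrites

       &a[1] p+ 4 * d    into    &a[1 + d]

   re-associating the (possibly already folded) s * delta product back
   into the array index.  */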
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert (ssizetype, a);
      tree ta1 = fold_convert (ssizetype, a1);
      diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary (MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2 (GE_EXPR, type, a, y);
}
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
           && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
          < (int11 >= 0 ? int11 : -int11))
        {
          tmp = int01, int01 = int11, int11 = tmp;
          alt0 = arg00, arg00 = arg10, arg10 = alt0;
          maybe_same = arg01;
          swap = true;
        }
      else
        maybe_same = arg11;

      if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
        {
          alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
                              build_int_cst (TREE_TYPE (arg00),
                                             int01 / int11));
          alt1 = arg10;
          same = maybe_same;
          if (swap)
            maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
        }
    }

  if (same)
    return fold_build2 (MULT_EXPR, type,
                        fold_build2 (code, type,
                                     fold_convert (type, alt0),
                                     fold_convert (type, alt1)),
                        fold_convert (type, same));

  return NULL_TREE;
}
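
/* Editor's illustration (a sketch, not part of the GCC sources): with a
   common multiplicand the function performs, e.g.,

       a * 7 + b * 7    ->    (a + b) * 7
       a * 8 + a        ->    a * 9        (via the constant-1 operands)

   and the power-of-two path factors  a * 12 + b * 4  into
   (a * 3 + b) * 4.  */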
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
        value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
        value = (unsigned char) (TREE_INT_CST_HIGH (expr)
                                 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem, elements;

  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (elements)
        {
          elem = TREE_VALUE (elements);
          elements = TREE_CHAIN (elements);
        }
      else
        elem = NULL_TREE;

      if (elem)
        {
          if (native_encode_expr (elem, ptr+offset, len-offset) != size)
            return 0;
        }
      else
        {
          if (offset + size > len)
            return 0;
          memset (ptr+offset, 0, size);
        }
      offset += size;
    }
  return offset;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  unsigned int HOST_WIDE_INT lo = 0;
  HOST_WIDE_INT hi = 0;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
        lo |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
        hi |= (unsigned HOST_WIDE_INT) value
              << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return build_int_cst_wide_type (type, lo, hi);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem, elements;
  int i, size, count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = NULL_TREE;
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
        return NULL_TREE;
      elements = tree_cons (NULL_TREE, elem, elements);
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
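
/* Editor's illustration (a sketch, not part of the GCC sources):
   fold_view_convert_expr performs compile-time type punning through
   the target byte image.  On a little-endian target,

       VIEW_CONVERT_EXPR<int>(1.0f)

   encodes 1.0f as the bytes { 0x00, 0x00, 0x80, 0x3f } via
   native_encode_expr and reinterprets them as the INTEGER_CST
   0x3f800000.  */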
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  When IN_FOLD is true
   avoid modifications of T.  */

static tree
build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else if (!in_fold)
    {
      tree base = t;

      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);

      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }
  else
    t = build1 (ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T with type PTRTYPE.  This
   function modifies the input parameter 'T' by sometimes setting the
   TREE_ADDRESSABLE flag.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
}

/* Build an expression for the address of T.  This function modifies
   the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
   flag.  When called from fold functions, use fold_addr_expr instead.  */

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type_1 (t,
                                           build_pointer_type (TREE_TYPE (t)),
                                           false);
}

/* Same as build_fold_addr_expr, builds an expression for the address
   of T, but avoids touching the input node 't'.  Fold functions
   should use this version.  */

static tree
fold_addr_expr (tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
}
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary (enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (CONVERT_EXPR_CODE_P (code)
          || code == FLOAT_EXPR || code == ABS_EXPR)
        {
          /* Don't use STRIP_NOPS, because signedness of argument type
             matters.  */
          STRIP_SIGN_NOPS (arg0);
        }
      else
        {
          /* Strip any conversions that don't change the mode.  This
             is safe for every expression, except for a comparison
             expression because its signedness is derived from its
             operands.

             Note that this is done as an internal manipulation within
             the constant folder, in order to find the simplest
             representation of the arguments so that their form can be
             studied.  In any cases, the appropriate type conversions
             should be put back in the tree that will get out of the
             constant folder.  */
          STRIP_NOPS (arg0);
        }
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold_build1 (code, type, arg01);
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold_build1 (code, type, arg02);
          tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
                             arg01, arg02);

          /* If this was a conversion, and all we did was to move into
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((CONVERT_EXPR_CODE_P (code)
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                     && (INTEGRAL_TYPE_P
                         (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
                  || flag_syntax_only))
            tem = build1 (code, type,
                          build3 (COND_EXPR,
                                  TREE_TYPE (TREE_OPERAND
                                             (TREE_OPERAND (tem, 1), 0)),
                                  TREE_OPERAND (tem, 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
          return tem;
        }
      else if (COMPARISON_CLASS_P (arg0))
        {
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            {
              arg0 = copy_node (arg0);
              TREE_TYPE (arg0) = type;
              return arg0;
            }
          else if (TREE_CODE (type) != INTEGER_TYPE)
            return fold_build3 (COND_EXPR, type, arg0,
                                fold_build1 (code, type,
                                             integer_one_node),
                                fold_build1 (code, type,
                                             integer_zero_node));
        }
    }

  switch (code)
    {
    case PAREN_EXPR:
      /* Re-association barriers around constants and other re-association
         barriers can be removed.  */
      if (CONSTANT_CLASS_P (op0)
          || TREE_CODE (op0) == PAREN_EXPR)
        return fold_convert (type, op0);
      return NULL_TREE;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;

      /* If we have (type) (a CMP b) and type is an integral type, return
         new expression involving the new type.  */
      if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
        return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
                            TREE_OPERAND (op0, 1));

      /* Handle cases of two conversions in a row.  */
      if (CONVERT_EXPR_P (op0))
        {
          tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inter_type = TREE_TYPE (op0);
          int inside_int = INTEGRAL_TYPE_P (inside_type);
          int inside_ptr = POINTER_TYPE_P (inside_type);
          int inside_float = FLOAT_TYPE_P (inside_type);
          int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
          unsigned int inside_prec = TYPE_PRECISION (inside_type);
          int inside_unsignedp = TYPE_UNSIGNED (inside_type);
          int inter_int = INTEGRAL_TYPE_P (inter_type);
          int inter_ptr = POINTER_TYPE_P (inter_type);
          int inter_float = FLOAT_TYPE_P (inter_type);
          int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
          unsigned int inter_prec = TYPE_PRECISION (inter_type);
          int inter_unsignedp = TYPE_UNSIGNED (inter_type);
          int final_int = INTEGRAL_TYPE_P (type);
          int final_ptr = POINTER_TYPE_P (type);
          int final_float = FLOAT_TYPE_P (type);
          int final_vec = TREE_CODE (type) == VECTOR_TYPE;
          unsigned int final_prec = TYPE_PRECISION (type);
          int final_unsignedp = TYPE_UNSIGNED (type);

          /* In addition to the cases of two conversions in a row
             handled below, if we are converting something to its own
             type via an object of identical or wider precision, neither
             conversion is needed.  */
          if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
              && (((inter_int || inter_ptr) && final_int)
                  || (inter_float && final_float))
              && inter_prec >= final_prec)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Likewise, if the intermediate and final types are either both
             float or both integer, we don't need the middle conversion if
             it is wider than the final type and doesn't change the signedness
             (for integers).  Avoid this if the final type is a pointer
             since then we sometimes need the inner conversion.  Likewise if
             the outer has a precision not equal to the size of its mode.  */
          if (((inter_int && inside_int)
               || (inter_float && inside_float)
               || (inter_vec && inside_vec))
              && inter_prec >= inside_prec
              && (inter_float || inter_vec
                  || inter_unsignedp == inside_unsignedp)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr
              && (! final_vec || inter_prec == inside_prec))
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* If we have a sign-extension of a zero-extended value, we can
             replace that by a single zero-extension.  */
          if (inside_int && inter_int && final_int
              && inside_prec < inter_prec && inter_prec < final_prec
              && inside_unsignedp && !inter_unsignedp)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Two conversions in a row are not needed unless:
             - some conversion is floating-point (overstrict for now), or
             - some conversion is a vector (overstrict for now), or
             - the intermediate type is narrower than both initial and
               final, or
             - the intermediate type and innermost type differ in signedness,
               and the outermost type is wider than the intermediate, or
             - the initial type is a pointer type and the precisions of the
               intermediate and final types differ, or
             - the final type is a pointer type and the precisions of the
               initial and intermediate types differ.  */
          if (! inside_float && ! inter_float && ! final_float
              && ! inside_vec && ! inter_vec && ! final_vec
              && (inter_prec >= inside_prec || inter_prec >= final_prec)
              && ! (inside_int && inter_int
                    && inter_unsignedp != inside_unsignedp
                    && inter_prec < final_prec)
              && ((inter_unsignedp && inter_prec > inside_prec)
                  == (final_unsignedp && final_prec > inter_prec))
              && ! (inside_ptr && inter_prec != final_prec)
              && ! (final_ptr && inside_prec != inter_prec)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));
        }

      /* Handle (T *)&A.B.C for A being of type T and B and C
         living at offset zero.  This occurs frequently in
         C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
          && POINTER_TYPE_P (type)
          && handled_component_p (TREE_OPERAND (op0, 0)))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;
          tree base = TREE_OPERAND (op0, 0);
          base = get_inner_reference (base, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          /* If the reference was to a (constant) zero offset, we can use
             the address of the base if it has the same base type
             as the result type.  */
          if (! offset && bitpos == 0
              && TYPE_MAIN_VARIANT (TREE_TYPE (type))
                 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
            return fold_convert (type, fold_addr_expr (base));
        }

      if (TREE_CODE (op0) == MODIFY_EXPR
          && TREE_CONSTANT (TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD
                  (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
          TREE_USED (tem) = 1;
          return tem;
        }

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constants (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.
         ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
         very likely don't have maximal range for their precision and this
         transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
          /* Not if the conversion is to the sub-type.  */
          && TREE_TYPE (type) != TREE_TYPE (op0))
        {
          tree and = op0;
          tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && host_integerp (and1, 1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_low_cst (and1, 1);
              cst &= (HOST_WIDE_INT) -1
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
#ifdef LOAD_EXTEND_OP
              if (change
                  && !flag_syntax_only
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = unsigned_type_for (TREE_TYPE (and0));
                  and0 = fold_convert (uns, and0);
                  and1 = fold_convert (uns, and1);
                }
#endif
            }
          if (change)
            {
              tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
                                           TREE_INT_CST_HIGH (and1), 0,
                                           TREE_OVERFLOW (and1));
              return fold_build2 (BIT_AND_EXPR, type,
                                  fold_convert (type, and0), tem);
            }
        }

      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
         when one of the new casts will fold away. Conservatively we assume
         that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
      if (POINTER_TYPE_P (type)
          && TREE_CODE (arg0) == POINTER_PLUS_EXPR
          && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
              || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);

          return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
                              fold_convert (sizetype, arg01));
        }

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
         of the same precision, and X is an integer type not narrower than
         types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == BIT_NOT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        {
          tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
          if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
              && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
            return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
        }

      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
         type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == MULT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
        {
          /* Be careful not to introduce new overflows.  */
          tree mult_type;
          if (TYPE_OVERFLOW_WRAPS (type))
            mult_type = type;
          else
            mult_type = unsigned_type_for (type);

          tem = fold_build2 (MULT_EXPR, mult_type,
                             fold_convert (mult_type, TREE_OPERAND (op0, 0)),
                             fold_convert (mult_type, TREE_OPERAND (op0, 1)));
          return fold_convert (type, tem);
        }

      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;

    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
        return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));

      /* For integral conversions with the same precision or pointer
         conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
           || POINTER_TYPE_P (type))
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
              || POINTER_TYPE_P (TREE_TYPE (op0)))
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
          /* Do not muck with VIEW_CONVERT_EXPRs that convert from
             a sub-type to its base type as generated by the Ada FE.  */
          && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
               && TREE_TYPE (TREE_TYPE (op0))))
        return fold_convert (type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
              || POINTER_TYPE_P (TREE_TYPE (op0)))
          && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
              || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
          && (TYPE_PRECISION (TREE_TYPE (op0))
              == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
        return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);

    case NEGATE_EXPR:
      tem = fold_negate_expr (arg0);
      if (tem)
        return fold_convert (type, tem);
      return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
               && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert (type, fold_build1 (ABS_EXPR,
                                                    TREE_TYPE (targ0),
                                                    targ0));
        }
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
        return arg0;
      else if (tree_expr_nonnegative_p (arg0))
        return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = fold_strip_sign_ops (arg0);
          if (tem)
            return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
        }
      return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
          tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
          return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert (itype, TREE_REALPART (arg0));
          tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
          return build_complex (type, rpart, negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        return fold_convert (type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
        return fold_convert (type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
                            build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
               && ((TREE_CODE (arg0) == MINUS_EXPR
                    && integer_onep (TREE_OPERAND (arg0, 1)))
                   || (TREE_CODE (arg0) == PLUS_EXPR
                       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
        return fold_build1 (NEGATE_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 0)))))
        return fold_build2 (BIT_XOR_EXPR, type, tem,
                            fold_convert (type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 1)))))
        return fold_build2 (BIT_XOR_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
          int count = TYPE_VECTOR_SUBPARTS (type), i;

          for (i = 0; i < count; i++)
            {
              if (elements)
                {
                  elem = TREE_VALUE (elements);
                  elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
                  if (elem == NULL_TREE)
                    break;
                  elements = TREE_CHAIN (elements);
                }
              else
                elem = build_int_cst (TREE_TYPE (type), -1);
              list = tree_cons (NULL_TREE, elem, list);
            }
          if (i == count)
            return build_vector (type, nreverse (list));
        }

      return NULL_TREE;

    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
        arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (arg0);
      if (!tem)
        return NULL_TREE;
      return fold_convert (type, tem);

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2 (TREE_CODE (arg0), itype,
                             fold_build1 (REALPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 0)),
                             fold_build1 (REALPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 1)));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_COS);
                if (fn)
                  return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, integer_zero_node);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2 (TREE_CODE (arg0), itype,
                             fold_build1 (IMAGPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 0)),
                             fold_build1 (IMAGPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 1)));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
          return fold_convert (type, negate_expr (tem));
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_SIN);
                if (fn)
                  return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */
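/* For example, MIN_EXPR <MAX_EXPR <a, b>, b> folds to b; the discarded
   operand a is preserved only for its side effects via omit_one_operand.  */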
static tree
fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */
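/* For instance, A - 2 < B becomes A - 1 <= B (valid only when signed
   overflow is undefined), and the constant-first form 3 <= B becomes
   B > 2 via the swapped comparison.  */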
static tree
maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
  if (code0 != INTEGER_CST)
    t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2 (swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2 (code, type, t, arg1);
}
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */
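/* ARG0 is simplified first; if that fails, the swapped comparison is
   tried so that a sole constant in ARG1 can be reduced instead.  */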
static tree
maybe_canonicalize_comparison (enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */
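/* The test is conservative: a non-constant offset, an unknown object
   size, or a total byte offset beyond the object size all count as
   possibly wrapping.  */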
static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  unsigned HOST_WIDE_INT offset_low, total_low;
  HOST_WIDE_INT size, offset_high, total_high;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    {
      offset_low = 0;
      offset_high = 0;
    }
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    {
      offset_low = TREE_INT_CST_LOW (offset);
      offset_high = TREE_INT_CST_HIGH (offset);
    }

  if (add_double_with_sign (offset_low, offset_high,
			    bitpos / BITS_PER_UNIT, 0,
			    &total_low, &total_high,
			    true))
    return true;

  if (total_high != 0)
    return true;

  size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total_low > (unsigned HOST_WIDE_INT) size;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should go through fold_binary rather than calling this
   function directly.  Fold a comparison with tree code CODE and type
   TYPE with operands OP0 and OP1.  Return the folded comparison or
   NULL_TREE.  */
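/* This is where, for example, X + 1 > 1 is reduced to X > 0 and a
   self-comparison such as X == X is folded to a constant.  */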
static tree
fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (swap_tree_comparison (code), type, op1, op0);
  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
			 TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand (type, boolean_false_node, variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand (type, boolean_true_node, variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  fold_overflow_warning (("assuming signed overflow does not occur "
				  "when changing X +- C1 cmp C2 to "
				  "X cmp C1 +- C2"),
				 WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2 (code, type, variable, lhs);
	}
    }
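  /* E.g. with undefined signed overflow, X + 10 < 6 is rewritten as
     X < -4, and the overflowed bound in X - 1 > INT_MAX folds the whole
     comparison to false.  */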
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  offset0 = TREE_OPERAND (arg0, 1);
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  offset1 = TREE_OPERAND (arg1, 1);
	}

      /* If we have equivalent bases we might be able to simplify.  */
      if (indirect_base0 == indirect_base1
	  && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (code == EQ_EXPR
		  || code == NE_EXPR
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
		case NE_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
		case LT_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
		case LE_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
		case GE_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
		case GT_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed size type here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to retain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && ((code == EQ_EXPR || code == NE_EXPR)
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      tree signed_size_type_node;
	      signed_size_type_node = signed_type_for (size_type_node);

	      /* By converting to signed size type we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 size type.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (signed_size_type_node, 0);
	      else
		offset0 = fold_convert (signed_size_type_node, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (signed_size_type_node, 0);
	      else
		offset1 = fold_convert (signed_size_type_node, offset1);

	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2 (code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands (type, boolean_false_node, arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands (type, boolean_true_node, arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = fold_addr_expr (base0);
	  if (indirect_base1)
	    base1 = fold_addr_expr (base1);
	  return fold_build2 (code, type, base0, base1);
	}
    }
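  /* The block above is what folds &s.a != &s.b to 1 at compile time
     and, when pointer overflow is undefined, also &x[1] < &x[2], since
     both reduce to comparisons of constant byte offsets from one base.  */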
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one and has the same sign.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2 (code, type,
			      variable1,
			      fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
					   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2 (code, type,
			      fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
					   variable1, cst),
			      variable2);
	}
    }
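  /* E.g. X + 2 < Y + 3 becomes X < Y + 1: the surviving constant 1 is
     smaller in magnitude than the original 3.  */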
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      gcc_assert (!integer_zerop (const1));

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2 (cmp_code, type, variable1, const2);
    }
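  /* E.g. for signed X, X * 4 > 0 becomes X > 0, while X * -2 > 0
     becomes X < 0 because the negative factor inverts the order.  */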
  tem = maybe_canonicalize_comparison (code, type, op0, op1);
  if (tem)
    return tem;
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2 (code, type, fold_convert (newtype, targ0),
			    fold_convert (newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
			    TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2 (swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 0),
				build_real (TREE_TYPE (arg1),
					    REAL_VALUE_NEGATE (cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2 (code, type, arg0,
				build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand (type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst))
	    {
	      tem = fold_inf_compare (code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (swap_tree_comparison (code), type,
			    TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (code, type, arg0, arg1);
      if (tem)
	return tem;
    }
  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (code, type, op0, op1);
      if (tem)
	return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2 (EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }
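  /* Hence X == X folds to 1 for integers, X > X folds to 0 even for
     IEEE floats, and X >= X on a NaN-honoring float is only weakened
     to X == X.  */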
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	  tree equal_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	  tree low_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand (type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand (type, integer_one_node, arg0);
		}

	      if (save_p)
		return save_expr (build2 (code, type, cval1, cval2));
	      return fold_build2 (code, type, cval1, cval2);
	    }
	}
    }
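  /* The above is what reduces ((x > y) - (y > x)) > 0 to x > y: the
     three possible orderings of x and y are each evaluated and the
     3-bit mask picks the equivalent comparison code.  */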
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }
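  /* E.g. X/3 == 2 becomes the single range test 6 <= X && X <= 8 for
     truncating division.  */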
  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2 (code, type,
			  fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
			  TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2 (swap_tree_comparison (code), type,
			  TREE_OPERAND (arg0, 0),
			  fold_build1 (BIT_NOT_EXPR, cmp_type,
				       fold_convert (cmp_type, arg1)));
    }

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */
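/* Concretely, (a + bi) * (a - bi) becomes the complex value
   a*a + b*b + 0i, built below from the saved real and imaginary parts.  */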
static tree
fold_mult_zconjz (tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1 (REALPART_EXPR, itype, expr);
      ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2 (PLUS_EXPR, itype,
		     fold_build2 (MULT_EXPR, itype, rpart, rpart),
		     fold_build2 (MULT_EXPR, itype, ipart, ipart));
  return fold_build2 (COMPLEX_EXPR, type, tem,
		      fold_convert (itype, integer_zero_node));
}
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.  */
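/* For example, if EXPR is a pointer to an 8-byte-aligned object plus the
   constant 3, the result is M = 8 with *RESIDUE = 3: the low three bits
   of the pointer value are known exactly.  */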
static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      if (handled_component_p (expr))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;

	  expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  *residue = bitpos / BITS_PER_UNIT;
	  if (offset)
	    {
	      if (TREE_CODE (offset) == INTEGER_CST)
		*residue += TREE_INT_CST_LOW (offset);
	      else
		/* We don't handle more complicated offset expressions.  */
		return 1;
	    }
	}

      if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
	return DECL_ALIGN_UNIT (expr);
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */
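/* E.g. fold_binary (PLUS_EXPR, type, x, build_int_cst (type, 0)) returns
   x; a NULL_TREE result means no simplification applied and the caller
   builds the expression unchanged.  */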
tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     cases, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
	{
	  /* Make sure type and arg0 have the same saturating flag.  */
	  gcc_assert (TYPE_SATURATING (type)
		      == TYPE_SATURATING (TREE_TYPE (arg0)));
	  tem = const_binop (code, arg0, arg1, 0);
	}
      else if (kind == tcc_comparison)
	tem = fold_relational_const (code, type, arg0, arg1);
      else
	tem = NULL_TREE;

      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert (type, tem);
	  return tem;
	}
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (code, type, op1, op0);

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			 : TRUTH_XOR_EXPR,
			 boolean_type_node,
			 fold_convert (boolean_type_node, arg0),
			 fold_convert (boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue (tem);

      return fold_convert (type, tem);
    }

  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build2 (code, type,
				    fold_convert (TREE_TYPE (op0),
						  TREE_OPERAND (arg0, 1)),
				    op1));
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		       fold_build2 (code, type, op0,
				    fold_convert (TREE_TYPE (op1),
						  TREE_OPERAND (arg1, 1))));

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
						fold_convert (sizetype, arg1),
						fold_convert (sizetype, arg0)));

      /* index +p PTR -> PTR +p index */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_build2 (POINTER_PLUS_EXPR, type,
			    fold_convert (type, arg1),
			    fold_convert (sizetype, arg0));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  tree inner;
	  tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  inner = fold_build2 (PLUS_EXPR, sizetype,
			       arg01, fold_convert (sizetype, arg1));
	  return fold_convert (type,
			       fold_build2 (POINTER_PLUS_EXPR,
					    TREE_TYPE (arg00), arg00, inner));
	}

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
	 of the array.  Loop optimizer sometimes produce this type of
	 expressions.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
	  if (tem)
	    return fold_convert (type, tem);
	}

      return NULL_TREE;

    case PLUS_EXPR:
      /* PTR + INT -> (INT)(PTR p+ INT) */
      if (POINTER_TYPE_P (TREE_TYPE (arg0))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
	return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
						TREE_TYPE (arg0),
						arg0,
						fold_convert (sizetype, arg1)));
      /* INT + PTR -> (INT)(PTR p+ INT) */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
						TREE_TYPE (arg1),
						arg1,
						fold_convert (sizetype, arg0)));
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, arg0),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, arg1),
			    fold_convert (type, TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
	{
	  /* Convert ~A + 1 to -A.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && integer_onep (arg1))
	    return fold_build1 (NEGATE_EXPR, type,
				fold_convert (type, TREE_OPERAND (arg0, 0)));

	  /* ~X + X is -1.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg0, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (tem, arg1, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand (type, t1, arg1);
		}
	    }

	  /* X + ~X is -1.  */
	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg1, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (arg0, tem, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand (type, t1, arg0);
		}
	    }

	  /* X + (X / CST) * -CST is X % CST.  */
	  if (TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	      && operand_equal_p (arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
	    {
	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
	      tree cst1 = TREE_OPERAND (arg1, 1);
	      tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
	      if (sum && integer_zerop (sum))
		return fold_convert (type,
				     fold_build2 (TRUNC_MOD_EXPR,
						  TREE_TYPE (arg0), arg0, cst0));
	    }
	}

      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
	 same or one.  Make sure type is not saturating.
	 fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1), 0)))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }

	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (((TREE_CODE (arg0) == PLUS_EXPR
		|| TREE_CODE (arg0) == MINUS_EXPR)
	       && TREE_CODE (arg1) == MULT_EXPR)
	      || ((TREE_CODE (arg1) == PLUS_EXPR
		   || TREE_CODE (arg1) == MINUS_EXPR)
		  && TREE_CODE (arg0) == MULT_EXPR))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2 (pcode, type,
				    fold_build2 (PLUS_EXPR, type,
						 fold_convert (type, parg0),
						 fold_convert (type, marg)),
				    fold_convert (type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return fold_build2 (PLUS_EXPR, type,
				    fold_convert (type, parg0),
				    fold_build2 (pcode, type,
						 fold_convert (type, marg),
						 fold_convert (type, parg1)));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue (fold_convert (type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold_build2 (MINUS_EXPR, type,
				    fold_convert (type, arg0),
				    fold_convert (type, tem));
	    }

	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
	     to __complex__ ( x, y ).  This is not the same for SNaNs or
	     if signed zeros are involved.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = arg1r ? arg1r
				  : build1 (REALPART_EXPR, rtype, arg1);
		      tree ip = arg0i ? arg0i
				  : build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2 (COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
				  : build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = arg1i ? arg1i
				  : build1 (IMAGPART_EXPR, rtype, arg1);
		      return fold_build2 (COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }

	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (code, type, arg0, arg1)))
	    return tem;

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2 (MULT_EXPR, type, arg0,
				build_real (type, dconst2));

	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
		  return fold_build2 (PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
		  return fold_build2 (PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}

     bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
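      /* E.g. for 32-bit unsigned A, (A << 3) + (A >> 29) matches the first
	 pattern and is rewritten as a left rotate of A by 3 bits.  */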
      {
	enum tree_code code0, code1;
	tree rtype;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
		TYPE_UNSIGNED (rtype))
	    /* Only create rotates in complete modes.  Other cases are not
	       expanded properly.  */
	    && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
			     code0 == LSHIFT_EXPR ? tree01 : tree11);
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return build2 ((code0 == LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree01);
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return build2 ((code0 != LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree11);
	      }
	  }
      }

    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */
      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	  && !TYPE_SATURATING (type))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
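	  /* For instance, (x + 1) + (y + 2) splits into variables x, y and
	     literals 1, 2; the literals recombine below into x + y + 3.  */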
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* With undefined overflow we can only associate constants
	     with one variable.  */
	  if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	       || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
	      && var0 && var1)
	    {
	      tree tmp0 = var0;
	      tree tmp1 = var1;

	      if (TREE_CODE (tmp0) == NEGATE_EXPR)
		tmp0 = TREE_OPERAND (tmp0, 0);
	      if (TREE_CODE (tmp1) == NEGATE_EXPR)
		tmp1 = TREE_OPERAND (tmp1, 0);
	      /* The only case we can still associate with two variables
		 is if they are the same, modulo negation.  */
	      if (!operand_equal_p (tmp0, tmp1, 0))
		ok = false;
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && (2 < ((var0 != 0) + (var1 != 0)
		       + (con0 != 0) + (con1 != 0)
		       + (lit0 != 0) + (lit1 != 0)
		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
	    {
	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	      if (code == MINUS_EXPR)
		code = PLUS_EXPR;

	      var0 = associate_trees (var0, var1, code, type);
	      con0 = associate_trees (con0, con1, code, type);
	      lit0 = associate_trees (lit0, lit1, code, type);
	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return fold_convert (type,
					 associate_trees (var0, minus_lit0,
							  MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (con0, minus_lit0,
					      MINUS_EXPR, type);
		      return fold_convert (type,
					   associate_trees (var0, con0,
							    PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (con0, lit0, code, type);
	      return fold_convert (type, associate_trees (var0, con0,
							  code, type));
	    }
	}

      return NULL_TREE;

    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
	{
	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
	      tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
	      tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
	      return fold_build2 (PLUS_EXPR, type,
				  fold_build2 (MINUS_EXPR, type, arg00, arg10),
				  fold_build2 (MINUS_EXPR, type, arg01, arg11));
	    }
	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
	     simplifies.  */
	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
	      tree tmp = fold_binary (MINUS_EXPR, type, arg00,
				      fold_convert (type, arg1));
	      if (tmp)
		return fold_build2 (PLUS_EXPR, type, tmp, arg01);
	    }
	}
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (PLUS_EXPR, type, op0,
			    fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || INTEGRAL_TYPE_P (type))
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, negate_expr (arg1)),
			    fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && integer_onep (arg1)
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build1 (BIT_NOT_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && integer_all_onesp (arg0))
	return fold_build1 (BIT_NOT_EXPR, type, op1);
      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	  && operand_equal_p (arg0,
			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
			      TREE_OPERAND (arg1, 1), 0))
	return fold_convert (type,
			     fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
					  arg0, TREE_OPERAND (arg1, 1)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg0))
	    return negate_expr (fold_convert (type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		{
		  tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
		  return fold_build2 (BIT_AND_EXPR, type,
				      fold_build1 (BIT_NOT_EXPR, type, arg10),
				      fold_convert (type, arg0));
		}
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		{
		  tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
		  return fold_build2 (BIT_AND_EXPR, type,
				      fold_build1 (BIT_NOT_EXPR, type, arg11),
				      fold_convert (type, arg0));
		}
	    }

	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold_build2 (BIT_XOR_EXPR, type,
				     TREE_OPERAND (arg0, 0), mask1);
		  return fold_build2 (MINUS_EXPR, type, tem, mask1);
		}
	    }
	}
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert (type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
	 signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
	  tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
	  bool arg0rz = false, arg0iz = false;
	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
	      || (arg0i && (arg0iz = real_zerop (arg0i))))
	    {
	      tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
	      tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
	      if (arg0rz && arg1i && real_zerop (arg1i))
		{
		  tree rp = fold_build1 (NEGATE_EXPR, rtype,
					 arg1r ? arg1r
					 : build1 (REALPART_EXPR, rtype, arg1));
		  tree ip = arg0i ? arg0i
			      : build1 (IMAGPART_EXPR, rtype, arg0);
		  return fold_build2 (COMPLEX_EXPR, type, rp, ip);
		}
	      else if (arg0iz && arg1r && real_zerop (arg1r))
		{
		  tree rp = arg0r ? arg0r
			      : build1 (REALPART_EXPR, rtype, arg0);
		  tree ip = fold_build1 (NEGATE_EXPR, rtype,
					 arg1i ? arg1i
					 : build1 (IMAGPART_EXPR, rtype, arg1));
		  return fold_build2 (COMPLEX_EXPR, type, rp, ip);
		}
	    }
	}
10064 /* Fold &x - &x. This can happen from &x.foo - &x.
10065 This is unsafe for certain floats even in non-IEEE formats.
10066 In IEEE, it is unsafe because it does wrong for NaNs.
10067 Also note that operand_equal_p is always false if an operand
10070 if ((!FLOAT_TYPE_P (type
) || !HONOR_NANS (TYPE_MODE (type
)))
10071 && operand_equal_p (arg0
, arg1
, 0))
10072 return fold_convert (type
, integer_zero_node
);
10074 /* A - B -> A + (-B) if B is easily negatable. */
10075 if (negate_expr_p (arg1
)
10076 && ((FLOAT_TYPE_P (type
)
10077 /* Avoid this transformation if B is a positive REAL_CST. */
10078 && (TREE_CODE (arg1
) != REAL_CST
10079 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
10080 || INTEGRAL_TYPE_P (type
)))
10081 return fold_build2 (PLUS_EXPR
, type
,
10082 fold_convert (type
, arg0
),
10083 fold_convert (type
, negate_expr (arg1
)));
10085 /* Try folding difference of addresses. */
10087 HOST_WIDE_INT diff
;
10089 if ((TREE_CODE (arg0
) == ADDR_EXPR
10090 || TREE_CODE (arg1
) == ADDR_EXPR
)
10091 && ptr_difference_const (arg0
, arg1
, &diff
))
10092 return build_int_cst_type (type
, diff
);
      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree aref0 = TREE_OPERAND (arg0, 0);
          tree aref1 = TREE_OPERAND (arg1, 0);
          if (operand_equal_p (TREE_OPERAND (aref0, 0),
                               TREE_OPERAND (aref1, 0), 0))
            {
              tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
              tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
              tree esz = array_ref_element_size (aref0);
              tree diff = build2 (MINUS_EXPR, type, op0, op1);
              return fold_build2 (MULT_EXPR, type, diff,
                                  fold_convert (type, esz));
            }
        }
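      /* The difference of the two element addresses is the index
         difference scaled by the element size; e.g. for int a[10],
         (char *) &a[4] - (char *) &a[1] == (4 - 1) * sizeof (int).  */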
      if (flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  Make sure the type is not saturating.
         fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2 (MULT_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
                            fold_convert (type, negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2 (MULT_EXPR, type,
                            fold_convert (type, negate_expr (arg0)),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue (fold_convert (type, arg0));
          /* Transform x * -1 into -x.  Make sure to do the negation
             on the original operand with conversions not stripped
             because we can only strip non-sign-changing conversions.  */
          if (integer_all_onesp (arg1))
            return fold_convert (type, negate_expr (op0));
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2 (MULT_EXPR, type,
                                fold_convert (type, negate_expr (arg0)), tem);

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2 (LSHIFT_EXPR, type, op0,
                                TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2 (LSHIFT_EXPR, type, op1,
                                TREE_OPERAND (arg0, 1));

          /* (A + A) * C -> A * 2 * C  */
          if (TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) == INTEGER_CST
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg0, 1), 0))
            return fold_build2 (MULT_EXPR, type,
                                omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                                  TREE_OPERAND (arg0, 1)),
                                fold_build2 (MULT_EXPR, type,
                                             build_int_cst (type, 2), arg1));
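          /* E.g. (a + a) * 3 folds to a * 6, with omit_one_operand
             preserving any side effects of the discarded copy of A.  */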
          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                             &strict_overflow_p)))
            {
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "multiplication"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_convert (type, tem);
            }

          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (type, arg0);
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert (type, negate_expr (arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may
             change the result for floating point types due to rounding,
             so it is applied only if -fassociative-math was specified.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1, 0);
              if (tem)
                return fold_build2 (RDIV_EXPR, type, tem,
                                    TREE_OPERAND (arg0, 1));
            }
          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert (type, tem);
                  return fold_build2 (MULT_EXPR, type, tem, tem);
                }
            }

          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return fold_build2 (COMPLEX_EXPR, type,
                                    negate_expr (fold_build1 (IMAGPART_EXPR,
                                                              rtype, arg0)),
                                    fold_build1 (REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return fold_build2 (COMPLEX_EXPR, type,
                                    fold_build1 (IMAGPART_EXPR, rtype, arg0),
                                    negate_expr (fold_build1 (REALPART_EXPR,
                                                              rtype, arg0)));
            }
          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (type, arg0);

          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of root(...)*root(...).  */
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
                {
                  tree rootfn, arg;
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (BUILTIN_SQRT_P (fcode0)
                      && operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (TYPE_MODE (type)))
                    return arg00;

                  /* Optimize root(x)*root(y) as root(x*y).  */
                  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
                  return build_call_expr (rootfn, 1, arg);
                }
              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
                {
                  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  tree arg = fold_build2 (PLUS_EXPR, type,
                                          CALL_EXPR_ARG (arg0, 0),
                                          CALL_EXPR_ARG (arg1, 0));
                  return build_call_expr (expfn, 1, arg);
                }

              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
                      return build_call_expr (powfn, 2, arg, arg01);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
                      return build_call_expr (powfn, 2, arg00, arg);
                    }
                }
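              /* These folds use the identities pow(x,y)*pow(z,y)
                 == pow(x*z,y) and pow(x,y)*pow(x,z) == pow(x,y+z),
                 which can change rounding and exceptional behaviour in
                 IEEE arithmetic; hence the flag_unsafe_math_optimizations
                 guard on this whole block.  */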
              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                      CALL_EXPR_ARG (arg1, 0), 0))
                {
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

                  if (sinfn != NULL_TREE)
                    return build_call_expr (sinfn, 1,
                                            CALL_EXPR_ARG (arg0, 0));
                }
              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);
                  if (TREE_CODE (arg11) == REAL_CST
                      && !TREE_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr (powfn, 2, arg0, arg);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  if (TREE_CODE (arg01) == REAL_CST
                      && !TREE_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr (powfn, 2, arg1, arg);
                    }
                }

              /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (optimize_function_for_speed_p (cfun)
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      return build_call_expr (powfn, 2, arg0, arg);
                    }
                }
            }
        }
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue (fold_convert (type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = fold_convert (type, integer_zero_node);
          t1 = fold_unary (BIT_NOT_EXPR, type, t1);
          return omit_one_operand (type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = fold_convert (type, integer_zero_node);
          t1 = fold_unary (BIT_NOT_EXPR, type, t1);
          return omit_one_operand (type, t1, arg0);
        }
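      /* ~X | X sets every bit, i.e. the all-ones value -1; T1 is built
         as ~(type) 0 so the constant has the right type.  */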
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
          int width = TYPE_PRECISION (type), w;
          hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
          lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          hi2 = TREE_INT_CST_HIGH (arg1);
          lo2 = TREE_INT_CST_LOW (arg1);

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
            return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));

          if (width > HOST_BITS_PER_WIDE_INT)
            {
              mhi = (unsigned HOST_WIDE_INT) -1
                    >> (2 * HOST_BITS_PER_WIDE_INT - width);
              mlo = -1;
            }
          else
            {
              mhi = 0;
              mlo = (unsigned HOST_WIDE_INT) -1
                    >> (HOST_BITS_PER_WIDE_INT - width);
            }

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
            return fold_build2 (BIT_IOR_EXPR, type,
                                TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             mode which allows further optimizations.  */
          hi1 &= mhi;
          lo1 &= mlo;
          hi2 &= mhi;
          lo2 &= mlo;
          hi3 = hi1 & ~hi2;
          lo3 = lo1 & ~lo2;
          for (w = BITS_PER_UNIT;
               w <= width && w <= HOST_BITS_PER_WIDE_INT;
               w <<= 1)
            {
              unsigned HOST_WIDE_INT mask
                = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
              if (((lo1 | lo2) & mask) == mask
                  && (lo1 & ~mask) == 0 && hi1 == 0)
                {
                  hi3 = 0;
                  lo3 = mask;
                  break;
                }
            }
          if (hi3 != hi1 || lo3 != lo1)
            return fold_build2 (BIT_IOR_EXPR, type,
                                fold_build2 (BIT_AND_EXPR, type,
                                             TREE_OPERAND (arg0, 0),
                                             build_int_cst_wide (type,
                                                                 lo3, hi3)),
                                arg1);
        }
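      /* E.g. (X & 0x0F) | 0xFF becomes (X, 0xFF) by the (C1&C2) == C1
         rule above, and for an 8-bit type (X & 0xF0) | 0x0F becomes
         X | 0x0F because C1|C2 covers every bit of the mode.  */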
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1 (BIT_NOT_EXPR, type,
                              build2 (BIT_AND_EXPR, type,
                                      fold_convert (type,
                                                    TREE_OPERAND (arg0, 0)),
                                      fold_convert (type,
                                                    TREE_OPERAND (arg1, 0))));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
        return fold_build1 (BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = fold_convert (type, integer_zero_node);
          t1 = fold_unary (BIT_NOT_EXPR, type, t1);
          return omit_one_operand (type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = fold_convert (type, integer_zero_node);
          t1 = fold_unary (BIT_NOT_EXPR, type, t1);
          return omit_one_operand (type, t1, arg0);
        }
      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1), 0)))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }
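      /* E.g. (X & 0x0F) ^ (X & 0xF0) touches disjoint bits, so it
         equals (X & 0x0F) | (X & 0xF0) and can reuse the BIT_IOR_EXPR
         folds above.  */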
      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 1);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
                            arg1);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 0);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
                            arg1);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 1);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
                            arg0);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 0);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
                            arg0);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build2 (code, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2 (code, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
                            fold_build1 (BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        return fold_build2 (EQ_EXPR, type, arg0,
                            build_int_cst (TREE_TYPE (arg0), 0));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_convert (type, arg0),
                              fold_build1 (BIT_NOT_EXPR, type, tem));
        }
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg0));
        }
      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue (fold_convert (type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_zero_node, arg0);
      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree tmp1 = fold_convert (type, arg1);
          tree tmp2 = fold_convert (type, TREE_OPERAND (arg0, 0));
          tree tmp3 = fold_convert (type, TREE_OPERAND (arg0, 1));
          tmp2 = fold_build2 (BIT_AND_EXPR, type, tmp2, tmp1);
          tmp3 = fold_build2 (BIT_AND_EXPR, type, tmp3, tmp1);
          return fold_convert (type,
                               fold_build2 (BIT_IOR_EXPR, type, tmp2, tmp3));
        }

      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));

      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2 (EQ_EXPR, type,
                              fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
                                           build_int_cst (TREE_TYPE (tem), 1)),
                              build_int_cst (TREE_TYPE (tem), 0));
        }
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2 (EQ_EXPR, type,
                              fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
                                           build_int_cst (TREE_TYPE (tem), 1)),
                              build_int_cst (TREE_TYPE (tem), 0));
        }
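      /* Both folds only care about bit 0: ~X & 1 is 1 exactly when the
         low bit of X is 0, i.e. (X & 1) == 0, and likewise for
         (X ^ 1) & 1.  */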
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_convert (type, arg0),
                              fold_build1 (BIT_NOT_EXPR, type, tem));
        }
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg0));
        }
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return fold_convert (type, TREE_OPERAND (arg0, 0));
        }
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1 (BIT_NOT_EXPR, type,
                              build2 (BIT_IOR_EXPR, type,
                                      fold_convert (type,
                                                    TREE_OPERAND (arg0, 0)),
                                      fold_convert (type,
                                                    TREE_OPERAND (arg1, 0))));
        }
      /* If arg0 is derived from the address of an object or function, we may
         be able to fold this expression using the object or function's
         alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT modulus, residue;
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

          modulus = get_pointer_modulus_and_residue (arg0, &residue);

          /* This works because modulus is a power of 2.  If this weren't the
             case, we'd have to replace it by its greatest power-of-2
             divisor: modulus & -modulus.  */
          if (low < modulus)
            return build_int_cst (type, residue & low);
        }
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
              (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
         if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
           || TREE_CODE (arg0) == RSHIFT_EXPR)
          && host_integerp (TREE_OPERAND (arg0, 1), 1)
          && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
             < TYPE_PRECISION (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
        {
          unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
          unsigned HOST_WIDE_INT mask
            = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
          unsigned HOST_WIDE_INT newmask, zerobits = 0;
          tree shift_type = TREE_TYPE (arg0);

          if (TREE_CODE (arg0) == LSHIFT_EXPR)
            zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
          else if (TREE_CODE (arg0) == RSHIFT_EXPR
                   && TYPE_PRECISION (TREE_TYPE (arg0))
                      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
            {
              unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
              tree arg00 = TREE_OPERAND (arg0, 0);
              /* See if more bits can be proven as zero because of
                 zero extension.  */
              if (TREE_CODE (arg00) == NOP_EXPR
                  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
                {
                  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
                  if (TYPE_PRECISION (inner_type)
                      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
                      && TYPE_PRECISION (inner_type) < prec)
                    {
                      prec = TYPE_PRECISION (inner_type);
                      /* See if we can shorten the right shift.  */
                      if (shiftc < prec)
                        shift_type = inner_type;
                    }
                }
              zerobits = ~(unsigned HOST_WIDE_INT) 0;
              zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
              zerobits <<= prec - shiftc;
              /* For arithmetic shift if sign bit could be set, zerobits
                 can contain actually sign bits, so no transformation is
                 possible, unless MASK masks them all away.  In that
                 case the shift needs to be converted into logical shift.  */
              if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
                  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
                {
                  if ((mask & zerobits) == 0)
                    shift_type = unsigned_type_for (TREE_TYPE (arg0));
                  else
                    zerobits = 0;
                }
            }

          /* ((X << 16) & 0xff00) is (X, 0).  */
          if ((mask & zerobits) == mask)
            return omit_one_operand (type, build_int_cst (type, 0), arg0);

          newmask = mask | zerobits;
          if (newmask != mask && (newmask & (newmask + 1)) == 0)
            {
              unsigned int prec;

              /* Only do the transformation if NEWMASK is some integer
                 mode's mask.  */
              for (prec = BITS_PER_UNIT;
                   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
                if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
                  break;
              if (prec < HOST_BITS_PER_WIDE_INT
                  || newmask == ~(unsigned HOST_WIDE_INT) 0)
                {
                  if (shift_type != TREE_TYPE (arg0))
                    {
                      tem = fold_build2 (TREE_CODE (arg0), shift_type,
                                         fold_convert (shift_type,
                                                       TREE_OPERAND (arg0, 0)),
                                         TREE_OPERAND (arg0, 1));
                      tem = fold_convert (type, tem);
                    }
                  else
                    tem = op0;
                  return fold_build2 (BIT_AND_EXPR, type, tem,
                                      build_int_cst_type (TREE_TYPE (op1),
                                                          newmask));
                }
            }
        }
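      /* E.g. for a 16-bit unsigned X, (X << 4) & 0xFFF0 may use the
         widened mask 0xFFF0 | 0xF == 0xFFFF; that is a full-mode mask,
         so the BIT_AND_EXPR can then be folded away entirely.  */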
      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
         NaNs or Infinities.  Skip the transformation
         for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
          && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree r = build_real (TREE_TYPE (arg0), dconst1);

          return omit_two_operands (type, r, arg0, arg1);
        }

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
          if (! HONOR_NANS (TYPE_MODE (elem_type))
              && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
            {
              tree r = build_real (elem_type, dconst1);
              /* omit_two_operands will call fold_convert for us.  */
              return omit_two_operands (type, r, arg0, arg1);
            }
        }
      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2 (RDIV_EXPR, type,
                            TREE_OPERAND (arg0, 0),
                            negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2 (RDIV_EXPR, type,
                            negate_expr (arg0),
                            TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue (fold_convert (type, negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -freciprocal-math.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (flag_reciprocal_math
              && 0 != (tem = const_binop (code, build_real (type, dconst1),
                                          arg1, 0)))
            return fold_build2 (MULT_EXPR, type, arg0, tem);
          /* Find the reciprocal if optimizing and the result is exact.  */
          if (optimize)
            {
              REAL_VALUE_TYPE r;
              r = TREE_REAL_CST (arg1);
              if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
                {
                  tem = build_real (type, r);
                  return fold_build2 (MULT_EXPR, type,
                                      fold_convert (type, arg0), tem);
                }
            }
        }
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                            fold_build2 (MULT_EXPR, type,
                                         TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold_build2 (MULT_EXPR, type,
                            fold_build2 (RDIV_EXPR, type, arg0,
                                         TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1), 0);
          if (tem)
            return fold_build2 (RDIV_EXPR, type, tem,
                                TREE_OPERAND (arg1, 0));
        }
      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                {
                  tree tmp = build_call_expr (tanfn, 1,
                                              CALL_EXPR_ARG (arg0, 0));
                  return fold_build2 (RDIV_EXPR, type,
                                      build_real (type, dconst1), tmp);
                }
            }

          /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    return build_call_expr (cosfn, 1, arg00);
                }
            }
          /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    {
                      tree tmp = build_call_expr (cosfn, 1, arg00);
                      return fold_build2 (RDIV_EXPR, type,
                                          build_real (type, dconst1),
                                          tmp);
                    }
                }
            }
          /* Optimize pow(x,c)/x as pow(x,c-1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              if (TREE_CODE (arg01) == REAL_CST
                  && !TREE_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  return build_call_expr (powfn, 2, arg1, arg);
                }
            }
          /* Optimize a/root(b/c) into a*root(c/b).  */
          if (BUILTIN_ROOT_P (fcode1))
            {
              tree rootarg = CALL_EXPR_ARG (arg1, 0);

              if (TREE_CODE (rootarg) == RDIV_EXPR)
                {
                  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                  tree b = TREE_OPERAND (rootarg, 0);
                  tree c = TREE_OPERAND (rootarg, 1);

                  tree tmp = fold_build2 (RDIV_EXPR, type, c, b);

                  tmp = build_call_expr (rootfn, 1, tmp);
                  return fold_build2 (MULT_EXPR, type, arg0, tmp);
                }
            }
          /* Optimize x/expN(y) into x*expN(-y).  */
          if (BUILTIN_EXPONENT_P (fcode1))
            {
              tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
              arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
              return fold_build2 (MULT_EXPR, type, arg0, arg1);
            }

          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
              tree arg11 = CALL_EXPR_ARG (arg1, 1);
              tree neg11 = fold_convert (type, negate_expr (arg11));
              arg1 = build_call_expr (powfn, 2, arg10, neg11);
              return fold_build2 (MULT_EXPR, type, arg0, arg1);
            }
        }
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
         a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));

              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying A / (B << N)"),
                                       WARN_STRICT_OVERFLOW_MISC);

              sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
                                    sh_cnt, build_int_cst (NULL_TREE, pow2));
              return fold_build2 (RSHIFT_EXPR, type,
                                  fold_convert (type, arg0), sh_cnt);
            }
        }
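      /* E.g. for unsigned A, A / (4 << N) becomes A >> (N + 2),
         since log2 (4) == 2.  */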
      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
         TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
          && TYPE_UNSIGNED (type)
          && code == FLOOR_DIV_EXPR)
        return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall thru */
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
        return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return fold_convert (type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
         undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2 (code, type,
                              fold_convert (type, TREE_OPERAND (arg0, 0)),
                              negate_expr (arg1));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (arg0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2 (code, type, negate_expr (arg0),
                              TREE_OPERAND (arg1, 0));
        }

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, it's not clear if they do
         after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert (type, tem);
        }

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
         effects in X.  */
      if (integer_onep (arg1))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
         proper warnings and errors.  */
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
         effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
        return omit_one_operand (type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return omit_one_operand (type, integer_zero_node, arg0);

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      strict_overflow_p = false;
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree c = arg1;
          /* Also optimize A % (C << N)  where C is a power of 2,
             to A & ((C << N) - 1).  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR)
            c = TREE_OPERAND (arg1, 0);

          if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
            {
              tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
                                       build_int_cst (TREE_TYPE (arg1), 1));
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "X % (power of two)"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (BIT_AND_EXPR, type,
                                  fold_convert (type, arg0),
                                  fold_convert (type, mask));
            }
        }
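      /* E.g. unsigned "X % 8" becomes "X & 7", and "X % (2 << N)"
         becomes "X & ((2 << N) - 1)".  */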
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)
          && TREE_INT_CST_HIGH (arg1) < 0
          && !TYPE_OVERFLOW_TRAPS (type)
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
          && !sign_bit_p (arg1, arg1))
        return fold_build2 (code, type, fold_convert (type, arg0),
                            fold_convert (type, negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == NEGATE_EXPR
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build2 (code, type, fold_convert (type, arg0),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying modulus"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert (type, tem);
        }

      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
        return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
        return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;
      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
                               + TREE_INT_CST_LOW (arg1));

          /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
             being well defined.  */
          if (low >= TYPE_PRECISION (type))
            {
              if (code == LROTATE_EXPR || code == RROTATE_EXPR)
                low = low % TYPE_PRECISION (type);
              else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
                return build_int_cst (type, 0);
              else
                low = TYPE_PRECISION (type) - 1;
            }

          return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
                              build_int_cst (type, low));
        }
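      /* E.g. (X >> 3) >> 2 becomes X >> 5.  A combined count that would
         reach the precision is clamped (or reduced modulo the precision
         for rotates) so the result stays well defined.  */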
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
         into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
           || (TYPE_UNSIGNED (type)
               && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
          && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
          tree lshift;
          tree arg00;

          if (low0 == low1)
            {
              arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));

              lshift = build_int_cst (type, -1);
              lshift = int_const_binop (code, lshift, arg1, 0);

              return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
            }
        }
      /* Rewrite an LROTATE_EXPR by a constant into an
         RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          tree tem = build_int_cst (TREE_TYPE (arg1),
                                    TYPE_PRECISION (type));
          tem = const_binop (MINUS_EXPR, tem, arg1, 0);
          return fold_build2 (RROTATE_EXPR, type, op0, tem);
        }

      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2 (TREE_CODE (arg0), type,
                            fold_build2 (code, type,
                                         TREE_OPERAND (arg0, 0), arg1),
                            fold_build2 (code, type,
                                         TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
         type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (arg1) == 0
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
          && ((TREE_INT_CST_LOW (arg1)
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
              == (unsigned int) TYPE_PRECISION (type)))
        return TREE_OPERAND (arg0, 0);
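      /* E.g. in a 32-bit type, rotating right by 19 and then by 13
         moves every bit full circle, so the innermost operand is
         returned unchanged.  */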
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
              (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
         if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree mask = fold_build2 (code, type,
                                   fold_convert (type, TREE_OPERAND (arg0, 1)),
                                   arg1);
          tree shift = fold_build2 (code, type,
                                    fold_convert (type, TREE_OPERAND (arg0, 0)),
                                    arg1);
          tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
          if (tem)
            return tem;
        }

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, arg1, arg0);
      tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, arg1, arg0);
      tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);
      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
          if (tem && !operand_equal_p (tem, arg0, 0))
            return fold_build2 (code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
          if (tem && !operand_equal_p (tem, arg1, 0))
            return fold_build2 (code, type, arg0, tem);
        }
    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
        return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
         to A || (B && C).  Note that either operator can be any of the four
         truth and/or operations and the transformation will still be
         valid.  Also note that we only care about order for the
         ANDIF and ORIF operators.  If B contains side effects, this
         might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
          && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
              || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
              || TREE_CODE (arg0) == TRUTH_AND_EXPR
              || TREE_CODE (arg0) == TRUTH_OR_EXPR)
          && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
        {
          tree a00 = TREE_OPERAND (arg0, 0);
          tree a01 = TREE_OPERAND (arg0, 1);
          tree a10 = TREE_OPERAND (arg1, 0);
          tree a11 = TREE_OPERAND (arg1, 1);
          int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                              || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                             && (code == TRUTH_AND_EXPR
                                 || code == TRUTH_OR_EXPR));

          if (operand_equal_p (a00, a10, 0))
            return fold_build2 (TREE_CODE (arg0), type, a00,
                                fold_build2 (code, type, a01, a11));
          else if (commutative && operand_equal_p (a00, a11, 0))
            return fold_build2 (TREE_CODE (arg0), type, a00,
                                fold_build2 (code, type, a01, a10));
          else if (commutative && operand_equal_p (a01, a10, 0))
            return fold_build2 (TREE_CODE (arg0), type, a01,
                                fold_build2 (code, type, a00, a11));

          /* This case is tricky because we must either have commutative
             operators or else A10 must not have side-effects.  */

          else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
                   && operand_equal_p (a01, a11, 0))
            return fold_build2 (TREE_CODE (arg0), type,
                                fold_build2 (code, type, a00, a10),
                                a01);
        }

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (code, type, op0, op1)))
        return tem;

      /* Check for the possibility of merging component references.  If our
         lhs is another similar operation, try to merge its rhs with our
         rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
          && 0 != (tem = fold_truthop (code, type,
                                       TREE_OPERAND (arg0, 1), arg1)))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
        return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_one_node, arg0);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  /* Only call invert_truthvalue if operand is a truth value.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	    tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
	  else
	    tem = invert_truthvalue (arg0);
	  return non_lvalue (fold_convert (type, tem));
	}
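      /* For instance, b ^ 1 with a BOOLEAN_TYPE operand goes through
	 invert_truthvalue and becomes !b; a plain integral truth value
	 gets an explicit TRUTH_NOT_EXPR instead.  */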
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_one_node, arg0);

      return NULL_TREE;
    case EQ_EXPR:
    case NE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue (fold_convert (type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue (fold_convert (type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      fold_convert (TREE_TYPE (arg0), arg1),
				      TREE_OPERAND (arg0, 1), 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (arg1))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
					 fold_convert (TREE_TYPE (arg0), arg1),
					 TREE_OPERAND (arg0, 1)));
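      /* For instance, (x ^ 5) == 3 becomes x == 6 here, the revised
	 constant being simply 5 ^ 3.  */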
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree cst = TREE_OPERAND (arg0, 1);

	  if (code == EQ_EXPR
	      && !integer_zerop (cst))
	    return omit_two_operands (type, boolean_false_node,
				      TREE_OPERAND (arg0, 0), arg1);
	  else
	    return omit_two_operands (type, boolean_true_node,
				      TREE_OPERAND (arg0, 0), arg1);
	}

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && integer_zerop (arg1))
	return fold_build2 (code, type,
			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
				      arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				 build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2 (code, type,
				  fold_convert (TREE_TYPE (arg1), tem), arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
				      arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				 build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2 (code, type,
				  fold_convert (TREE_TYPE (arg1), tem), arg1);
	    }
	}
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
				     fold_convert (newtype,
						   TREE_OPERAND (arg0, 0)),
				     fold_convert (newtype,
						   TREE_OPERAND (arg0, 1)));

	  return fold_build2 (code, type, newmod,
			      fold_convert (newtype, arg1));
	}
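      /* For instance, for a signed int x, x % 8 == 0 becomes
	 (unsigned int) x % 8U == 0; with truncating division both
	 tests hold exactly when the low three bits of x are zero.  */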
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

	  /* Check for a valid shift count.  */
	  if (TREE_INT_CST_HIGH (arg001) == 0
	      && TREE_INT_CST_LOW (arg001) < prec)
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
		  tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
		  return fold_build2 (code, type, tem, arg1);
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
				    arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand (type,
					 code == EQ_EXPR ? integer_one_node
							 : integer_zero_node,
					 arg000);
	    }
	}
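      /* For instance, ((x >> 2) & 4) != 0 becomes (x & 16) != 0 since
	 4 << 2 does not overflow; when C2 << C1 would reach the sign
	 bit of a signed X, the test becomes x < 0 instead.  */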
      /* If this is an NE comparison of zero with an AND of one, remove the
	 comparison since the AND will give the correct value.  */
      if (code == NE_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1)))
	return fold_convert (type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			    arg0, fold_convert (TREE_TYPE (arg0),
						integer_zero_node));
      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
      if (tem)
	return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1 (BIT_NOT_EXPR,
				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
				   TREE_OPERAND (arg0, 1));
	  tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       arg1, notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand (type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
	  tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       TREE_OPERAND (arg0, 1), notd);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand (type, rslt, arg0);
	}
      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
		 == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
	      return fold_build2 (code, type, iref,
				  build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (TREE_INT_CST_HIGH (arg01) == 0
	      && TREE_INT_CST_LOW (arg01)
		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert (itype, arg00);
		}
	      return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  type, arg00, build_int_cst (itype, 0));
	    }
	}
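      /* For instance, with a 32-bit int x, (x >> 31) != 0 becomes
	 x < 0 and (x >> 31) == 0 becomes x >= 0; an unsigned operand
	 is first converted to the corresponding signed type.  */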
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    build_int_cst (TREE_TYPE (arg1), 0));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
			    build_int_cst (TREE_TYPE (arg1), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
					 TREE_OPERAND (arg0, 1), arg1));
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
			     TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
			     TREE_OPERAND (arg0, 1));
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
			      type, tem, arg1);
	}

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
	}

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
			     arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			      tem, build_int_cst (TREE_TYPE (tem), 0));
	}
      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand (type, res, arg0);
	}

      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (code, type,
			    TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg1, 0));
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg00, arg10),
					     arg01),
				build_int_cst (itype, 0));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg00, arg11),
					     arg01),
				build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg01, arg10),
					     arg00),
				build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg01, arg11),
					     arg00),
				build_int_cst (itype, 0));
	}
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2 (code, type, arg00, arg10);
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2 (code, type, arg00, arg11);
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2 (code, type, arg01, arg10);
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2 (code, type, arg01, arg11);

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    return fold_build2 (code, type,
				fold_build2 (BIT_XOR_EXPR, itype, arg00,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg01, arg11)),
				arg10);
	}
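      /* For instance, (x ^ z) == (y ^ z) becomes x == y, and
	 (x ^ 3) == (y ^ 5) becomes (x ^ 6) == y by folding 3 ^ 5.  */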
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary (code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands (type, boolean_false_node,
					      imag0, imag1);
		  return fold_build2 (NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands (type, boolean_true_node,
					      imag0, imag1);
		  return fold_build2 (EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary (code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands (type, boolean_false_node,
					      real0, real1);
		  return fold_build2 (NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands (type, boolean_true_node,
					      real0, real1);
		  return fold_build2 (EQ_EXPR, type, real0, real1);
		}
	    }
	}

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
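      /* For instance, for a signed int x, x + 1 > x folds to 1 here
	 (and x - 1 < x likewise) on the assumption that signed
	 overflow is undefined; fold_overflow_warning lets
	 -Wstrict-overflow report that assumption.  */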
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int width = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && width <= 2 * HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2 (EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2 (NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1), 0);
		  return fold_build2 (EQ_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1), 0);
		  return fold_build2 (NE_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2 (EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2 (NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (NE_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (EQ_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st;
		    st = signed_type_for (TREE_TYPE (arg1));
		    return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
					type, fold_convert (st, arg0),
					build_int_cst (st, 0));
		  }
	      }
	  }
      }
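      /* For instance, for an 8-bit unsigned x, x > 255 folds to 0,
	 x <= 255 folds to 1, x >= 255 becomes x == 255, and
	 comparisons against 127 such as x <= 127 flip to sign tests
	 like (signed char) x >= 0 in the matching signed type.  */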
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    build2 (GE_EXPR, type,
				    TREE_OPERAND (arg0, 0), tem),
			    build2 (LE_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand (type, integer_one_node, arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand (type, integer_zero_node, arg0);
	}
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			       TREE_OPERAND (arg1, 1)),
		       build_int_cst (TREE_TYPE (arg0), 0));
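      /* For instance, with unsigned x, x < (1U << y) becomes
	 (x >> y) == 0 and x >= (1U << y) becomes (x >> y) != 0,
	 trading the shift of the constant 1 for a shift of X.  */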
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	return
	  build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		  fold_convert (TREE_TYPE (arg0),
				build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
					TREE_OPERAND (TREE_OPERAND (arg1, 0),
						      1))),
		  build_int_cst (TREE_TYPE (arg0), 0));

      return NULL_TREE;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2 (code, type, fold_convert (newtype, targ0),
			      fold_convert (newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert (type, arg1);
      return pedantic_non_lvalue (tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Returns *TP if it
   is a LABEL_EXPR; otherwise it returns NULL_TREE.  Do not check the
   sub-tree of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp,
		  int *walk_subtrees,
		  void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;
    case GOTO_EXPR:
      *walk_subtrees = 0;
    default:
      return NULL_TREE;
    }
}

/* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
   accessible from outside the sub-tree.  Returns true if such a label
   is found, false otherwise.  */

static bool
contains_label_p (tree st)
{
  return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
				    field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away the operand that contains a label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue (tem);
	  return NULL_TREE;
	}

      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand (type, arg1, arg0);
      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  tem = fold_truth_not_expr (arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build3 (code, type, tem, op2, op1);
	}
      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue (fold_convert (type,
						  invert_truthvalue (arg0)));
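      /* For instance, (a < b) ? 0 : 1 becomes !(a < b), which
	 invert_truthvalue can in turn rewrite as a >= b when that
	 inversion is valid for the operand types.  */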
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (TYPE_PRECISION (TREE_TYPE (tem))
	      < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = ((unsigned HOST_WIDE_INT) -1
			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert (tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert (tem_type, tem);
		}
	      else
		tem = NULL_TREE;
	    }

	  if (tem)
	    return fold_convert (type,
				 fold_build2 (BIT_AND_EXPR,
					      TREE_TYPE (tem), tem,
					      fold_convert (TREE_TYPE (tem),
							    arg1)));
	}
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);

	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2 (BIT_AND_EXPR, type,
				TREE_OPERAND (tem, 0), arg1);
	}

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue (fold_convert (type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    fold_convert (type, arg0),
			    arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build2 (TRUTH_ORIF_EXPR, type,
				fold_convert (type, tem),
				arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build2 (TRUTH_ANDIF_EXPR, type,
				fold_convert (type, tem),
				op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2 (TRUTH_ORIF_EXPR, type,
			    fold_convert (type, arg0),
			    op2);

      return NULL_TREE;

    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
	  && type == TREE_TYPE (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
	      && (idx % width) == 0
	      && (idx = idx / width)
		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      tree elements = NULL_TREE;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		elements = TREE_VECTOR_CST_ELTS (arg0);
	      else
		{
		  unsigned HOST_WIDE_INT idx;
		  tree value;

		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0),
					      idx, value)
		    elements = tree_cons (NULL_TREE, value, elements);
		}
	      while (idx-- > 0 && elements)
		elements = TREE_CHAIN (elements);
	      if (elements)
		return TREE_VALUE (elements);
	      else
		return fold_convert (type, integer_zero_node);
	    }
	}

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
	  && integer_zerop (op2))
	return fold_convert (type, arg0);

      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;
  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }
))
13168 tree type
= TREE_TYPE (t
);
13169 tree op0
, op1
, op2
;
13171 switch (TREE_CODE_LENGTH (code
))
13174 op0
= TREE_OPERAND (t
, 0);
13175 tem
= fold_unary (code
, type
, op0
);
13176 return tem
? tem
: expr
;
13178 op0
= TREE_OPERAND (t
, 0);
13179 op1
= TREE_OPERAND (t
, 1);
13180 tem
= fold_binary (code
, type
, op0
, op1
);
13181 return tem
? tem
: expr
;
13183 op0
= TREE_OPERAND (t
, 0);
13184 op1
= TREE_OPERAND (t
, 1);
13185 op2
= TREE_OPERAND (t
, 2);
13186 tem
= fold_ternary (code
, type
, op0
, op1
, op2
);
13187 return tem
? tem
: expr
;
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = VEC_index (constructor_elt, elts, middle)->index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return VEC_index (constructor_elt, elts, middle)->value;
	      }
	  }

	return t;
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */
tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}
static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED,
		   const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
{
  const void **slot;
  enum tree_code code;
  struct tree_function_decl buf;
  int i, len;

recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_function_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  slot = (const void **) htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree) &buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_empty (ht);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with an
   operand OP0.  */

tree
fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary (code, type, op0);
  if (!tem)
    tem = build1_stat (code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}

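/* Usage sketch (illustrative only, not part of the original file):
   fold_build1 is the macro entry point that expands to fold_build1_stat.
   Folding NEGATE_EXPR of the constant 7 yields the INTEGER_CST -7
   directly instead of building a NEGATE_EXPR node.  The function name
   below is hypothetical.  */
#if 0
static tree
example_fold_negate_seven (void)
{
  tree seven = build_int_cst (integer_type_node, 7);
  /* Returns an INTEGER_CST holding -7; no NEGATE_EXPR is built.  */
  return fold_build1 (NEGATE_EXPR, integer_type_node, seven);
}
#endif
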
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  Return a folded expression if successful.
   Otherwise, return a tree expression with code CODE of type TYPE
   with operands OP0 and OP1.  */

tree
fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
                  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary (code, type, op0, op1);
  if (!tem)
    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}

/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1,
                  tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary (code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}

/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  htab_t ht;
  int i;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  htab_empty (ht);
#endif

  tem = fold_builtin_call_array (type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  htab_empty (ht);

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  htab_delete (ht);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}

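/* Usage sketch (hypothetical, not in the original file): folding a
   builtin call through fold_build_call_array.  FABS_FN_ADDR is assumed
   to be the address expression of a foldable builtin such as fabs; if
   the argument is a constant, fold_builtin_call_array returns the
   folded constant, otherwise a CALL_EXPR is built.  */
#if 0
static tree
example_fold_builtin_call (tree fabs_fn_addr, tree arg)
{
  tree args[1];
  args[0] = arg;
  return fold_build_call_array (double_type_node, fabs_fn_addr, 1, args);
}
#endif
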
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1 (code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2 (code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
                         tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3 (code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer (tree type, tree fn,
                                   int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array (type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT

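/* Usage sketch (hypothetical, not in the original file): when a front
   end folds a static initializer such as "static double d = 1.0 / 3.0;",
   it can go through fold_build2_initializer so that -ftrapping-math and
   related flags do not block the compile-time division.  ONE and THREE
   are assumed to be REAL_CST operands.  */
#if 0
static tree
example_fold_static_initializer (tree one, tree three)
{
  /* The division folds even when flag_trapping_math is set, because
     folding_initializer is 1 for the duration of the call.  */
  return fold_build2_initializer (RDIV_EXPR, double_type_node, one, three);
}
#endif
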
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1, 0)))
              && !TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || integer_zerop (bottom)
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
                                             top, bottom, 0));

    default:
      return 0;
    }
}

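/* Worked example (illustrative only, not in the original file):
   multiple_of_p recurses structurally, so it can decide that

     (J * 8) + 16   is a multiple of   8

   without computing the value: PLUS_EXPR needs both operands to be
   multiples, MULT_EXPR needs only one (here the constant 8 matches
   BOTTOM exactly), and INTEGER_CST 16 satisfies 16 % 8 == 0.  The
   function name below is hypothetical.  */
#if 0
static int
example_multiple_of_8 (tree j)
{
  tree eight = build_int_cst (sizetype, 8);
  tree sixteen = build_int_cst (sizetype, 16);
  tree t = fold_build2 (PLUS_EXPR, sizetype,
                        fold_build2 (MULT_EXPR, sizetype, j, eight),
                        sixteen);
  return multiple_of_p (sizetype, t, eight);  /* Returns 1.  */
}
#endif
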
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}

/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
        return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          *strict_overflow_p = true;
          return true;
        }
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return true;
                return tree_expr_nonnegative_warnv_p (op0,
                                                      strict_overflow_p);
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
        return (tree_expr_nonnegative_warnv_p (op0,
                                               strict_overflow_p)
                && tree_expr_nonnegative_warnv_p (op1,
                                                  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == NOP_EXPR
          && TREE_CODE (op1) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (type);
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (op0, op1, 0))
            return true;
          return (tree_expr_nonnegative_warnv_p (op0,
                                                 strict_overflow_p)
                  && tree_expr_nonnegative_warnv_p (op1,
                                                    strict_overflow_p));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
          && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
        {
          tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op0, 0))
            : TREE_TYPE (op0);
          tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op1, 0))
            : TREE_TYPE (op1);

          bool unsigned0 = TYPE_UNSIGNED (inner0);
          bool unsigned1 = TYPE_UNSIGNED (inner1);

          if (TREE_CODE (op0) == INTEGER_CST)
            unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

          if (TREE_CODE (op1) == INTEGER_CST)
            unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

          if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
              && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
            {
              unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
                ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
                : TYPE_PRECISION (inner0);

              unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
                ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
                : TYPE_PRECISION (inner1);

              return precision0 + precision1 < TYPE_PRECISION (type);
            }
        }
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              || tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
                                                strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
                               tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
        CASE_FLT_FN (BUILT_IN_ACOS):
        CASE_FLT_FN (BUILT_IN_ACOSH):
        CASE_FLT_FN (BUILT_IN_CABS):
        CASE_FLT_FN (BUILT_IN_COSH):
        CASE_FLT_FN (BUILT_IN_ERFC):
        CASE_FLT_FN (BUILT_IN_EXP):
        CASE_FLT_FN (BUILT_IN_EXP10):
        CASE_FLT_FN (BUILT_IN_EXP2):
        CASE_FLT_FN (BUILT_IN_FABS):
        CASE_FLT_FN (BUILT_IN_FDIM):
        CASE_FLT_FN (BUILT_IN_HYPOT):
        CASE_FLT_FN (BUILT_IN_POW10):
        CASE_INT_FN (BUILT_IN_FFS):
        CASE_INT_FN (BUILT_IN_PARITY):
        CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
        /* Always true.  */
        return true;

        CASE_FLT_FN (BUILT_IN_SQRT):
        /* sqrt(-0.0) is -0.0.  */
        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_ASINH):
        CASE_FLT_FN (BUILT_IN_ATAN):
        CASE_FLT_FN (BUILT_IN_ATANH):
        CASE_FLT_FN (BUILT_IN_CBRT):
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN (BUILT_IN_ERF):
        CASE_FLT_FN (BUILT_IN_EXPM1):
        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN (BUILT_IN_FMOD):
        CASE_FLT_FN (BUILT_IN_FREXP):
        CASE_FLT_FN (BUILT_IN_LCEIL):
        CASE_FLT_FN (BUILT_IN_LDEXP):
        CASE_FLT_FN (BUILT_IN_LFLOOR):
        CASE_FLT_FN (BUILT_IN_LLCEIL):
        CASE_FLT_FN (BUILT_IN_LLFLOOR):
        CASE_FLT_FN (BUILT_IN_LLRINT):
        CASE_FLT_FN (BUILT_IN_LLROUND):
        CASE_FLT_FN (BUILT_IN_LRINT):
        CASE_FLT_FN (BUILT_IN_LROUND):
        CASE_FLT_FN (BUILT_IN_MODF):
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN (BUILT_IN_SCALB):
        CASE_FLT_FN (BUILT_IN_SCALBLN):
        CASE_FLT_FN (BUILT_IN_SCALBN):
        CASE_FLT_FN (BUILT_IN_SIGNBIT):
        CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
        CASE_FLT_FN (BUILT_IN_SINH):
        CASE_FLT_FN (BUILT_IN_TANH):
        CASE_FLT_FN (BUILT_IN_TRUNC):
        /* True if the 1st argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_FMAX):
        /* True if the 1st OR 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                || (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_FMIN):
        /* True if the 1st AND 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                && (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_COPYSIGN):
        /* True if the 2nd argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg1,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POWI):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer.  */
        if (TREE_CODE (arg1) == INTEGER_CST
            && (TREE_INT_CST_LOW (arg1) & 1) == 0)
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POW):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer valued real.  */
        if (TREE_CODE (arg1) == REAL_CST)
          {
            REAL_VALUE_TYPE c;
            HOST_WIDE_INT n;

            c = TREE_REAL_CST (arg1);
            n = real_to_integer (&c);
            if ((n & 1) == 0)
              {
                REAL_VALUE_TYPE cint;
                real_from_integer (&cint, VOIDmode, n,
                                   n < 0 ? -1 : 0, 0);
                if (real_identical (&c, &cint))
                  return true;
              }
          }
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

      default:
        break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
                                          type);
}

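/* Illustrative sketch (hypothetical, not in the original file): the
   BUILT_IN_POW case above is what lets fold conclude that pow (x, 2.0)
   is non-negative for any x, since 2.0 is an even integer valued real.
   The sketch assumes built_in_decls[BUILT_IN_POW] has been initialized
   by the front end.  */
#if 0
static bool
example_pow_x_squared_nonneg (tree x)
{
  bool sov = false;
  tree two = build_real (double_type_node, dconst2);
  tree p = build_call_expr (built_in_decls[BUILT_IN_POW], 2, x, two);
  return tree_expr_nonnegative_warnv_p (p, &sov);  /* true */
}
#endif
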
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p);

        return false;
      }

    case CALL_EXPR:
      {
        tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

        return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
                                              get_callee_fndecl (t),
                                              arg0,
                                              arg1,
                                              strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                            strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
                                            strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case EXC_PTR_EXPR:
    case SSA_NAME:
    case FILTER_EXPR:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}

/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}

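/* Usage sketch (hypothetical, not in the original file): asking whether
   ABS_EXPR of a signed variable is non-negative.  When signed overflow
   is undefined for the type this returns true and routes the
   -Wstrict-overflow note through fold_overflow_warning, because
   ABS_EXPR<INT_MIN> would wrap; with -fwrapv it returns false.  */
#if 0
static bool
example_abs_nonnegative (tree signed_var)
{
  tree a = fold_build1 (ABS_EXPR, integer_type_node, signed_var);
  return tree_expr_nonnegative_p (a);
}
#endif
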
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

static bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0,
                                              strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    default:
      break;
    }

  return false;
}

/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

static bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* With the presence of negative values it is hard
             to say something.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0,
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0,
                                            strict_overflow_p));

    default:
      break;
    }

  return false;
}

/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

static bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = get_base_address (TREE_OPERAND (t, 0));

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  */
        if (VAR_OR_FUNCTION_DECL_P (base))
          return !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }

  return false;
}

/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case EXC_PTR_EXPR:
    case SSA_NAME:
    case FILTER_EXPR:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}

/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}

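/* Usage sketch (hypothetical, not in the original file): the address of
   a non-weak declaration is known to be nonzero, which is what lets
   comparisons like "&v != 0" fold to 1.  */
#if 0
static bool
example_addr_nonzero (tree var_decl)
{
  tree addr = build_fold_addr_expr (var_decl);
  return tree_expr_nonzero_p (addr);  /* true unless DECL_WEAK */
}
#endif
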
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop (index, fold_convert (sizetype, low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}

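/* Worked example (illustrative; EXP is assumed to have been built by
   the front end): for the C expression "abc"[1], EXP is an ARRAY_REF
   whose operand 0 is the STRING_CST "abc" and whose operand 1 is the
   INTEGER_CST 1.  All the guards above pass (single-byte integer mode,
   index within TREE_STRING_LENGTH), so the routine returns the
   INTEGER_CST 98 ('b').  */
#if 0
static tree
example_read_from_string (tree exp)
{
  return fold_read_from_constant_string (exp);  /* INTEGER_CST 'b' */
}
#endif
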
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                   TREE_INT_CST_HIGH (arg0),
                                   &low, &high);
        t = force_fit_type_double (type, low, high, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          {
            TREE_OVERFLOW (t) = 1;
            TREE_CONSTANT_OVERFLOW (t) = 1;
          }
        else if (TREE_CONSTANT_OVERFLOW (arg0))
          TREE_CONSTANT_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        t = arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = force_fit_type_double (type, low, high, -1,
                                     overflow | TREE_OVERFLOW (arg0));
        }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
                             ~TREE_INT_CST_HIGH (arg0), 0,
                             TREE_OVERFLOW (arg0));

  return t;
}

/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;

  return constant_boolean_node (result, type);
}

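/* Worked example (illustrative only, not in the original file):
   folding 3 > 5.  GT is handled by swapping the operands and doing LT,
   so the code above computes INT_CST_LT (5, 3) = 0 and returns the
   boolean false node.  */
#if 0
static tree
example_fold_gt (void)
{
  tree three = build_int_cst (integer_type_node, 3);
  tree five = build_int_cst (integer_type_node, 5);
  return fold_relational_const (GT_EXPR, boolean_type_node, three, five);
}
#endif
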
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right hand side of the modify expression inside the
     return, has no side effects.  If so, we don't need to wrap the
     expression in a cleanup point expression.  Note we don't check the
     left hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          HOST_WIDE_INT offset = tree_low_cst (op01, 0);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);

          if (offset / part_widthi
              <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
                                part_width, index);
        }
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}

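/* Usage sketch (hypothetical, not in the original file): *&x simplifies
   back to x, so an indirection through an ADDR_EXPR never needs to
   materialize an INDIRECT_REF node.  */
#if 0
static tree
example_fold_deref_of_addr (tree x)
{
  tree addr = build_fold_addr_expr (x);
  return build_fold_indirect_ref (addr);  /* yields X itself */
}
#endif
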
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
          unsigned HOST_WIDE_INT high;
          bool overflow_p;

          if ((low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          high = TREE_INT_CST_HIGH (value);
          low &= ~(divisor - 1);
          low += divisor;
          if (low == 0)
            {
              high++;
              if (high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), low, high,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop (PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop (BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}

/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}

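/* Usage sketch (hypothetical, not in the original file): &a[3] and
   &a[1] share the core A, so their difference folds to a compile-time
   constant of 2 * sizeof (*a) bytes.  */
#if 0
static bool
example_ptr_difference (tree addr_a3, tree addr_a1)
{
  HOST_WIDE_INT diff;
  if (ptr_difference_const (addr_a3, addr_a1, &diff))
    /* DIFF now holds the byte offset between the two addresses.  */
    return true;
  return false;
}
#endif
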
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
                            arg0 ? arg0 : TREE_OPERAND (exp, 0),
                            arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                            arg0 ? arg0 : TREE_OPERAND (exp, 1),
                            arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand (TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr (get_callee_fndecl (exp), 1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}