/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "coretypes.h"
#include "fixed-value.h"
#include "langhooks.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
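
/* Illustrative sketch (not part of GCC): exercising the sign-based
   overflow test above on a host "long", assuming a 2's complement
   representation.  The unsigned detour keeps the addition itself
   well-defined in ISO C.  */
#if 0
#include <stdio.h>
#include <limits.h>

int
main (void)
{
  long a = LONG_MAX, b = 1;
  long sum = (long) ((unsigned long) a + (unsigned long) b);

  /* a and b agree in sign but sum differs, so the macro yields 1.  */
  printf ("%d\n", OVERFLOW_SUM_SIGN (a, b, sum));
  return 0;
}
#endif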
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
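
/* Illustrative sketch (not part of GCC): a round trip through the
   half-word representation above, assuming a 64-bit HOST_WIDE_INT so
   that BASE is 2^32.  */
#if 0
#include <stdio.h>

int
main (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;

  /* 0x1_23456789 in the low word, 0x42 in the high word.  */
  encode (words, 0x123456789, 0x42);
  /* words[0] = 0x23456789, words[1] = 0x1, words[2] = 0x42, words[3] = 0.  */
  decode (words, &low, &hi);
  printf ("%llx %llx\n", (unsigned long long) low, (long long) hi);
  return 0;
}
#endif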
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
		<< (prec - HOST_BITS_PER_WIDE_INT - 1)))
	h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
	h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  h1 = -1;
	  l1 |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
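
/* Illustrative sketch (not part of GCC): what the truncate-then-extend
   dance above computes for a hypothetical signed 8-bit type, worked on
   plain host integers and assuming 2's complement.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned long l1 = 0x1f0;	/* value 496, does not fit in 8 bits */
  unsigned int prec = 8;

  l1 &= (1ul << prec) - 1;	/* clear bits beyond the precision: 0xf0 */
  if (l1 & (1ul << (prec - 1)))
    l1 |= ~((1ul << prec) - 1);	/* sign extend: all-ones...f0 == -16 */

  printf ("%ld\n", (long) l1);	/* prints -16; 496 != -16 => overflow */
  return 0;
}
#endif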
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
		       HOST_WIDE_INT high, int overflowable,
		       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  tree t = make_node (INTEGER_CST);
	  TREE_INT_CST_LOW (t) = low;
	  TREE_INT_CST_HIGH (t) = high;
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
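
/* Illustrative sketch (not part of GCC): the carry detection used
   above.  After l = l1 + l2 wraps modulo 2^N, the sum is smaller than
   an operand exactly when a carry out occurred.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned long l1 = ~0ul;		/* all ones */
  unsigned long l2 = 1;
  unsigned long l = l1 + l2;		/* wraps to 0 */
  printf ("carry = %d\n", l < l1);	/* prints carry = 1 */
  return 0;
}
#endif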
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
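
/* Illustrative sketch (not part of GCC): why the half-word schoolbook
   multiply above never overflows its accumulator.  With 16-bit digits,
   the worst-case digit product plus the worst-case incoming carry
   still fits 32 bits, as the comments in the loop claim.  */
#if 0
#include <stdio.h>
#include <stdint.h>

int
main (void)
{
  uint32_t carry = 0xFFFF;		/* worst-case incoming carry */
  uint32_t a = 0xFFFF, b = 0xFFFF;	/* worst-case digits */

  carry += a * b;	/* 0xFFFE0001 + 0xFFFF == 0xFFFF0000: no wrap */
  printf ("digit %04x carry %04x\n", carry & 0xFFFF, carry >> 16);
  return 0;
}
#endif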
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
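
/* Illustrative sketch (not part of GCC): the `>> (N - count - 1) >> 1'
   idiom used above.  A single shift by N - count would be undefined
   when count == 0, so the shift is split into two well-defined steps.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned long l1 = 0xdeadbeef;
  int count = 0;			/* worst case for the naive form */
  int nbits = 8 * sizeof l1;

  /* l1 >> (nbits - count) would shift by the full word width: UB.
     Shifting by nbits - count - 1 and then by 1 gives 0, as desired.  */
  unsigned long spill = l1 >> (nbits - count - 1) >> 1;
  printf ("%lu\n", spill);		/* prints 0 */
  return 0;
}
#endif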
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
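
/* Illustrative sketch (not part of GCC): the rotate-as-two-shifts
   composition used above, on a plain 32-bit value.  */
#if 0
#include <stdio.h>
#include <stdint.h>

int
main (void)
{
  uint32_t x = 0x80000001u;
  unsigned count = 4, prec = 32;

  /* Left rotation ORs the left shift with the bits that wrapped.  */
  uint32_t r = (x << count) | (x >> (prec - count));
  printf ("%08x\n", (unsigned) r);	/* prints 00000018 */
  return 0;
}
#endif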
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Ensure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num[num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
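
/* Illustrative sketch (not part of GCC): how the four rounding styles
   above differ on the same signed division, -7 / 2.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int num = -7, den = 2;
  int trunc = num / den;	/* -3: C division rounds toward zero */
  int rem = num - trunc * den;	/* -1 */
  /* FLOOR: bump down when the signs differ and there is a remainder.  */
  int floor_q = trunc - (rem != 0 && (num < 0) != (den < 0));	/* -4 */
  /* CEIL: bump up when the signs agree and there is a remainder.  */
  int ceil_q = trunc + (rem != 0 && (num < 0) == (den < 0));	/* -3 */
  /* ROUND: bump away from zero when 2*|rem| >= |den|.  */
  int round_q = trunc + ((2 * (rem < 0 ? -rem : rem) >= (den < 0 ? -den : den))
			 ? (trunc < 0 || rem < 0 ? -1 : 1) : 0);	/* -4 */

  printf ("%d %d %d %d\n", trunc, floor_q, ceil_q, round_q);
  return 0;
}
#endif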
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h,
		       type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !expr_has_location (stmt))
    locus = input_location;
  else
    locus = expr_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
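
/* Illustrative sketch (not part of GCC): the intended call pattern for
   the deferral machinery above.  try_fold_quietly is a hypothetical
   caller that folds a speculative expression and only lets a deferred
   warning out when it actually keeps the result.  */
#if 0
static tree
try_fold_quietly (tree expr, bool *used)
{
  tree folded;

  fold_defer_overflow_warnings ();
  folded = fold (expr);
  *used = (folded != NULL_TREE && TREE_CONSTANT (folded));
  /* Issue the deferred warning only when the result is kept.  */
  fold_undefer_overflow_warnings (*used, NULL_TREE, 0);
  return *used ? folded : expr;
}
#endif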
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;
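
    default:
      break;
    }
  return false;
}

/* Illustrative sketch (not part of GCC): the identity that makes the
   classification above useful, checked numerically for sin.  */
#if 0
#include <stdio.h>
#include <math.h>

int
main (void)
{
  double x = 0.75;
  /* For an odd function, -f(x) == f(-x), so a negation can be pushed
     into the call and possibly cancelled against another one.  */
  printf ("%g %g\n", -sin (x), sin (-x));	/* same value twice */
  return 0;
}
#endif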
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
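
/* Illustrative sketch (not part of GCC): the one signed value the test
   above rejects.  In an N-bit 2's complement type the minimum,
   -2^(N-1), has no positive counterpart.  */
#if 0
#include <stdio.h>
#include <limits.h>

int
main (void)
{
  /* Negating INT_MIN overflows; every other int negates cleanly.
     Done in unsigned arithmetic here to keep the demo well-defined.  */
  unsigned int m = (unsigned int) INT_MIN;
  printf ("%d\n", (int) (0u - m) == INT_MIN);	/* prints 1 */
  return 0;
}
#endif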
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
			    build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2 (COMPLEX_EXPR, type,
			    fold_negate_expr (TREE_OPERAND (t, 0)),
			    fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1 (CONJ_EXPR, type,
			    fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2 (MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr (fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
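
/* Illustrative sketch (not part of GCC): what the decomposition above
   produces.  The tree IN and the locals here are hypothetical.  */
#if 0
  tree in;			/* suppose in == (x - 4) + c, where c is
				   TREE_CONSTANT but not a literal */
  tree conp, litp, minus_litp, var;

  var = split_tree (in, PLUS_EXPR, &conp, &litp, &minus_litp, 0);
  /* At this level: conp == c, litp == 0, minus_litp == 0, and
     var == (x - 4).  Splitting (x - 4) itself with PLUS_EXPR gives
     var == x and minus_litp == 4, because the literal was subtracted.  */
#endif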
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
		      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
			       ((!uns || is_sizetype) && overflow)
			       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
	       && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	TREE_CONSTANT_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2, notrunc);
	  imag = const_binop (code, i1, i2, notrunc);
	  break;

	case MULT_EXPR:
	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2, notrunc),
			      const_binop (MULT_EXPR, i1, i2, notrunc),
			      notrunc);
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2, notrunc),
			      const_binop (MULT_EXPR, i1, r2, notrunc),
			      notrunc);
	  break;

	case RDIV_EXPR:
	  {
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2, notrunc),
			     const_binop (MULT_EXPR, i1, i2, notrunc),
			     notrunc);
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2, notrunc),
			     const_binop (MULT_EXPR, r1, i2, notrunc),
			     notrunc);

	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
	      code = TRUNC_DIV_EXPR;

	    real = const_binop (code, t1, magsquared, notrunc);
	    imag = const_binop (code, t2, magsquared, notrunc);
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
			     TREE_INT_CST_HIGH (arg1),
			     /* Don't set the overflow when
				converting from a pointer,  */
			     !POINTER_TYPE_P (TREE_TYPE (arg1))
			     /* or to a sizetype with same signedness
				and the precision is unchanged.
				???  sizetype is always sign-extended,
				but its signedness depends on the
				frontend.  Thus we see spurious overflows
				here if we do not check this.  */
			     && !((TYPE_PRECISION (TREE_TYPE (arg1))
				   == TYPE_PRECISION (type))
				  && (TYPE_UNSIGNED (TREE_TYPE (arg1))
				      == TYPE_UNSIGNED (type))
				  && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
				       && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
				      || (TREE_CODE (type) == INTEGER_TYPE
					  && TYPE_IS_SIZETYPE (type)))),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      low = 0;
      high = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = force_fit_type_double (type, low, high, -1,
                             overflow | TREE_OVERFLOW (arg1));
  return t;
}

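/* Worked example (editorial note): under these rules, folding
   (int) __builtin_nan ("") gives the integer constant 0 and folding
   (int) 1.0e30 gives INT_MAX, each with TREE_OVERFLOW set on the
   result, matching the saturating Java-style semantics described in
   the comment above.  */
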
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      lshift_double (temp.low, temp.high,
                     - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
                     &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      lshift_double (temp.low, temp.high,
                     GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
                     &temp_trunc.low, &temp_trunc.high,
                     SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp.low = 0;
      temp.high = 0;
      temp_trunc.low = 0;
      temp_trunc.high = 0;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do so by adding 1 to temp whenever the discarded fractional
     bits are not all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    {
      double_int one;
      one.low = 1;
      one.high = 0;
      temp = double_int_add (temp, one);
    }

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp.low, temp.high, -1,
                             (temp.high < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

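/* Worked example (editorial note): for a signed fixed-point constant
   -1.25, the arithmetic right shift by fbit rounds toward negative
   infinity and produces -2; since the discarded fractional bits were
   not all zero, 1 is added back, giving -1, i.e. truncation toward
   zero.  */
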
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    {
      TREE_OVERFLOW (t) = 1;
      TREE_CONSTANT_OVERFLOW (t) = 1;
    }
  else if (TREE_CONSTANT_OVERFLOW (arg1))
    TREE_CONSTANT_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    {
      TREE_OVERFLOW (t) = 1;
      TREE_CONSTANT_OVERFLOW (t) = 1;
    }
  else if (TREE_CONSTANT_OVERFLOW (arg1))
    TREE_CONSTANT_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    {
      TREE_OVERFLOW (t) = 1;
      TREE_CONSTANT_OVERFLOW (t) = 1;
    }
  else if (TREE_CONSTANT_OVERFLOW (arg1))
    TREE_CONSTANT_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

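/* Usage sketch (editorial note): a call such as

     fold_convert_const (FLOAT_EXPR, double_type_node,
                         build_int_cst (integer_type_node, 5))

   dispatches on the REAL_TYPE target and returns the REAL_CST 5.0 via
   build_real_from_int_cst; a non-constant ARG1 matches none of the
   branches and the function returns NULL_TREE.  */
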
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);
        }
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1 (FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1 (NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1 (FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1 (FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return build2 (COMPLEX_EXPR, type,
                         fold_convert (TREE_TYPE (type), arg),
                         fold_convert (TREE_TYPE (type), integer_zero_node));

        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
                ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
                return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert (TREE_TYPE (type), rpart);
            ipart = fold_convert (TREE_TYPE (type), ipart);
            return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
        return tem;
      return fold_build1 (NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
}

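/* Usage sketch (editorial note): converting a COMPLEX_TYPE value to an
   integral or real type keeps only the real part, behaving like
   (T) REALPART_EXPR <arg>, while converting a scalar to a COMPLEX_TYPE
   pairs the converted value with a zero imaginary part in a
   COMPLEX_EXPR.  */
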
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case GIMPLE_MODIFY_STMT:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;

  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}

/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}

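/* Worked example (editorial note): with NaNs honored and trapping math
   disabled, the inverse of a < b is a UNGE b, since !(a < b) must also
   hold when either operand is a NaN; with -ftrapping-math the function
   returns ERROR_MARK instead, because replacing a trapping LT_EXPR by a
   non-trapping unordered comparison would change which inputs trap.  */
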
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

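/* Worked example (editorial note): the encoding assigns one bit each to
   "less" (1), "equal" (2), "greater" (4) and "unordered" (8), so
   COMPCODE_LE is 3, COMPCODE_GE is 6 and COMPCODE_NE is 13.  ANDing
   COMPCODE_LE with COMPCODE_GE leaves only the "equal" bit, which
   compcode_to_comparison maps back to EQ_EXPR.  */
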
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
                        truth_type, ll_arg, lr_arg);
}

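/* Usage sketch (editorial note): for operands without NaNs,
   combine_comparisons (TRUTH_ANDIF_EXPR, LE_EXPR, GE_EXPR, type, x, y)
   intersects the two encodings down to the "equal" bit and returns
   x == y, while an OR of LT_EXPR and GT_EXPR yields COMPCODE_LTGT,
   which is rewritten to COMPCODE_NE and returned as x != y.  */
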
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}

/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
        return operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 1), flags)
               && operand_equal_p (TREE_OPERAND (arg0, 1),
                                   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
        return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
                                       TREE_FIXED_CST (arg1));

      case REAL_CST:
        if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                   TREE_REAL_CST (arg1)))
          return 1;

        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
          {
            /* If we do not distinguish between signed and unsigned zero,
               consider them equal.  */
            if (real_zerop (arg0) && real_zerop (arg1))
              return 1;
          }
        return 0;

      case VECTOR_CST:
        {
          tree v1, v2;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
                                    flags))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return v1 == v2;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)                            \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case FIX_TRUNC_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
        case ALIGN_INDIRECT_REF:
        case MISALIGNED_INDIRECT_REF:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
          return OP_SAME (0);

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.
             Compare the array index by value if it is constant first as we
             may have different types but same value here.  */
          return (OP_SAME (0)
                  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
                                          TREE_OPERAND (arg1, 1))
                      || OP_SAME (1))
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          return OP_SAME_WITH_NULL (0)
                 && OP_SAME (1)
                 && OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        default:
          return 0;
        }

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
        {
        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
                                 flags))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  */
          {
            const_call_expr_arg_iterator iter0, iter1;
            const_tree a0, a1;
            for (a0 = first_const_call_expr_arg (arg0, &iter0),
                   a1 = first_const_call_expr_arg (arg1, &iter1);
                 a0 && a1;
                 a0 = next_const_call_expr_arg (&iter0),
                   a1 = next_const_call_expr_arg (&iter1))
              if (! operand_equal_p (a0, a1, flags))
                return 0;

            /* If we get here and both argument lists are exhausted
               then the CALL_EXPRs are equal.  */
            return ! (a0 || a1);
          }

        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}

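/* Usage sketch (editorial note): two distinct INTEGER_CST nodes holding
   the value 7 compare equal here even under OEP_ONLY_CONST, whereas two
   occurrences of the same VAR_DECL compare equal only when that flag is
   unset and neither occurrence has side effects, per the isochronic
   equivalence described above.  */
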
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}

/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}

/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold_build1 (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1),
                          eval_subst (TREE_OPERAND (arg, 1),
                                      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold_build3 (code, type,
                              eval_subst (TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 2),
                                          old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2 (code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}

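/* Usage sketch (editorial note): when folding an expression such as
   foo () * 0, the caller passes the call as OMITTED; since the call has
   side effects, the result is COMPOUND_EXPR <foo (), 0>, which still
   evaluates the call but yields the constant zero.  */
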
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return NULL_TREE;

      code = invert_tree_comparison (code,
                                     HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
        return NULL_TREE;

      return build2 (code, type,
                     TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      return build2 (TRUTH_OR_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2 (TRUTH_XOR_EXPR, type,
                       invert_truthvalue (TREE_OPERAND (arg, 0)),
                       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);
        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
                       VOID_TYPE_P (TREE_TYPE (arg1))
                       ? arg1 : invert_truthvalue (arg1),
                       VOID_TYPE_P (TREE_TYPE (arg2))
                       ? arg2 : invert_truthvalue (arg2));
      }

    case COMPOUND_EXPR:
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        return build1 (TRUTH_NOT_EXPR, type, arg);

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      return build2 (EQ_EXPR, type, arg,
                     build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }

  return NULL_TREE;
}

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue (tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (arg);
  if (!tem)
    tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
}

/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold_build2 (TREE_CODE (arg0), type, common,
                      fold_build2 (code, type, left, right));
}

/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1), 0))
    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
                        fold_build2 (code, type,
                                     TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg1, 0)),
                        TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2 (MULT_EXPR, type,
                          TREE_OPERAND (arg0, 0),
                          build_real (type, r0));
    }

  return NULL_TREE;
}

/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
                    int unsignedp)
{
  tree result;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && host_integerp (size, 0)
          && tree_low_cst (size, 0) == bitsize)
        return fold_convert (type, inner);
    }

  result = build3 (BIT_FIELD_REF, type, inner,
                   size_int (bitsize), bitsize_int (bitpos));

  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;

  return result;
}

/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
                            tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit
     size is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2 (code, compare_type,
                        fold_build2 (BIT_AND_EXPR, unsigned_type,
                                     make_bit_field_ref (linner,
                                                         unsigned_type,
                                                         nbitsize, nbitpos,
                                                         1),
                                     mask),
                        fold_build2 (BIT_AND_EXPR, unsigned_type,
                                     make_bit_field_ref (rinner,
                                                         unsigned_type,
                                                         nbitsize, nbitpos,
                                                         1),
                                     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert (unsigned_type, rhs),
                                  size_int (lbitpos), 0),
                     mask, 0);

  return build2 (code, compare_type,
                 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
                 rhs);
}

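/* Worked example (editorial note, constants illustrative): for a 3-bit
   field F at bit position 2 of a byte, F == 5 becomes a compare of the
   masked containing byte against the pre-shifted constant, roughly

     (BIT_FIELD_REF <byte> & 0x1c) == (5 << 2)

   so the shift implicit in extracting F itself is avoided.  */
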
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
                        fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}

/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size),
                                                  0),
                                     size_int (precision - size), 0));
}

/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}

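/* Worked example (editorial note): for EXP of a 16-bit integral type,
   only VAL == 0x8000 is accepted; if EXP is a widening NOP_EXPR of an
   8-bit operand, VAL == 0x80 is also tested against the unextended
   operand and the narrower subexpression is returned.  */
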
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}

/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
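
/* For illustration (not part of the original code): with both bounds
   omitted, the sign encoding decides the answer directly.  E.g.
   range_binop (LE_EXPR, integer_type_node, NULL_TREE, 1, NULL_TREE, 1)
   compares two upper bounds (sgn0 == sgn1 == 1) and so yields true,
   which is how two unbounded upper limits compare equal.  */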
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_unary
	      || TREE_CODE_CLASS (code) == tcc_binary)
	    arg0_type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  We test arg0_type since often the return type
	     of, e.g. EQ_EXPR, is boolean.  */
	  if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high,
				  in_p, low, high, 1,
				  build_int_cst (arg0_type, 0),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we have a nonzero low
		 bound, reverse the range so it goes from zero to the low bound
		 minus 1.  */
	      if (high == 0 && low && ! integer_zerop (low))
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = build_int_cst (arg0_type, 0);
		}
	    }

	  exp = arg0;
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, exp_type,
			       build_int_cst (exp_type, 0),
			       0, high, 1);
	  n_high = range_binop (MINUS_EXPR, exp_type,
				build_int_cst (exp_type, 0),
				0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1  */
	  exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
			build_int_cst (exp_type, 1));
	  continue;

	case PLUS_EXPR: case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	     move a constant to the other side.  */
	  if (!TYPE_UNSIGNED (arg0_type)
	      && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       arg0_type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				arg0_type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    *strict_overflow_p = true;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	    break;

	  if (! INTEGRAL_TYPE_P (arg0_type)
	      || (low != 0 && ! int_fits_type_p (low, arg0_type))
	      || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = fold_convert (arg0_type, n_low);

	  if (n_high != 0)
	    n_high = fold_convert (arg0_type, n_high);

	  /* If we're converting arg0 from an unsigned type, to exp,
	     a signed type, we will be doing the comparison as unsigned.
	     The tests above have already verified that LOW and HIGH
	     are both positive.

	     So we have to ensure that we will handle large unsigned
	     values the same way that the current signed bounds treat
	     negative values.  */

	  if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	    {
	      tree high_positive;
	      tree equiv_type;
	      /* For fixed-point modes, we need to pass the saturating flag
		 as the 2nd parameter.  */
	      if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
		equiv_type = lang_hooks.types.type_for_mode
			     (TYPE_MODE (arg0_type),
			      TYPE_SATURATING (arg0_type));
	      else
		equiv_type = lang_hooks.types.type_for_mode
			     (TYPE_MODE (arg0_type), 1);

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (arg0_type);

	      if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
		high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
					     fold_convert (arg0_type,
							   high_positive),
					     build_int_cst (arg0_type, 1));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
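
/* For illustration (not part of the original code): given EXP of the
   form "(unsigned) x - 2U <= 3U", make_range peels the comparison into
   the range "+ [-, 3]", then moves the "- 2" onto the bounds to get
   "+ [2, 5]" with x as the returned expression; an unsigned wrap of
   the bounds would instead flip IN_P.  */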
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue (value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2 (LE_EXPR, type, exp,
			fold_convert (etype, high));

  if (high == 0)
    return fold_build2 (GE_EXPR, type, exp,
			fold_convert (etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2 (EQ_EXPR, type, exp,
			fold_convert (etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert (etype, high);
	  exp = fold_convert (etype, exp);
	}
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      etype = signed_type_for (etype);
	      exp = fold_convert (etype, exp);
	    }
	  return fold_build2 (GT_EXPR, type, exp,
			      build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.  */
  switch (TREE_CODE (etype))
    {
    case INTEGER_TYPE:
      /* There is no requirement that LOW be within the range of ETYPE
	 if the latter is a subtype.  It must, however, be within the base
	 type of ETYPE.  So be sure we do the subtraction in that type.  */
      if (TREE_TYPE (etype))
	etype = TREE_TYPE (etype);
      break;

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					      TYPE_UNSIGNED (etype));
      break;

    default:
      break;
    }

  /* If we don't have wrap-around arithmetics upfront, try to force it.  */
  if (TREE_CODE (etype) == INTEGER_TYPE
      && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert (utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert (etype, high);
  low = fold_convert (etype, low);
  exp = fold_convert (etype, exp);

  value = const_binop (MINUS_EXPR, high, low, 0);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_convert (sizetype, low);
	  low = fold_build1 (NEGATE_EXPR, sizetype, low);
	  return build_range_check (type,
				    fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (type,
			      fold_build2 (MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
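
/* For illustration (not part of the original code): with IN_P = 1,
   LOW = 2 and HIGH = 5 on an int EXP, none of the early special cases
   match, so the final step builds "(unsigned int) EXP - 2U <= 3U" via
   the recursive call with a zero low bound.  */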
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
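
/* For illustration (not part of the original code): range_successor on
   INT_MAX returns 0 to signal "no successor", which callers such as
   merge_ranges treat as a reason to punt rather than wrap around.  */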
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
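
/* For illustration (not part of the original code): merging the ranges
   for "X == 2" and "X == 3", i.e. + [2, 2] and + [3, 3], reaches the
   "excluding both" case after fold_range_test inverts them for an OR;
   the two inverted ranges - [2, 2] and - [3, 3] are adjacent, so the
   merge yields - [2, 3], which inverts back to + [2, 3].  */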
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
       ? real_zerop (arg01)
       : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	  /* In the case that A is of the form X-Y, '-A' (arg2) may
	     have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert (arg1_type, arg1);
	return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (signed_type_for
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (signed_type_for
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert (type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert (type, arg01);
	return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;
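
/* For illustration (not part of the original code): "A < 6 ? A : 5"
   has C1 == 6 == C2 + 1, so it re-forms as MIN (A, 5); likewise
   "A > 4 ? A : 5" has C1 == C2 - 1 and re-forms as MAX (A, 5).  */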
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif
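
/* For illustration (not part of the original code): when this macro is
   nonzero, "a && b" over simple operands may be rewritten as the
   non-short-circuit "a & b", trading the conditional branch for an
   unconditional evaluation of both operands.  */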
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue (tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2 (code == TRUTH_ANDIF_EXPR
		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		       type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2 (code == TRUTH_ANDIF_EXPR
			     ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			     type, lhs, rhs);
	    }
	}
    }

  return 0;
}
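
/* For illustration (not part of the original code): for
   "X == 2 || X == 3" this routine gets the ranges + [2, 2] and
   + [3, 3], inverts them for the OR, merges them to - [2, 3], builds
   the range check and inverts it back, yielding a test equivalent to
   "(unsigned) X - 2 <= 1".  */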
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
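
/* For illustration (not part of the original code): with P = 8 in a
   32-bit mode and C = 0xff (a signed char -1), TEMP works out to
   0xffffff00, so C XOR TEMP = 0xffffffff -- the extra bits become
   zero exactly when C is rewritten as its sign-extended value.  */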
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */
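
/* For illustration (not part of the original code): given a struct
   with adjacent bit-fields "int a : 4; int b : 4;", the test
   "p->a == 2 && p->b == 4" can load the containing byte once and
   compare it, masked, against the merged constant (2 | 4 << 4) on a
   little-endian target.  */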
static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code orig_code = code;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      tree result;
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (NE_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (EQ_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
	{
	  if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
	    return build2 (code, truth_type, lhs, rhs);
	  return NULL_TREE;
	}
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, ll_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, rl_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
			     size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert (rntype, lhs);
		  ll_mask = fold_convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert (lntype, rhs);
		  lr_mask = fold_convert (lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);

  return build2 (wanted_code, truth_type, result,
		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const = op1;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
					       type, op0, op1);
	if (tem)
	  return invert_truthvalue (tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2 (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (EQ_EXPR, type, arg0, comp_const),
		     optimize_minmax_comparison
		     (GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2 (LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2 (GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
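
/* For illustration (not part of the original code): "MIN (X, 10) < 5"
   is handled by inverting to "MIN (X, 10) >= 5", expanding GE as
   "== 5 || > 5", simplifying each against the MIN (giving "X == 5"
   and "X > 5"), and inverting the combined result back to "X < 5".  */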
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}
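
/* For illustration (not part of the original code): dividing
   "(X * 8) + (Y * 16)" by 4 recurses into both addends; each multiply
   absorbs the division (8/4 and 16/4 are exact), so the whole
   expression folds to "(X * 2) + (Y * 4)" with no division left.  */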
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (GET_MODE_SIZE (TYPE_MODE (type))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
				     fold_convert (ctype, c), 0))
	  && !TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with an
	 operation in either CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((TYPE_OVERFLOW_UNDEFINED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6307 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
6308 && code
!= MULT_EXPR
)))
6310 if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
6312 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6313 *strict_overflow_p
= true;
6314 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6315 fold_convert (ctype
,
6316 const_binop (TRUNC_DIV_EXPR
,
6319 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, c
, op1
, 0)))
6321 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6322 *strict_overflow_p
= true;
6323 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
6324 fold_convert (ctype
,
6325 const_binop (TRUNC_DIV_EXPR
,
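
/* A rough illustration of the "cancel" case above (illustrative only,
   assuming a signed int i whose type has undefined overflow):

       (i * 8) / 4  ->  i * 2     since 4 divides 8, TCODE survives
       (i * 4) / 8  ->  i / 2     since 4 divides 8 the other way,
                                  CODE survives

   Both rewrites set *strict_overflow_p, because they are valid only
   when the original multiplication cannot wrap.  */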
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code,
                                     tree type, tree op0, tree op1,
                                     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
         sense to try to perform a logical or arithmetic operation
         involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
        lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
        rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert (arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert (cond_type, true_value);
      if (cond_first_p)
        lhs = fold_build2 (code, type, true_value, arg);
      else
        lhs = fold_build2 (code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert (cond_type, false_value);
      if (cond_first_p)
        rhs = fold_build2 (code, type, false_value, arg);
      else
        rhs = fold_build2 (code, type, arg, false_value);
    }

  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
  return fold_convert (type, test);
}
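
/* Illustrative example: with ARG constant, 2 + (b ? 3 : 5) becomes
   b ? 5 : 7 -- both new arms fold to constants and the binary
   operation disappears, which is why the TREE_CONSTANT check above
   is required for the transformation to pay off.  */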
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
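
/* Concrete cases (illustrative): with signed zeros honored,
   X = -0.0 gives X + 0.0 = +0.0 != X, so the !NEGATE form is
   rejected; X - 0.0 preserves X in the default rounding mode, but
   under rounding towards -infinity 0.0 - 0.0 = -0.0, hence the
   NEGATE form is accepted only when sign-dependent rounding need
   not be honored.  */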
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
                     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
        {
          /* sqrt(x) < y is always false, if y is negative.  */
          if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
            return omit_one_operand (type, integer_zero_node, arg);

          /* sqrt(x) > y is always true, if y is negative and we
             don't care about NaNs, i.e. negative values of x.  */
          if (code == NE_EXPR || !HONOR_NANS (mode))
            return omit_one_operand (type, integer_one_node, arg);

          /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
          return fold_build2 (GE_EXPR, type, arg,
                              build_real (TREE_TYPE (arg), dconst0));
        }
      else if (code == GT_EXPR || code == GE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) > y is x == +Inf, when y is very large.  */
              if (HONOR_INFINITIES (mode))
                return fold_build2 (EQ_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) > y is always false, when y is very large
                 and we don't care about infinities.  */
              return omit_one_operand (type, integer_zero_node, arg);
            }

          /* sqrt(x) > c is the same as x > c*c.  */
          return fold_build2 (code, type, arg,
                              build_real (TREE_TYPE (arg), c2));
        }
      else if (code == LT_EXPR || code == LE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) < y is always true, when y is a very large
                 value and we don't care about NaNs or Infinities.  */
              if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
                return omit_one_operand (type, integer_one_node, arg);

              /* sqrt(x) < y is x != +Inf when y is very large and we
                 don't care about NaNs.  */
              if (! HONOR_NANS (mode))
                return fold_build2 (NE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) < y is x >= 0 when y is very large and we
                 don't care about Infinities.  */
              if (! HONOR_INFINITIES (mode))
                return fold_build2 (GE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), dconst0));

              /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
              if (lang_hooks.decls.global_bindings_p () != 0
                  || CONTAINS_PLACEHOLDER_P (arg))
                return NULL_TREE;

              arg = save_expr (arg);
              return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                  fold_build2 (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2 (NE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }

          /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
          if (! HONOR_NANS (mode))
            return fold_build2 (code, type, arg,
                                build_real (TREE_TYPE (arg), c2));

          /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
          if (lang_hooks.decls.global_bindings_p () == 0
              && ! CONTAINS_PLACEHOLDER_P (arg))
            {
              arg = save_expr (arg);
              return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                  fold_build2 (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2 (code, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }
        }
    }

  return NULL_TREE;
}
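
/* Worked examples (illustrative): sqrt(x) > 2.0 becomes x > 4.0,
   which is safe even with NaNs since both sides are false for
   negative or NaN x; sqrt(x) < -1.0 is constant false; and
   sqrt(x) > -1.0 becomes x >= 0.0 when NaNs are honored.  */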
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
        return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
          && ! CONTAINS_PLACEHOLDER_P (arg0))
        {
          arg0 = save_expr (arg0);
          return fold_build2 (EQ_EXPR, type, arg0, arg0);
        }
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
        return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                            arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
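
/* E.g. (illustrative): for double, x < +Inf folds to x <= DBL_MAX,
   while x != +Inf with NaNs honored folds to !(x > DBL_MAX) -- the
   negated form, so that a NaN still yields true.  */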
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
                                   TREE_INT_CST_HIGH (arg01),
                                   TREE_INT_CST_LOW (arg1),
                                   TREE_INT_CST_HIGH (arg1),
                                   &lpart, &hpart, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
                                -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
                                       TREE_INT_CST_HIGH (prod),
                                       TREE_INT_CST_LOW (tmp),
                                       TREE_INT_CST_HIGH (tmp),
                                       &lpart, &hpart, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
                                  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          neg_overflow = true;
          lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        case  0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case  1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        case  0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case  1:
          neg_overflow = true;
          lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
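
/* Worked example (illustrative): for signed X and X / 4 == 3 we get
   prod = 12, tmp = 3, lo = 12 and hi = 15, so the division is
   replaced by the range check 12 <= X <= 15; likewise X / 4 > 3
   becomes X > 15.  */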
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = signed_type_for (TREE_TYPE (arg00));
          return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                              result_type, fold_convert (stype, arg00),
                              build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}
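
/* E.g. (illustrative): for a 32-bit unsigned A, (A & 0x80000000) != 0
   tests exactly the sign bit and becomes (int) A < 0, while
   (A & 0x80000000) == 0 becomes (int) A >= 0.  */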
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
                      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
        inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
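
/* E.g. (illustrative): (A & 8) != 0 becomes ((unsigned) A >> 3) & 1,
   and (A & 8) == 0 becomes (((unsigned) A >> 3) ^ 1) & 1; the AND is
   emitted last so that later folds can combine it with neighbors.  */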
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_size)
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
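
/* Net effect (illustrative): "5 + x" is canonicalized as "x + 5",
   and for SSA names "b_7 + a_2" as "a_2 + b_7", giving commutative
   expressions a single canonical spelling.  */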
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
          || (TYPE_PRECISION (shorter_type)
              >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
              && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2 (code, type, arg0_unw,
                        fold_convert (shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
        return omit_one_operand (type, integer_one_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
        return omit_one_operand (type, integer_zero_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
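
/* E.g. (illustrative): with 16-bit short s, (int) s == 70000 is known
   false because 70000 is out of short's range, while (int) s == 12 is
   narrowed to the short-mode comparison s == 12.  */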
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
                              tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
            || TREE_CODE (arg1) == CONVERT_EXPR)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
                                  TREE_INT_CST_HIGH (arg1), 0,
                                  TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s * delta
   being an integer constant (and thus already folded).
   ADDR is the address.  OP1 is the multiplicative expression.
   If the function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  */

static tree
try_move_mult_to_index (tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        {
          /* Remember if this was a multi-dimensional array.  */
          if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
            mdim = true;

          itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
          if (! itype)
            continue;

          step = array_ref_element_size (ref);
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* Try if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          /* Only fold here if we can verify we do not overflow one
             dimension of a multi-dimensional array.  */
          if (mdim)
            {
              tree tmp;

              if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
                  || !INTEGRAL_TYPE_P (itype)
                  || !TYPE_MAX_VALUE (itype)
                  || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
                continue;

              tmp = fold_binary (PLUS_EXPR, itype,
                                 fold_convert (itype,
                                               TREE_OPERAND (ref, 1)),
                                 fold_convert (itype, delta));
              if (!tmp
                  || TREE_CODE (tmp) != INTEGER_CST
                  || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
                continue;
            }

          break;
        }

      if (!handled_component_p (ref))
        return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
                                       fold_convert (itype,
                                                     TREE_OPERAND (pos, 1)),
                                       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
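
/* E.g. (illustrative): with int a[10] (element size 4),
   &a[2] p+ (i * 4) becomes &a[2 + i], and the already-folded constant
   form &a[2] p+ 12 becomes &a[5] via the delta-is-a-multiple-of-step
   path.  */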
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert (ssizetype, a);
      tree ta1 = fold_convert (ssizetype, a1);
      diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary (MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2 (GE_EXPR, type, a, y);
}
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
           && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
          < (int11 >= 0 ? int11 : -int11))
        {
          tmp = int01, int01 = int11, int11 = tmp;
          alt0 = arg00, arg00 = arg10, arg10 = alt0;
          maybe_same = arg01;
          swap = true;
        }
      else
        maybe_same = arg11;

      if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
        {
          alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
                              build_int_cst (TREE_TYPE (arg00),
                                             int01 / int11));
          alt1 = arg10;
          same = maybe_same;
          if (swap)
            maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
        }
    }

  if (same)
    return fold_build2 (MULT_EXPR, type,
                        fold_build2 (code, type,
                                     fold_convert (type, alt0),
                                     fold_convert (type, alt1)),
                        fold_convert (type, same));

  return NULL_TREE;
}
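
/* E.g. (illustrative): A * 7 + B * 7 -> (A + B) * 7 by the identical
   multiplicand cases, and A * 12 + B * 4 -> (A * 3 + B) * 4 by the
   common power-of-two path.  */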
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
        value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
        value = (unsigned char) (TREE_INT_CST_HIGH (expr)
                                 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
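
/* E.g. (illustrative): encoding the SImode constant 0x11223344 yields
   bytes 44 33 22 11 for a little-endian target and 11 22 33 44 for a
   big-endian one; for constants wider than a target word the loop
   additionally applies WORDS_BIG_ENDIAN to order the words.  */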
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = 32 / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (rsize != isize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem, elements;

  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (elements)
        {
          elem = TREE_VALUE (elements);
          elements = TREE_CHAIN (elements);
        }
      else
        elem = NULL_TREE;

      if (elem)
        {
          if (native_encode_expr (elem, ptr+offset, len-offset) != size)
            return 0;
        }
      else
        {
          if (offset + size > len)
            return 0;
          memset (ptr+offset, 0, size);
        }
      offset += size;
    }
  return offset;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  unsigned HOST_WIDE_INT lo = 0;
  HOST_WIDE_INT hi = 0;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
        lo |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
        hi |= (unsigned HOST_WIDE_INT) value
              << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return build_int_cst_wide_type (type, lo, hi);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = 32 / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem, elements;
  int i, size, count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = NULL_TREE;
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
        return NULL_TREE;
      elements = tree_cons (NULL_TREE, elem, elements);
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

static tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
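
/* E.g. (illustrative): VIEW_CONVERT_EXPR<int>(1.0f) on a
   little-endian IEEE target encodes the float as 00 00 80 3f and
   native_interpret_int reads those bytes back as 0x3f800000.  */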
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  When IN_FOLD is true
   avoid modifications of T.  */

static tree
build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else if (!in_fold)
    {
      tree base = t;

      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);

      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }
  else
    t = build1 (ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T with type PTRTYPE.  This
   function modifies the input parameter 'T' by sometimes setting the
   TREE_ADDRESSABLE flag.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
}

/* Build an expression for the address of T.  This function modifies
   the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
   flag.  When called from fold functions, use fold_addr_expr instead.  */

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type_1 (t,
                                           build_pointer_type (TREE_TYPE (t)),
                                           false);
}

/* Same as build_fold_addr_expr, builds an expression for the address
   of T, but avoids touching the input node 't'.  Fold functions
   should use this version.  */

static tree
fold_addr_expr (tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
}
7929 /* Fold a unary expression of code CODE and type TYPE with operand
7930 OP0. Return the folded expression if folding is successful.
7931 Otherwise, return NULL_TREE. */
7934 fold_unary (enum tree_code code
, tree type
, tree op0
)
7938 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7940 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7941 && TREE_CODE_LENGTH (code
) == 1);
7946 if (code
== NOP_EXPR
|| code
== CONVERT_EXPR
7947 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
)
7949 /* Don't use STRIP_NOPS, because signedness of argument type
7951 STRIP_SIGN_NOPS (arg0
);
7955 /* Strip any conversions that don't change the mode. This
7956 is safe for every expression, except for a comparison
7957 expression because its signedness is derived from its
7960 Note that this is done as an internal manipulation within
7961 the constant folder, in order to find the simplest
7962 representation of the arguments so that their form can be
7963 studied. In any cases, the appropriate type conversions
7964 should be put back in the tree that will get out of the
7970 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7972 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7973 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7974 fold_build1 (code
, type
, TREE_OPERAND (arg0
, 1)));
7975 else if (TREE_CODE (arg0
) == COND_EXPR
)
7977 tree arg01
= TREE_OPERAND (arg0
, 1);
7978 tree arg02
= TREE_OPERAND (arg0
, 2);
7979 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7980 arg01
= fold_build1 (code
, type
, arg01
);
7981 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7982 arg02
= fold_build1 (code
, type
, arg02
);
7983 tem
= fold_build3 (COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7986 /* If this was a conversion, and all we did was to move into
7987 inside the COND_EXPR, bring it back out. But leave it if
7988 it is a conversion from integer to integer and the
7989 result precision is no wider than a word since such a
7990 conversion is cheap and may be optimized away by combine,
7991 while it couldn't if it were outside the COND_EXPR. Then return
7992 so we don't get into an infinite recursion loop taking the
7993 conversion out and then back in. */
7995 if ((code
== NOP_EXPR
|| code
== CONVERT_EXPR
7996 || code
== NON_LVALUE_EXPR
)
7997 && TREE_CODE (tem
) == COND_EXPR
7998 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7999 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
8000 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
8001 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
8002 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
8003 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
8004 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
8006 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
8007 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
8008 || flag_syntax_only
))
8009 tem
= build1 (code
, type
,
8011 TREE_TYPE (TREE_OPERAND
8012 (TREE_OPERAND (tem
, 1), 0)),
8013 TREE_OPERAND (tem
, 0),
8014 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
8015 TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)));
8018 else if (COMPARISON_CLASS_P (arg0
))
8020 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
8022 arg0
= copy_node (arg0
);
8023 TREE_TYPE (arg0
) = type
;
8026 else if (TREE_CODE (type
) != INTEGER_TYPE
)
8027 return fold_build3 (COND_EXPR
, type
, arg0
,
8028 fold_build1 (code
, type
,
8030 fold_build1 (code
, type
,
8031 integer_zero_node
));
8038 /* Re-association barriers around constants and other re-association
8039 barriers can be removed. */
8040 if (CONSTANT_CLASS_P (op0
)
8041 || TREE_CODE (op0
) == PAREN_EXPR
)
8042 return fold_convert (type
, op0
);
8048 case FIX_TRUNC_EXPR
:
8049 if (TREE_TYPE (op0
) == type
)
8052 /* If we have (type) (a CMP b) and type is an integral type, return
8053 new expression involving the new type. */
8054 if (COMPARISON_CLASS_P (op0
) && INTEGRAL_TYPE_P (type
))
8055 return fold_build2 (TREE_CODE (op0
), type
, TREE_OPERAND (op0
, 0),
8056 TREE_OPERAND (op0
, 1));
8058 /* Handle cases of two conversions in a row. */
8059 if (TREE_CODE (op0
) == NOP_EXPR
8060 || TREE_CODE (op0
) == CONVERT_EXPR
)
8062 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
8063 tree inter_type
= TREE_TYPE (op0
);
8064 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
8065 int inside_ptr
= POINTER_TYPE_P (inside_type
);
8066 int inside_float
= FLOAT_TYPE_P (inside_type
);
8067 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
8068 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
8069 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
8070 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
8071 int inter_ptr
= POINTER_TYPE_P (inter_type
);
8072 int inter_float
= FLOAT_TYPE_P (inter_type
);
8073 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
8074 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
8075 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
8076 int final_int
= INTEGRAL_TYPE_P (type
);
8077 int final_ptr
= POINTER_TYPE_P (type
);
8078 int final_float
= FLOAT_TYPE_P (type
);
8079 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
8080 unsigned int final_prec
= TYPE_PRECISION (type
);
8081 int final_unsignedp
= TYPE_UNSIGNED (type
);
8083 /* In addition to the cases of two conversions in a row
8084 handled below, if we are converting something to its own
8085 type via an object of identical or wider precision, neither
8086 conversion is needed. */
8087 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
8088 && (((inter_int
|| inter_ptr
) && final_int
)
8089 || (inter_float
&& final_float
))
8090 && inter_prec
>= final_prec
)
8091 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
8093 /* Likewise, if the intermediate and final types are either both
8094 float or both integer, we don't need the middle conversion if
8095 it is wider than the final type and doesn't change the signedness
8096 (for integers). Avoid this if the final type is a pointer
8097 since then we sometimes need the inner conversion. Likewise if
8098 the outer has a precision not equal to the size of its mode. */
8099 if (((inter_int
&& inside_int
)
8100 || (inter_float
&& inside_float
)
8101 || (inter_vec
&& inside_vec
))
8102 && inter_prec
>= inside_prec
8103 && (inter_float
|| inter_vec
8104 || inter_unsignedp
== inside_unsignedp
)
8105 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
8106 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
8108 && (! final_vec
|| inter_prec
== inside_prec
))
8109 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
8111 /* If we have a sign-extension of a zero-extended value, we can
8112 replace that by a single zero-extension. */
8113 if (inside_int
&& inter_int
&& final_int
8114 && inside_prec
< inter_prec
&& inter_prec
< final_prec
8115 && inside_unsignedp
&& !inter_unsignedp
)
8116 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
8118 /* Two conversions in a row are not needed unless:
8119 - some conversion is floating-point (overstrict for now), or
8120 - some conversion is a vector (overstrict for now), or
8121 - the intermediate type is narrower than both initial and
8123 - the intermediate type and innermost type differ in signedness,
8124 and the outermost type is wider than the intermediate, or
8125 - the initial type is a pointer type and the precisions of the
8126 intermediate and final types differ, or
8127 - the final type is a pointer type and the precisions of the
8128 initial and intermediate types differ.
8129 - the initial type is a pointer to an array and the final type
8131 if (! inside_float
&& ! inter_float
&& ! final_float
8132 && ! inside_vec
&& ! inter_vec
&& ! final_vec
8133 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
8134 && ! (inside_int
&& inter_int
8135 && inter_unsignedp
!= inside_unsignedp
8136 && inter_prec
< final_prec
)
8137 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
8138 == (final_unsignedp
&& final_prec
> inter_prec
))
8139 && ! (inside_ptr
&& inter_prec
!= final_prec
)
8140 && ! (final_ptr
&& inside_prec
!= inter_prec
)
8141 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
8142 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
8143 && ! (inside_ptr
&& final_ptr
8144 && TREE_CODE (TREE_TYPE (inside_type
)) == ARRAY_TYPE
8145 && TREE_CODE (TREE_TYPE (type
)) != ARRAY_TYPE
))
8146 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
8149 /* Handle (T *)&A.B.C for A being of type T and B and C
8150 living at offset zero. This occurs frequently in
8151 C++ upcasting and then accessing the base. */
8152 if (TREE_CODE (op0
) == ADDR_EXPR
8153 && POINTER_TYPE_P (type
)
8154 && handled_component_p (TREE_OPERAND (op0
, 0)))
8156 HOST_WIDE_INT bitsize
, bitpos
;
8158 enum machine_mode mode
;
8159 int unsignedp
, volatilep
;
8160 tree base
= TREE_OPERAND (op0
, 0);
8161 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
8162 &mode
, &unsignedp
, &volatilep
, false);
8163 /* If the reference was to a (constant) zero offset, we can use
8164 the address of the base if it has the same base type
8165 as the result type. */
8166 if (! offset
&& bitpos
== 0
8167 && TYPE_MAIN_VARIANT (TREE_TYPE (type
))
8168 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
8169 return fold_convert (type
, fold_addr_expr (base
));
8172 if ((TREE_CODE (op0
) == MODIFY_EXPR
8173 || TREE_CODE (op0
) == GIMPLE_MODIFY_STMT
)
8174 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0
, 1))
8175 /* Detect assigning a bitfield. */
8176 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0
, 0)) == COMPONENT_REF
8178 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0
, 0), 1))))
8180 /* Don't leave an assignment inside a conversion
8181 unless assigning a bitfield. */
8182 tem
= fold_build1 (code
, type
, GENERIC_TREE_OPERAND (op0
, 1));
8183 /* First do the assignment, then return converted constant. */
8184 tem
= build2 (COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
8185 TREE_NO_WARNING (tem
) = 1;
8186 TREE_USED (tem
) = 1;
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (type) != BOOLEAN_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and = op0;
	  tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_low_cst (and1, 1);
	      cst &= (HOST_WIDE_INT) -1
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = unsigned_type_for (TREE_TYPE (and0));
		  and0 = fold_convert (uns, and0);
		  and1 = fold_convert (uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
					   TREE_INT_CST_HIGH (and1), 0,
					   TREE_OVERFLOW (and1));
	      return fold_build2 (BIT_AND_EXPR, type,
				  fold_convert (type, and0), tem);
	    }
	}
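      /* Illustrative example (added; not in the original source): for a
	 signed char X, (unsigned int)(X & 0x7f) can become
	 ((unsigned int)X & 0x7fU): the mask clears bit 7, the sign bit
	 of X, so no sign extension can leak through the widening cast.
	 With a mask such as 0x80 the sign bit survives and the
	 transformation is not performed.  */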
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
	 when one of the new casts will fold away.  Conservatively we assume
	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  return fold_build2 (TREE_CODE (arg0), type,
			      fold_convert (type, arg00),
			      fold_convert (sizetype, arg01));
	}
      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
	}

      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;
    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;

    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR
	  || (TREE_CODE (op0) == NOP_EXPR
	      && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      && TYPE_PRECISION (TREE_TYPE (op0))
		 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (arg0);
      if (tem)
	return fold_convert (type, tem);
      return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert (type, fold_build1 (ABS_EXPR,
						    TREE_TYPE (targ0),
						    targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
	}
      return NULL_TREE;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
	  return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert (itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert (type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_convert (type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)),
			    build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1 (NEGATE_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary (BIT_NOT_EXPR, type,
				     fold_convert (type,
						   TREE_OPERAND (arg0, 0)))))
	return fold_build2 (BIT_XOR_EXPR, type, tem,
			    fold_convert (type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary (BIT_NOT_EXPR, type,
				     fold_convert (type,
						   TREE_OPERAND (arg0, 1)))))
	return fold_build2 (BIT_XOR_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
	  int count = TYPE_VECTOR_SUBPARTS (type), i;

	  for (i = 0; i < count; i++)
	    {
	      if (elements)
		{
		  elem = TREE_VALUE (elements);
		  elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
		  if (elem == NULL_TREE)
		    break;
		  elements = TREE_CHAIN (elements);
		}
	      else
		elem = build_int_cst (TREE_TYPE (type), -1);
	      list = tree_cons (NULL_TREE, elem, list);
	    }
	  if (i == count)
	    return build_vector (type, nreverse (list));
	}

      return NULL_TREE;
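      /* Illustrative check of the identities above (added; not in the
	 original source), assuming two's complement arithmetic where
	 ~X == -X - 1:
	   ~(-A)    == A - 1   e.g. A = 5 in 8 bits: -A == 0xfb, ~0xfb == 0x04
	   ~(A - 1) == -A      and ~(A + -1) is the same expression.  */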
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert (type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 0),
				 TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2 (TREE_CODE (arg0), itype,
			     fold_build1 (REALPART_EXPR, itype,
					  TREE_OPERAND (arg0, 0)),
			     fold_build1 (REALPART_EXPR, itype,
					  TREE_OPERAND (arg0, 1)));
	  return fold_convert (type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
	  return fold_convert (type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
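      /* Illustrative note (added; not in the original source): the CEXPI
	 folding above uses Euler's formula, cexpi(x) = cos(x) + i*sin(x),
	 so the real part of __builtin_cexpi (x) is exactly cos (x).  */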
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, integer_zero_node);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 1),
				 TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2 (TREE_CODE (arg0), itype,
			     fold_build1 (IMAGPART_EXPR, itype,
					  TREE_OPERAND (arg0, 0)),
			     fold_build1 (IMAGPART_EXPR, itype,
					  TREE_OPERAND (arg0, 1)));
	  return fold_convert (type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
	  return fold_convert (type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
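/* Illustrative note (added; not in the original source): the first
   identity holds because MAX (a, b) >= b for every a, so taking the MIN
   of that result with b always yields b; the other three cases are the
   same identity with the operands commuted.  */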
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  */
  if (!(((code0 == MINUS_EXPR
	  || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  */
  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
  if (code0 != INTEGER_CST)
    t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2 (swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2 (code, type, t, arg1);
}
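/* Illustrative note (added; not in the original source): for signed
   operands, X + 2 > Y canonicalizes to X + 1 >= Y, reducing the
   magnitude of the constant.  The step is only valid when X + 2 is
   assumed not to wrap, which is why *strict_overflow_p is set for the
   A +- CST forms.  */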
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* In principle pointers also have undefined overflow behavior,
     but that causes problems elsewhere.  */
  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      || POINTER_TYPE_P (TREE_TYPE (arg0)))
    return NULL_TREE;

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (swap_tree_comparison (code), type, op1, op0);
  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
			 TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand (type, boolean_false_node, variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand (type, boolean_true_node, variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  fold_overflow_warning (("assuming signed overflow does not occur "
				  "when changing X +- C1 cmp C2 to "
				  "X cmp C1 +- C2"),
				 WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2 (code, type, variable, lhs);
	}
    }
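  /* Illustrative note (added; not in the original source): for signed X,
     the transform rewrites X + 10 < 20 as X < 10.  When the combined
     constant overflows, e.g. X - 10 < INT_MAX, the canonicalized form
     decides the comparison outright, here to the constant true.  */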
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  offset0 = TREE_OPERAND (arg0, 1);
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  /* We have to make sure to have an indirect/non-indirect base1
	     just the same as we did for base0.  */
	  if (TREE_CODE (base1) == INDIRECT_REF
	      && indirect_base0)
	    base1 = TREE_OPERAND (base1, 0);
	  else if (!indirect_base0)
	    base1 = NULL_TREE;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  offset1 = TREE_OPERAND (arg1, 1);
	}
      else if (indirect_base0)
	base1 = NULL_TREE;

      /* If we have equivalent bases we might be able to simplify.  */
      if (base0 && base1
	  && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if (offset0 == offset1
	      || (offset0 && offset1
		  && operand_equal_p (offset0, offset1, 0)))
	    {
	      switch (code)
		{
		case EQ_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
		case NE_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
		case LT_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
		case LE_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
		case GE_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
		case GT_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed size type here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1)
	    {
	      tree signed_size_type_node;
	      signed_size_type_node = signed_type_for (size_type_node);

	      /* By converting to signed size type we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 size type.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (signed_size_type_node, 0);
	      else
		offset0 = fold_convert (signed_size_type_node, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (signed_size_type_node, 0);
	      else
		offset1 = fold_convert (signed_size_type_node, offset1);

	      return fold_build2 (code, type, offset0, offset1);
	    }
	}
    }
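  /* Illustrative example (added; not in the original source): given
       int a[4];
     the test &a[1] < &a[3] has the same base object and constant byte
     positions 4 and 12 (with 4-byte int), so it folds to the constant
     true; with variable indices the test instead reduces to a comparison
     of the offset expressions in the signed size type.  */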
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2 (code, type,
			      variable1,
			      fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
					   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2 (code, type,
			      fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
					   variable1, cst),
			      variable2);
	}
    }
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      gcc_assert (!integer_zerop (const1));

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2 (cmp_code, type, variable1, const2);
    }
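  /* Illustrative note (added; not in the original source): with signed
     overflow undefined, X * 4 < 0 folds to X < 0, while X * -4 < 0
     folds to X > 0 because the negative multiplier flips the sense of
     the comparison.  */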
  tem = maybe_canonicalize_comparison (code, type, op0, op1);
  if (tem)
    return tem;
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2 (code, type, fold_convert (newtype, targ0),
			    fold_convert (newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
			    TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2 (swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 0),
				build_real (TREE_TYPE (arg1),
					    REAL_VALUE_NEGATE (cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2 (code, type, arg0,
				build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand (type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst))
	    {
	      tem = fold_inf_compare (code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (swap_tree_comparison (code), type,
			    TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }

  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && (TREE_CODE (arg0) == NOP_EXPR
	  || TREE_CODE (arg0) == CONVERT_EXPR))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (code, type, arg0, arg1);
      if (tem)
	return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (code, type, op0, op1);
      if (tem)
	return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2 (EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }
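  /* Illustrative note (added; not in the original source): the
     HONOR_NANS guards matter because with x = NaN the IEEE results are
     x == x -> false, x <= x -> false, x != x -> true, so only the
     integer and non-NaN floating cases may be folded to constants.  */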
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	  tree equal_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	  tree low_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand (type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand (type, integer_one_node, arg0);
		}

	      if (save_p)
		return save_expr (build2 (code, type, cval1, cval2));
	      return fold_build2 (code, type, cval1, cval2);
	    }
	}
    }
  /* Fold a comparison of the address of COMPONENT_REFs with the same
     type and component to a comparison of the address of the base
     object.  In short, &x->a OP &y->a to x OP y and
     &x->a OP &y.a to x OP &y  */
  if (TREE_CODE (arg0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
      && TREE_CODE (arg1) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
    {
      tree cref0 = TREE_OPERAND (arg0, 0);
      tree cref1 = TREE_OPERAND (arg1, 0);
      if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
	{
	  tree op0 = TREE_OPERAND (cref0, 0);
	  tree op1 = TREE_OPERAND (cref1, 0);
	  return fold_build2 (code, type,
			      fold_addr_expr (op0),
			      fold_addr_expr (op1));
	}
    }

  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2 (code, type,
			  fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
			  TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2 (swap_tree_comparison (code), type,
			  TREE_OPERAND (arg0, 0),
			  fold_build1 (BIT_NOT_EXPR, cmp_type,
				       fold_convert (cmp_type, arg1)));
    }

  return NULL_TREE;
}
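/* Illustrative note (added; not in the original source): both BIT_NOT
   transforms above rely on ~X == -X - 1 being a strictly decreasing
   bijection, so ~X < ~Y holds exactly when Y < X, and ~X == C holds
   exactly when X == ~C.  */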
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1 (REALPART_EXPR, itype, expr);
      ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2 (PLUS_EXPR, itype,
		     fold_build2 (MULT_EXPR, itype, rpart, rpart),
		     fold_build2 (MULT_EXPR, itype, ipart, ipart));
  return fold_build2 (COMPLEX_EXPR, type, tem,
		      fold_convert (itype, integer_zero_node));
}
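/* Illustrative derivation (added; not in the original source): with
   z = a + b*i, z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b, and the
   imaginary part a*(-b) + b*a cancels to zero, which is why the folded
   result is COMPLEX_EXPR <a*a + b*b, 0>.  */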
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      if (handled_component_p (expr))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;

	  expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  *residue = bitpos / BITS_PER_UNIT;
	  if (offset)
	    {
	      if (TREE_CODE (offset) == INTEGER_CST)
		*residue += TREE_INT_CST_LOW (offset);
	      else
		/* We don't handle more complicated offset expressions.  */
		return 1;
	    }
	}

      if (DECL_P (expr))
	return DECL_ALIGN_UNIT (expr);
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
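/* Illustrative note (added; not in the original source): the
   "align &= -align" step above isolates the lowest set bit, i.e. the
   greatest power-of-2 divisor: for op1 == 24 it yields 8, so an offset
   of the form 24*i preserves any modulus up to 8.  */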
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert ((IS_EXPR_CODE_CLASS (kind)
	       || IS_GIMPLE_STMT_CODE_CLASS (kind))
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
	{
	  /* Make sure type and arg0 have the same saturating flag.  */
	  gcc_assert (TYPE_SATURATING (type)
		      == TYPE_SATURATING (TREE_TYPE (arg0)));
	  tem = const_binop (code, arg0, arg1, 0);
	}
      else if (kind == tcc_comparison)
	tem = fold_relational_const (code, type, arg0, arg1);
      else
	tem = NULL_TREE;

      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert (type, tem);
	  return tem;
	}
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (code, type, op1, op0);
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			 : TRUTH_XOR_EXPR,
			 boolean_type_node,
			 fold_convert (boolean_type_node, arg0),
			 fold_convert (boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue (tem);

      return fold_convert (type, tem);
    }

  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build2 (code, type,
				    fold_convert (TREE_TYPE (op0),
						  TREE_OPERAND (arg0, 1)),
				    op1));
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		       fold_build2 (code, type, op0,
				    fold_convert (TREE_TYPE (op1),
						  TREE_OPERAND (arg1, 1))));

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
						fold_convert (sizetype, arg1),
						fold_convert (sizetype, arg0)));

      /* index +p PTR -> PTR +p index */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_build2 (POINTER_PLUS_EXPR, type,
			    fold_convert (type, arg1),
			    fold_convert (sizetype, arg0));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  tree inner;
	  tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  inner = fold_build2 (PLUS_EXPR, sizetype,
			       arg01, fold_convert (sizetype, arg1));
	  return fold_convert (type,
			       fold_build2 (POINTER_PLUS_EXPR,
					    TREE_TYPE (arg00), arg00, inner));
	}

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
	 of the array.  Loop optimizer sometimes produces this type of
	 expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
	  if (tem)
	    return fold_convert (type, tem);
	}

      return NULL_TREE;
    case PLUS_EXPR:
      /* PTR + INT -> (INT)(PTR p+ INT) */
      if (POINTER_TYPE_P (TREE_TYPE (arg0))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
	return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
						TREE_TYPE (arg0),
						arg0,
						fold_convert (sizetype, arg1)));
      /* INT + PTR -> (INT)(PTR p+ INT) */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
						TREE_TYPE (arg1),
						arg1,
						fold_convert (sizetype, arg0)));
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, arg0),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, arg1),
			    fold_convert (type, TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
	{
	  /* Convert ~A + 1 to -A.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && integer_onep (arg1))
	    return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));

	  /* ~X + X is -1.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg0, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (tem, arg1, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand (type, t1, arg1);
		}
	    }

	  /* X + ~X is -1.  */
	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg1, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (arg0, tem, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand (type, t1, arg0);
		}
	    }

	  /* X + (X / CST) * -CST is X % CST.  */
	  if (TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	      && operand_equal_p (arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
	    {
	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
	      tree cst1 = TREE_OPERAND (arg1, 1);
	      tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
	      if (sum && integer_zerop (sum))
		return fold_convert (type,
				     fold_build2 (TRUNC_MOD_EXPR,
						  TREE_TYPE (arg0),
						  arg0, cst0));
	    }
	}
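      /* Illustrative note (added; not in the original source): the last
	 transform is the truncating-division identity
	 X == (X/CST)*CST + X%CST rearranged; e.g. with CST == 8,
	 X + (X/8)*-8 == X - (X/8)*8 == X % 8.  */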
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
	 same or one.  Make sure type is not saturating.
	 fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1), 0)))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }
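	  /* Illustrative note (added; not in the original source): with
	     disjoint masks no bit position can carry, so e.g.
	     (x & 0xf0) + (y & 0x0f) has the same value as
	     (x & 0xf0) | (y & 0x0f).  */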
	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (((TREE_CODE (arg0) == PLUS_EXPR
		|| TREE_CODE (arg0) == MINUS_EXPR)
	       && TREE_CODE (arg1) == MULT_EXPR)
	      || ((TREE_CODE (arg1) == PLUS_EXPR
		   || TREE_CODE (arg1) == MINUS_EXPR)
		  && TREE_CODE (arg0) == MULT_EXPR))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2 (pcode, type,
				    fold_build2 (PLUS_EXPR, type,
						 fold_convert (type, parg0),
						 fold_convert (type, marg)),
				    fold_convert (type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return fold_build2 (PLUS_EXPR, type,
				    fold_convert (type, parg0),
				    fold_build2 (pcode, type,
						 fold_convert (type, marg),
						 fold_convert (type,
							       parg1)));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue (fold_convert (type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold_build2 (MINUS_EXPR, type,
				    fold_convert (type, arg0),
				    fold_convert (type, tem));
	    }

	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
	     to __complex__ ( x, y ).  This is not the same for SNaNs or
	     if signed zeros are involved.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = arg1r ? arg1r
				  : build1 (REALPART_EXPR, rtype, arg1);
		      tree ip = arg0i ? arg0i
				  : build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2 (COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
				  : build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = arg1i ? arg1i
				  : build1 (IMAGPART_EXPR, rtype, arg1);
		      return fold_build2 (COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }
	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (code, type, arg0, arg1)))
	    return tem;

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2 (MULT_EXPR, type, arg0,
				build_real (type, dconst2));

	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
		  return fold_build2 (PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e +a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
		  return fold_build2 (PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	tree rtype;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
		TYPE_UNSIGNED (rtype))
	    /* Only create rotates in complete modes.  Other cases are not
	       expanded properly.  */
	    && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
			     code0 == LSHIFT_EXPR ? tree01 : tree11);
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return build2 ((code0 == LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree01);
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return build2 ((code0 != LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree11);
	      }
	  }
      }
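      /* Illustrative example (added; not in the original source): for a
	 32-bit unsigned x, (x << 3) + (x >> 29) combines two disjoint
	 bit ranges, so the addition behaves as OR and the whole
	 expression is a left-rotate of x by 3 bits.  */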
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	  && !TYPE_SATURATING (type))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* With undefined overflow we can only associate constants
	     with one variable.  */
	  if ((POINTER_TYPE_P (type)
	       || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
	      && var0 && var1)
	    {
	      tree tmp0 = var0;
	      tree tmp1 = var1;

	      if (TREE_CODE (tmp0) == NEGATE_EXPR)
		tmp0 = TREE_OPERAND (tmp0, 0);
	      if (TREE_CODE (tmp1) == NEGATE_EXPR)
		tmp1 = TREE_OPERAND (tmp1, 0);
	      /* The only case we can still associate with two variables
		 is if they are the same, modulo negation.  */
	      if (!operand_equal_p (tmp0, tmp1, 0))
		ok = false;
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && (2 < ((var0 != 0) + (var1 != 0)
		       + (con0 != 0) + (con1 != 0)
		       + (lit0 != 0) + (lit1 != 0)
		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
	    {
	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	      if (code == MINUS_EXPR)
		code = PLUS_EXPR;

	      var0 = associate_trees (var0, var1, code, type);
	      con0 = associate_trees (con0, con1, code, type);
	      lit0 = associate_trees (lit0, lit1, code, type);
	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return fold_convert (type,
					 associate_trees (var0, minus_lit0,
							  MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (con0, minus_lit0,
					      MINUS_EXPR, type);
		      return fold_convert (type,
					   associate_trees (var0, con0,
							    PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (con0, lit0, code, type);
	      return fold_convert (type, associate_trees (var0, con0,
							  code, type));
	    }
	}

      return NULL_TREE;
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
        {
          /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
          if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
              && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
              tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
              tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
              return fold_build2 (PLUS_EXPR, type,
                                  fold_build2 (MINUS_EXPR, type, arg00, arg10),
                                  fold_build2 (MINUS_EXPR, type, arg01, arg11));
            }
          /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
             simplifies.  */
          else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
              tree tmp = fold_binary (MINUS_EXPR, type, arg00,
                                      fold_convert (type, arg1));
              if (tmp)
                return fold_build2 (PLUS_EXPR, type, tmp, arg01);
            }
        }
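      /* For example, (p p+ 4) - (p p+ 6) becomes (p - p) + (4 - 6), whose
         pieces then fold to the constant byte offset -2.  */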
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (PLUS_EXPR, type, op0,
                            fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && (FLOAT_TYPE_P (type)
              || INTEGRAL_TYPE_P (type))
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, negate_expr (arg1)),
                            fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == NEGATE_EXPR
          && integer_onep (arg1)
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build1 (BIT_NOT_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && integer_all_onesp (arg0))
        return fold_build1 (BIT_NOT_EXPR, type, op1);

      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
          && operand_equal_p (arg0,
                              TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
                              TREE_OPERAND (arg1, 1), 0))
        return fold_convert (type,
                             fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
                                          arg0, TREE_OPERAND (arg1, 1)));
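      /* For example, X - (X / 16) * 16 becomes X % 16 (a TRUNC_MOD_EXPR,
         matching C's truncating division).  */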
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg0))
            return negate_expr (fold_convert (type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                {
                  tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
                  return fold_build2 (BIT_AND_EXPR, type,
                                      fold_build1 (BIT_NOT_EXPR, type, arg10),
                                      fold_convert (type, arg0));
                }
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                {
                  tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
                  return fold_build2 (BIT_AND_EXPR, type,
                                      fold_build1 (BIT_NOT_EXPR, type, arg11),
                                      fold_convert (type, arg0));
                }
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2 (BIT_XOR_EXPR, type,
                                     TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2 (MINUS_EXPR, type, tem, mask1);
                }
            }
        }
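      /* For example, with A = 0xC and B = 0xA: A - (A & B) = 0xC - 0x8 = 0x4,
         which equals ~B & A.  */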
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert (type, arg1));
      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
         __complex__ ( x, -y ).  This is not the same for SNaNs or if
         signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
          && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree rtype = TREE_TYPE (TREE_TYPE (arg0));
          tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
          tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
          bool arg0rz = false, arg0iz = false;
          if ((arg0r && (arg0rz = real_zerop (arg0r)))
              || (arg0i && (arg0iz = real_zerop (arg0i))))
            {
              tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
              tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
              if (arg0rz && arg1i && real_zerop (arg1i))
                {
                  tree rp = fold_build1 (NEGATE_EXPR, rtype,
                                         arg1r ? arg1r
                                         : build1 (REALPART_EXPR, rtype, arg1));
                  tree ip = arg0i ? arg0i
                    : build1 (IMAGPART_EXPR, rtype, arg0);
                  return fold_build2 (COMPLEX_EXPR, type, rp, ip);
                }
              else if (arg0iz && arg1r && real_zerop (arg1r))
                {
                  tree rp = arg0r ? arg0r
                    : build1 (REALPART_EXPR, rtype, arg0);
                  tree ip = fold_build1 (NEGATE_EXPR, rtype,
                                         arg1i ? arg1i
                                         : build1 (IMAGPART_EXPR, rtype, arg1));
                  return fold_build2 (COMPLEX_EXPR, type, rp, ip);
                }
            }
        }
      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
          && operand_equal_p (arg0, arg1, 0))
        return fold_convert (type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || INTEGRAL_TYPE_P (type)))
        return fold_build2 (PLUS_EXPR, type,
                            fold_convert (type, arg0),
                            fold_convert (type, negate_expr (arg1)));
      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }
      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree aref0 = TREE_OPERAND (arg0, 0);
          tree aref1 = TREE_OPERAND (arg1, 0);
          if (operand_equal_p (TREE_OPERAND (aref0, 0),
                               TREE_OPERAND (aref1, 0), 0))
            {
              tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
              tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
              tree esz = array_ref_element_size (aref0);
              tree diff = build2 (MINUS_EXPR, type, op0, op1);
              return fold_build2 (MULT_EXPR, type, diff,
                                  fold_convert (type, esz));
            }
        }
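      /* For example, with 4-byte elements, the byte difference
         &a[5] - &a[2] folds to (5 - 2) * 4 = 12.  */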
      if (flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  Make sure type is not saturating.
         fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2 (MULT_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
                            fold_convert (type, negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2 (MULT_EXPR, type,
                            fold_convert (type, negate_expr (arg0)),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue (fold_convert (type, arg0));
          /* Transform x * -1 into -x.  Make sure to do the negation
             on the original operand with conversions not stripped
             because we can only strip non-sign-changing conversions.  */
          if (integer_all_onesp (arg1))
            return fold_convert (type, negate_expr (op0));
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2 (MULT_EXPR, type,
                                fold_convert (type, negate_expr (arg0)), tem);

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2 (LSHIFT_EXPR, type, op0,
                                TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2 (LSHIFT_EXPR, type, op1,
                                TREE_OPERAND (arg0, 1));

          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                             &strict_overflow_p)))
            {
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "multiplication"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_convert (type, tem);
            }
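          /* For example, a * (1 << b) becomes a << b above, and
             extract_muldiv lets a nest such as (X * 4) * 2 be rewritten
             as X * 8.  */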
          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (type, arg0);
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert (type, negate_expr (arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
             the result for floating point types due to rounding so it is
             applied only if -fassociative-math was specified.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1, 0);
              if (tem)
                return fold_build2 (RDIV_EXPR, type, tem,
                                    TREE_OPERAND (arg0, 1));
            }

          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert (type, tem);
                  return fold_build2 (MULT_EXPR, type, tem, tem);
                }
            }
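          /* For example, -Y * -Y is simplified to Y * Y, and under
             -fassociative-math (2.0/X) * 3.0 becomes 6.0/X.  */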
          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return fold_build2 (COMPLEX_EXPR, type,
                                    negate_expr (fold_build1 (IMAGPART_EXPR,
                                                              rtype, arg0)),
                                    fold_build1 (REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return fold_build2 (COMPLEX_EXPR, type,
                                    fold_build1 (IMAGPART_EXPR, rtype, arg0),
                                    negate_expr (fold_build1 (REALPART_EXPR,
                                                              rtype, arg0)));
            }
          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (type, arg0);
          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of root(...)*root(...).  */
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
                {
                  tree rootfn, arg;
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (BUILTIN_SQRT_P (fcode0)
                      && operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (TYPE_MODE (type)))
                    return arg00;

                  /* Optimize root(x)*root(y) as root(x*y).  */
                  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
                  return build_call_expr (rootfn, 1, arg);
                }

              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
                {
                  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  tree arg = fold_build2 (PLUS_EXPR, type,
                                          CALL_EXPR_ARG (arg0, 0),
                                          CALL_EXPR_ARG (arg1, 0));
                  return build_call_expr (expfn, 1, arg);
                }

              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
                      return build_call_expr (powfn, 2, arg, arg01);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
                      return build_call_expr (powfn, 2, arg00, arg);
                    }
                }

              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                      CALL_EXPR_ARG (arg1, 0), 0))
                {
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

                  if (sinfn != NULL_TREE)
                    return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
                }

              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);
                  if (TREE_CODE (arg11) == REAL_CST
                      && !TREE_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr (powfn, 2, arg0, arg);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  if (TREE_CODE (arg01) == REAL_CST
                      && !TREE_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr (powfn, 2, arg1, arg);
                    }
                }
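              /* For example, x*pow(x,2.0) and pow(x,2.0)*x both become
                 pow(x,3.0) here.  */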
              /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (! optimize_size
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      return build_call_expr (powfn, 2, arg0, arg);
                    }
                }
            }
        }
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue (fold_convert (type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = fold_convert (type, integer_zero_node);
          t1 = fold_unary (BIT_NOT_EXPR, type, t1);
          return omit_one_operand (type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = fold_convert (type, integer_zero_node);
          t1 = fold_unary (BIT_NOT_EXPR, type, t1);
          return omit_one_operand (type, t1, arg0);
        }
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
          int width = TYPE_PRECISION (type), w;
          hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
          lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          hi2 = TREE_INT_CST_HIGH (arg1);
          lo2 = TREE_INT_CST_LOW (arg1);

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
            return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));

          if (width > HOST_BITS_PER_WIDE_INT)
            {
              mhi = (unsigned HOST_WIDE_INT) -1
                    >> (2 * HOST_BITS_PER_WIDE_INT - width);
              mlo = -1;
            }
          else
            {
              mhi = 0;
              mlo = (unsigned HOST_WIDE_INT) -1
                    >> (HOST_BITS_PER_WIDE_INT - width);
            }

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
            return fold_build2 (BIT_IOR_EXPR, type,
                                TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             mode which allows further optimizations.  */
          hi1 &= mhi;
          lo1 &= mlo;
          hi2 &= mhi;
          lo2 &= mlo;
          hi3 = hi1 & ~hi2;
          lo3 = lo1 & ~lo2;
          for (w = BITS_PER_UNIT;
               w <= width && w <= HOST_BITS_PER_WIDE_INT;
               w <<= 1)
            {
              unsigned HOST_WIDE_INT mask
                = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
              if (((lo1 | lo2) & mask) == mask
                  && (lo1 & ~mask) == 0 && hi1 == 0)
                {
                  hi3 = 0;
                  lo3 = mask;
                  break;
                }
            }
          if (hi3 != hi1 || lo3 != lo1)
            return fold_build2 (BIT_IOR_EXPR, type,
                                fold_build2 (BIT_AND_EXPR, type,
                                             TREE_OPERAND (arg0, 0),
                                             build_int_cst_wide (type,
                                                                 lo3, hi3)),
                                arg1);
        }
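      /* For example, (X & 0x3f) | 0x0f becomes (X & 0x30) | 0x0f, while
         (X & 0xff) | 0x0f is left alone because 0xff is already the mask
         of a machine mode.  */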
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1 (BIT_NOT_EXPR, type,
                              build2 (BIT_AND_EXPR, type,
                                      TREE_OPERAND (arg0, 0),
                                      TREE_OPERAND (arg1, 0)));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
        return fold_build1 (BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = fold_convert (type, integer_zero_node);
          t1 = fold_unary (BIT_NOT_EXPR, type, t1);
          return omit_one_operand (type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = fold_convert (type, integer_zero_node);
          t1 = fold_unary (BIT_NOT_EXPR, type, t1);
          return omit_one_operand (type, t1, arg0);
        }
      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1), 0)))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }
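      /* For example, (X & 0xf0) ^ (Y & 0x0f) has disjoint masks, so it is
         handled as (X & 0xf0) | (Y & 0x0f).  */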
      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 1);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
                            arg1);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 0);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
                            arg1);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 1);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
                            arg0);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 0);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
                            arg0);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build2 (code, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2 (code, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
                            fold_build1 (BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        return fold_build2 (EQ_EXPR, type, arg0,
                            build_int_cst (TREE_TYPE (arg0), 0));
      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_convert (type, arg0),
                              fold_build1 (BIT_NOT_EXPR, type, tem));
        }
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg0));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue (fold_convert (type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_zero_node, arg0);
      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
          tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                   TREE_OPERAND (arg0, 0), tmp1);
          tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                   TREE_OPERAND (arg0, 1), tmp1);
          return fold_convert (type,
                               fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
                                            tmp2, tmp3));
        }
      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2 (EQ_EXPR, type,
                              fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
                                           build_int_cst (TREE_TYPE (tem), 1)),
                              build_int_cst (TREE_TYPE (tem), 0));
        }
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2 (EQ_EXPR, type,
                              fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
                                           build_int_cst (TREE_TYPE (tem), 1)),
                              build_int_cst (TREE_TYPE (tem), 0));
        }
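      /* Both folds above turn a low-bit test into a comparison; e.g.
         ~X & 1 is 1 exactly when the low bit of X is 0, hence
         (X & 1) == 0.  */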
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_convert (type, arg0),
                              fold_build1 (BIT_NOT_EXPR, type, tem));
        }
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg0));
        }

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return fold_convert (type, TREE_OPERAND (arg0, 0));
        }

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1 (BIT_NOT_EXPR, type,
                              build2 (BIT_IOR_EXPR, type,
                                      fold_convert (type,
                                                    TREE_OPERAND (arg0, 0)),
                                      fold_convert (type,
                                                    TREE_OPERAND (arg1, 0))));
        }
      /* If arg0 is derived from the address of an object or function, we may
         be able to fold this expression using the object or function's
         alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT modulus, residue;
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

          modulus = get_pointer_modulus_and_residue (arg0, &residue);

          /* This works because modulus is a power of 2.  If this weren't the
             case, we'd have to replace it by its greatest power-of-2
             divisor: modulus & -modulus.  */
          if (low < modulus)
            return build_int_cst (type, residue & low);
        }
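      /* For example, if ARG0 is known to be 8-byte aligned (modulus 8,
         residue 0), then ARG0 & 7 folds to the constant 0.  */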
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
              (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
         if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
           || TREE_CODE (arg0) == RSHIFT_EXPR)
          && host_integerp (TREE_OPERAND (arg0, 1), 1)
          && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
             < TYPE_PRECISION (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
        {
          unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
          unsigned HOST_WIDE_INT mask
            = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
          unsigned HOST_WIDE_INT newmask, zerobits = 0;
          tree shift_type = TREE_TYPE (arg0);

          if (TREE_CODE (arg0) == LSHIFT_EXPR)
            zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
          else if (TREE_CODE (arg0) == RSHIFT_EXPR
                   && TYPE_PRECISION (TREE_TYPE (arg0))
                      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
            {
              unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
              tree arg00 = TREE_OPERAND (arg0, 0);
              /* See if more bits can be proven as zero because of
                 zero extension.  */
              if (TREE_CODE (arg00) == NOP_EXPR
                  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
                {
                  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
                  if (TYPE_PRECISION (inner_type)
                      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
                      && TYPE_PRECISION (inner_type) < prec)
                    {
                      prec = TYPE_PRECISION (inner_type);
                      /* See if we can shorten the right shift.  */
                      if (shiftc < prec)
                        shift_type = inner_type;
                    }
                }
              zerobits = ~(unsigned HOST_WIDE_INT) 0;
              zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
              zerobits <<= prec - shiftc;
              /* For an arithmetic shift, if the sign bit could be set,
                 ZEROBITS can actually contain sign bits, so no
                 transformation is possible, unless MASK masks them all
                 away.  In that case the shift needs to be converted into
                 a logical shift.  */
              if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
                  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
                {
                  if ((mask & zerobits) == 0)
                    shift_type = unsigned_type_for (TREE_TYPE (arg0));
                  else
                    zerobits = 0;
                }
            }

          /* ((X << 16) & 0xff00) is (X, 0).  */
          if ((mask & zerobits) == mask)
            return omit_one_operand (type, build_int_cst (type, 0), arg0);
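          /* For example, with 32-bit unsigned X, (X << 4) & 0xfff0 is
             rewritten below as (X << 4) & 0xffff, since ORing in the
             always-zero low bits yields a machine-mode mask that may
             enable further folding.  */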
          newmask = mask | zerobits;
          if (newmask != mask && (newmask & (newmask + 1)) == 0)
            {
              unsigned int prec;

              /* Only do the transformation if NEWMASK is some integer
                 mode's mask.  */
              for (prec = BITS_PER_UNIT;
                   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
                if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
                  break;
              if (prec < HOST_BITS_PER_WIDE_INT
                  || newmask == ~(unsigned HOST_WIDE_INT) 0)
                {
                  if (shift_type != TREE_TYPE (arg0))
                    {
                      tem = fold_build2 (TREE_CODE (arg0), shift_type,
                                         fold_convert (shift_type,
                                                       TREE_OPERAND (arg0, 0)),
                                         TREE_OPERAND (arg0, 1));
                      tem = fold_convert (type, tem);
                    }
                  else
                    tem = op0;
                  return fold_build2 (BIT_AND_EXPR, type, tem,
                                      build_int_cst_type (TREE_TYPE (op1),
                                                          newmask));
                }
            }
        }

      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
         NaNs or Infinities.  Skip the transformation
         for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
          && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree r = build_real (TREE_TYPE (arg0), dconst1);

          return omit_two_operands (type, r, arg0, arg1);
        }

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
          if (! HONOR_NANS (TYPE_MODE (elem_type))
              && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
            {
              tree r = build_real (elem_type, dconst1);
              /* omit_two_operands will call fold_convert for us.  */
              return omit_two_operands (type, r, arg0, arg1);
            }
        }
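      /* For example, when NaNs and infinities are not honored (as with
         -ffinite-math-only), x / x folds to 1.0; omit_two_operands still
         preserves any side effects of the operands.  */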
      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2 (RDIV_EXPR, type,
                            TREE_OPERAND (arg0, 0),
                            negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2 (RDIV_EXPR, type,
                            negate_expr (arg0),
                            TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue (fold_convert (type, negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -freciprocal-math.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (flag_reciprocal_math
              && 0 != (tem = const_binop (code, build_real (type, dconst1),
                                          arg1, 0)))
            return fold_build2 (MULT_EXPR, type, arg0, tem);
          /* Find the reciprocal if optimizing and the result is exact.  */
          if (optimize)
            {
              REAL_VALUE_TYPE r;
              r = TREE_REAL_CST (arg1);
              if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
                {
                  tem = build_real (type, r);
                  return fold_build2 (MULT_EXPR, type,
                                      fold_convert (type, arg0), tem);
                }
            }
        }
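      /* For example, x / 2.0 becomes x * 0.5 when optimizing, since the
         reciprocal is exact, while x / 3.0 becomes x * (1.0/3.0) only
         under -freciprocal-math.  */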
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                            fold_build2 (MULT_EXPR, type,
                                         TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold_build2 (MULT_EXPR, type,
                            fold_build2 (RDIV_EXPR, type, arg0,
                                         TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1), 0);
          if (tem)
            return fold_build2 (RDIV_EXPR, type, tem,
                                TREE_OPERAND (arg1, 0));
        }
      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                {
                  tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
                  return fold_build2 (RDIV_EXPR, type,
                                      build_real (type, dconst1), tmp);
                }
            }

          /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    return build_call_expr (cosfn, 1, arg00);
                }
            }

          /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    {
                      tree tmp = build_call_expr (cosfn, 1, arg00);
                      return fold_build2 (RDIV_EXPR, type,
                                          build_real (type, dconst1),
                                          tmp);
                    }
                }
            }

          /* Optimize pow(x,c)/x as pow(x,c-1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              if (TREE_CODE (arg01) == REAL_CST
                  && !TREE_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  return build_call_expr (powfn, 2, arg1, arg);
                }
            }

          /* Optimize a/root(b/c) into a*root(c/b).  */
          if (BUILTIN_ROOT_P (fcode1))
            {
              tree rootarg = CALL_EXPR_ARG (arg1, 0);

              if (TREE_CODE (rootarg) == RDIV_EXPR)
                {
                  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                  tree b = TREE_OPERAND (rootarg, 0);
                  tree c = TREE_OPERAND (rootarg, 1);

                  tree tmp = fold_build2 (RDIV_EXPR, type, c, b);

                  tmp = build_call_expr (rootfn, 1, tmp);
                  return fold_build2 (MULT_EXPR, type, arg0, tmp);
                }
            }

          /* Optimize x/expN(y) into x*expN(-y).  */
          if (BUILTIN_EXPONENT_P (fcode1))
            {
              tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
              arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
              return fold_build2 (MULT_EXPR, type, arg0, arg1);
            }

          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
              tree arg11 = CALL_EXPR_ARG (arg1, 1);
              tree neg11 = fold_convert (type, negate_expr (arg11));
              arg1 = build_call_expr (powfn, 2, arg10, neg11);
              return fold_build2 (MULT_EXPR, type, arg0, arg1);
            }
        }
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
         a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));

              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying A / (B << N)"),
                                       WARN_STRICT_OVERFLOW_MISC);

              sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
                                    sh_cnt, build_int_cst (NULL_TREE, pow2));
              return fold_build2 (RSHIFT_EXPR, type,
                                  fold_convert (type, arg0), sh_cnt);
            }
        }
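      /* For example, with unsigned A, A / (4 << N) becomes A >> (N + 2),
         since log2(4) == 2.  */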
      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
         TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
          && TYPE_UNSIGNED (type)
          && code == FLOOR_DIV_EXPR)
        return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall thru */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
        return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return fold_convert (type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
         undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2 (code, type,
                              fold_convert (type, TREE_OPERAND (arg0, 0)),
                              negate_expr (arg1));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (arg0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2 (code, type, negate_expr (arg0),
                              TREE_OPERAND (arg1, 0));
        }

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, it's not clear if they do
         after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert (type, tem);
        }

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
         effects in X.  */
      if (integer_onep (arg1))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
         proper warnings and errors.  */
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
         effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
        return omit_one_operand (type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return omit_one_operand (type, integer_zero_node, arg0);

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      strict_overflow_p = false;
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree c = arg1;
          /* Also optimize A % (C << N)  where C is a power of 2,
             to A & ((C << N) - 1).  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR)
            c = TREE_OPERAND (arg1, 0);

          if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
            {
              tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
                                       build_int_cst (TREE_TYPE (arg1), 1));
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "X % (power of two)"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (BIT_AND_EXPR, type,
                                  fold_convert (type, arg0),
                                  fold_convert (type, mask));
            }
        }
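      /* For example, unsigned X % 8 becomes X & 7, and X % (2 << N)
         becomes X & ((2 << N) - 1).  */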
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)
          && TREE_INT_CST_HIGH (arg1) < 0
          && !TYPE_OVERFLOW_TRAPS (type)
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
          && !sign_bit_p (arg1, arg1))
        return fold_build2 (code, type, fold_convert (type, arg0),
                            fold_convert (type, negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == NEGATE_EXPR
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build2 (code, type, fold_convert (type, arg0),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying modulos"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert (type, tem);
        }

      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
        return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
        return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
                               + TREE_INT_CST_LOW (arg1));

          /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
             being well defined.  */
          if (low >= TYPE_PRECISION (type))
            {
              if (code == LROTATE_EXPR || code == RROTATE_EXPR)
                low = low % TYPE_PRECISION (type);
              else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
                return build_int_cst (type, 0);
              else
                low = TYPE_PRECISION (type) - 1;
            }

          return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
                              build_int_cst (type, low));
        }
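      /* For example, (X << 3) << 5 becomes X << 8; on a 32-bit type,
         (X >> 20) >> 15 would over-shift, so with unsigned X it folds
         to 0.  */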
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
         into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
           || (TYPE_UNSIGNED (type)
               && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
          && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
          tree lshift;
          tree arg00;

          if (low0 == low1)
            {
              arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));

              lshift = build_int_cst (type, -1);
              lshift = int_const_binop (code, lshift, arg1, 0);

              return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
            }
        }
      /* Rewrite an LROTATE_EXPR by a constant into an
         RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          tree tem = build_int_cst (TREE_TYPE (arg1),
                                    TYPE_PRECISION (type));
          tem = const_binop (MINUS_EXPR, tem, arg1, 0);
          return fold_build2 (RROTATE_EXPR, type, op0, tem);
        }

      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2 (TREE_CODE (arg0), type,
                            fold_build2 (code, type,
                                         TREE_OPERAND (arg0, 0), arg1),
                            fold_build2 (code, type,
                                         TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
         type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (arg1) == 0
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
          && ((TREE_INT_CST_LOW (arg1)
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
              == (unsigned int) TYPE_PRECISION (type)))
        return TREE_OPERAND (arg0, 0);

      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
              (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
         if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree mask = fold_build2 (code, type,
                                   fold_convert (type, TREE_OPERAND (arg0, 1)),
                                   arg1);
          tree shift = fold_build2 (code, type,
                                    fold_convert (type, TREE_OPERAND (arg0, 0)),
                                    arg1);
          tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
          if (tem)
            return tem;
        }

      return NULL_TREE;

    case MIN_EXPR:
11704 if (operand_equal_p (arg0
, arg1
, 0))
11705 return omit_one_operand (type
, arg0
, arg1
);
11706 if (INTEGRAL_TYPE_P (type
)
11707 && operand_equal_p (arg1
, TYPE_MIN_VALUE (type
), OEP_ONLY_CONST
))
11708 return omit_one_operand (type
, arg1
, arg0
);
11709 tem
= fold_minmax (MIN_EXPR
, type
, arg0
, arg1
);
11715 if (operand_equal_p (arg0
, arg1
, 0))
11716 return omit_one_operand (type
, arg0
, arg1
);
11717 if (INTEGRAL_TYPE_P (type
)
11718 && TYPE_MAX_VALUE (type
)
11719 && operand_equal_p (arg1
, TYPE_MAX_VALUE (type
), OEP_ONLY_CONST
))
11720 return omit_one_operand (type
, arg1
, arg0
);
11721 tem
= fold_minmax (MAX_EXPR
, type
, arg0
, arg1
);
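      /* Sketch: MIN (x, x) folds to x, and MAX (x, TYPE_MAX_VALUE)
	 folds to the constant while still evaluating x for its side
	 effects via omit_one_operand.  */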
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */
      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2 (code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2 (code, type, arg0, tem);
	}
    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.  Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a00,
				fold_build2 (code, type, a01, a11));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a00,
				fold_build2 (code, type, a01, a10));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a01,
				fold_build2 (code, type, a00, a11));

	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold_build2 (TREE_CODE (arg0), type,
				fold_build2 (code, type, a00, a10),
				a01);
	}
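      /* Worked example (sketch): (a || b) && (a || c) becomes
	 a || (b && c) through the a00 == a10 arm above, provided b
	 has no side effects.  */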
      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (code, type, op0, op1)))
	return tem;

      /* Check for the possibility of merging component references.  If our
	 lhs is another similar operation, try to merge its rhs with our
	 rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
	  && 0 != (tem = fold_truthop (code, type,
				       TREE_OPERAND (arg0, 1), arg1)))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
	return tem;

      return NULL_TREE;

    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_one_node, arg0);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  /* Only call invert_truthvalue if operand is a truth value.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	    tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
	  else
	    tem = invert_truthvalue (arg0);
	  return non_lvalue (fold_convert (type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_one_node, arg0);

      return NULL_TREE;
    case EQ_EXPR:
    case NE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue (fold_convert (type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue (fold_convert (type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      fold_convert (TREE_TYPE (arg0), arg1),
				      TREE_OPERAND (arg0, 1), 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (arg1))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
					 fold_convert (TREE_TYPE (arg0), arg1),
					 TREE_OPERAND (arg0, 1)));
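      /* Worked examples (sketch): x + 5 == 7 becomes x == 2 via the
	 PLUS_EXPR arm, and -x == 3 becomes x == -3 via the NEGATE_EXPR
	 arm, provided the revised constant does not overflow.  */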
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree cst = TREE_OPERAND (arg0, 1);

	  if (code == EQ_EXPR
	      && !integer_zerop (cst))
	    return omit_two_operands (type, boolean_false_node,
				      TREE_OPERAND (arg0, 0), arg1);
	  else
	    return omit_two_operands (type, boolean_true_node,
				      TREE_OPERAND (arg0, 0), arg1);
	}

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && integer_zerop (arg1))
	return fold_build2 (code, type,
			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
				      arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				 build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2 (code, type,
				  fold_convert (TREE_TYPE (arg1), tem), arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
				      arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				 build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2 (code, type,
				  fold_convert (TREE_TYPE (arg1), tem), arg1);
	    }
	}
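      /* Sketch: ((1 << n) & flags) == 0 becomes ((flags >> n) & 1) == 0,
	 saving an instruction on two-operand machines as described
	 above.  */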
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
				     fold_convert (newtype,
						   TREE_OPERAND (arg0, 0)),
				     fold_convert (newtype,
						   TREE_OPERAND (arg0, 1)));

	  return fold_build2 (code, type, newmod,
			      fold_convert (newtype, arg1));
	}
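      /* Sketch: for a signed int x, x % 4 == 0 becomes
	 (unsigned) x % 4U == 0; for a power-of-two divisor compared
	 against zero the two tests agree, and the unsigned modulus is
	 cheaper to expand.  */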
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

	  /* Check for a valid shift count.  */
	  if (TREE_INT_CST_HIGH (arg001) == 0
	      && TREE_INT_CST_LOW (arg001) < prec)
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
		  tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
		  return fold_build2 (code, type, tem, arg1);
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
				    arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand (type,
					 code == EQ_EXPR ? integer_one_node
							 : integer_zero_node,
					 arg000);
	    }
	}
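      /* Worked example (sketch): with 32-bit unsigned x,
	 ((x >> 3) & 4) != 0 becomes (x & (4 << 3)) != 0, i.e.
	 (x & 32) != 0, since the shifted constant does not
	 overflow.  */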
      /* If this is an NE comparison of zero with an AND of one, remove the
	 comparison since the AND will give the correct value.  */
      if (code == NE_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1)))
	return fold_convert (type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			    arg0, fold_convert (TREE_TYPE (arg0),
						integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
      if (tem)
	return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1 (BIT_NOT_EXPR,
				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
				   TREE_OPERAND (arg0, 1));
	  tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       arg1, notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand (type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
	  tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       TREE_OPERAND (arg0, 1), notd);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand (type, rslt, arg0);
	}
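      /* Sketch: (x & 3) == 4 folds to false because 4 & ~3 != 0, and
	 (x | 4) == 3 likewise folds to false because 4 & ~3 != 0; in
	 both cases x is still evaluated for its side effects via
	 omit_one_operand.  */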
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}

      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
	      return fold_build2 (code, type, iref,
				  build_int_cst (TREE_TYPE (iref), 0));
	    }
	}

      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (TREE_INT_CST_HIGH (arg01) == 0
	      && TREE_INT_CST_LOW (arg01)
		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert (itype, arg00);
		}
	      return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  type, arg00, build_int_cst (itype, 0));
	    }
	}
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    build_int_cst (TREE_TYPE (arg1), 0));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
			    build_int_cst (TREE_TYPE (arg1), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
					 TREE_OPERAND (arg0, 1), arg1));
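      /* Sketch: (x ^ y) != 0 becomes x != y, and (x ^ 5) == 3 becomes
	 x == (5 ^ 3), i.e. x == 6.  */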
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
			     TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
			     TREE_OPERAND (arg0, 1));
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
			      type, tem, arg1);
	}

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
	}

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
			     arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			      tem, build_int_cst (TREE_TYPE (tem), 0));
	}

      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand (type, res, arg0);
	}
      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (code, type,
			    TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg1, 0));

      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg00, arg10),
					     arg01),
				build_int_cst (itype, 0));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg00, arg11),
					     arg01),
				build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg01, arg10),
					     arg00),
				build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg01, arg11),
					     arg00),
				build_int_cst (itype, 0));
	}
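      /* Sketch: (x & 8) == (y & 8) becomes ((x ^ y) & 8) == 0: the two
	 masked values agree exactly when x and y agree in the masked
	 bit.  */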
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2 (code, type, arg00, arg10);
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2 (code, type, arg00, arg11);
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2 (code, type, arg01, arg10);
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2 (code, type, arg01, arg11);

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    return fold_build2 (code, type,
				fold_build2 (BIT_XOR_EXPR, itype, arg00,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg01, arg11)),
				arg10);
	}
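      /* Sketch: (x ^ z) == (y ^ z) becomes x == y (z is only compared
	 for syntactic equality, so it has no side effects), and
	 (x ^ 1) == (y ^ 2) becomes (x ^ 3) == y.  */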
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary (code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands (type, boolean_false_node,
					      imag0, imag1);
		  return fold_build2 (NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands (type, boolean_true_node,
					      imag0, imag1);
		  return fold_build2 (EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary (code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands (type, boolean_false_node,
					      real0, real1);
		  return fold_build2 (NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands (type, boolean_true_node,
					      real0, real1);
		  return fold_build2 (EQ_EXPR, type, real0, real1);
		}
	    }
	}

      return NULL_TREE;
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
	 This transformation affects the cases which are handled in later
	 optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) != INTEGER_CST
	  && tree_int_cst_sgn (arg1) > 0)
	{
	  if (code == GE_EXPR)
	    {
	      arg1 = const_binop (MINUS_EXPR, arg1,
				  build_int_cst (TREE_TYPE (arg1), 1), 0);
	      return fold_build2 (GT_EXPR, type, arg0,
				  fold_convert (TREE_TYPE (arg0), arg1));
	    }
	  if (code == LT_EXPR)
	    {
	      arg1 = const_binop (MINUS_EXPR, arg1,
				  build_int_cst (TREE_TYPE (arg1), 1), 0);
	      return fold_build2 (LE_EXPR, type, arg0,
				  fold_convert (TREE_TYPE (arg0), arg1));
	    }
	}
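      /* Sketch: x >= 5 becomes x > 4 and x < 5 becomes x <= 4, so the
	 constant-limit transformations below only need to recognize
	 the GT/LE forms.  */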
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int width = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && !TREE_OVERFLOW (arg1)
	    && width <= 2 * HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2 (EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2 (NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1), 0);
		  return fold_build2 (EQ_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1), 0);
		  return fold_build2 (NE_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2 (EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2 (NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (NE_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (EQ_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st;
		    st = signed_type_for (TREE_TYPE (arg1));
		    return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
					type, fold_convert (st, arg0),
					build_int_cst (st, 0));
		  }
	      }
	  }
      }
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    build2 (GE_EXPR, type,
				    TREE_OPERAND (arg0, 0), tem),
			    build2 (LE_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand (type, integer_one_node, arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand (type, integer_zero_node, arg0);
	}
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			       TREE_OPERAND (arg1, 1)),
		       build_int_cst (TREE_TYPE (arg0), 0));

      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg1) == NOP_EXPR
	      || TREE_CODE (arg1) == CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	return
	  build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		  fold_convert (TREE_TYPE (arg0),
				build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
					TREE_OPERAND (TREE_OPERAND (arg1, 0),
						      1))),
		  build_int_cst (TREE_TYPE (arg0), 0));
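      /* Sketch: for unsigned x, x < (1U << y) becomes (x >> y) == 0
	 and x >= (1U << y) becomes (x >> y) != 0.  */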
      return NULL_TREE;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2 (code, type, fold_convert (newtype, targ0),
			      fold_convert (newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert (type, arg1);
      return pedantic_non_lvalue (tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the sub-tree
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp,
		  int *walk_subtrees,
		  void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Check whether the sub-tree ST contains a label LABEL_EXPR which is
   accessible from outside the sub-tree.  Return false if no such label
   is found.  */

static bool
contains_label_p (tree st)
{
  return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue (tem);
	  return NULL_TREE;
	}

      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand (type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  tem = fold_truth_not_expr (arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}
      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build3 (code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue (fold_convert (type,
						  invert_truthvalue (arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (TYPE_PRECISION (TREE_TYPE (tem))
	      < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = ((unsigned HOST_WIDE_INT) -1
			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert (tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert (tem_type, tem);
		}
	      else
		tem = NULL;
	    }

	  if (tem)
	    return fold_convert (type,
				 fold_build2 (BIT_AND_EXPR,
					      TREE_TYPE (tem), tem,
					      fold_convert (TREE_TYPE (tem),
							    arg1)));
	}
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2 (BIT_AND_EXPR, type,
				TREE_OPERAND (tem, 0), arg1);
	}
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue (fold_convert (type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    fold_convert (type, arg0),
			    arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build2 (TRUTH_ORIF_EXPR, type,
				fold_convert (type, tem),
				arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build2 (TRUTH_ANDIF_EXPR, type,
				fold_convert (type, tem),
				op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2 (TRUTH_ORIF_EXPR, type,
			    fold_convert (type, arg0),
			    op2);
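      /* Sketch: with boolean a and b, a ? b : 0 becomes a && b and
	 a ? 1 : b becomes a || b, matching the four conversions
	 above.  */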
      return NULL_TREE;

    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();

    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
	  && type == TREE_TYPE (TREE_TYPE (arg0))
	  && host_integerp (arg1, 1)
	  && host_integerp (op2, 1))
	{
	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
	      && (idx % width) == 0
	      && (idx = idx / width)
		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      tree elements = NULL_TREE;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		elements = TREE_VECTOR_CST_ELTS (arg0);
	      else
		{
		  unsigned HOST_WIDE_INT idx;
		  tree value;

		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
		    elements = tree_cons (NULL_TREE, value, elements);
		}
	      while (idx-- > 0 && elements)
		elements = TREE_CHAIN (elements);
	      if (elements)
		return TREE_VALUE (elements);
	      else
		return fold_convert (type, integer_zero_node);
	    }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind)
      || IS_GIMPLE_STMT_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary (code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary (code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary (code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
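/* Illustrative note (an editor's sketch, not original source text):
   fold dispatches on operand count, so a tree for "1 + 2" reaches
   fold_binary through the 2-operand case and comes back as the
   INTEGER_CST 3, while codes that are not expressions fall through
   and are returned unchanged.  */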
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */
13404 struct md5_ctx ctx
;
13405 unsigned char checksum_before
[16], checksum_after
[16];
13408 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
13409 md5_init_ctx (&ctx
);
13410 fold_checksum_tree (expr
, &ctx
, ht
);
13411 md5_finish_ctx (&ctx
, checksum_before
);
13414 ret
= fold_1 (expr
);
13416 md5_init_ctx (&ctx
);
13417 fold_checksum_tree (expr
, &ctx
, ht
);
13418 md5_finish_ctx (&ctx
, checksum_after
);
13421 if (memcmp (checksum_before
, checksum_after
, 16))
13422 fold_check_failed (expr
, ret
);
13428 print_fold_checksum (const_tree expr
)
13430 struct md5_ctx ctx
;
13431 unsigned char checksum
[16], cnt
;
13434 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
13435 md5_init_ctx (&ctx
);
13436 fold_checksum_tree (expr
, &ctx
, ht
);
13437 md5_finish_ctx (&ctx
, checksum
);
13439 for (cnt
= 0; cnt
< 16; ++cnt
)
13440 fprintf (stderr
, "%02x", checksum
[cnt
]);
13441 putc ('\n', stderr
);
13445 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
13447 internal_error ("fold check: original tree changed by fold");
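
/* Illustrative sketch (not part of GCC): with --enable-checking=fold,
   a hypothetical buggy transformation inside fold_1 that rewrote its
   input in place, e.g.

     TREE_OPERAND (expr, 0) = build_int_cst (integer_type_node, 0);

   would change the MD5 digest of EXPR computed above, so the memcmp
   in fold () would differ and fold_check_failed would report it.  */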
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
{
  const void **slot;
  enum tree_code code;
  struct tree_function_decl buf;
  int i, len;

recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_function_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  slot = (const void **) htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree) &buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_empty (ht);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with an
   operand OP0.  */

tree
fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary (code, type, op0);
  if (!tem)
    tem = build1_stat (code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
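
/* Illustrative sketch (not part of GCC): a caller folding a double
   negation,

     tree folded = fold_build1 (NEGATE_EXPR, type, expr);

   gets the inner operand back when EXPR is itself NEGATE_EXPR <x> and
   fold_unary simplifies it; otherwise a fresh NEGATE_EXPR node is
   built by build1_stat.  */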
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  Return a folded expression if successful.
   Otherwise, return a tree expression with code CODE of type TYPE
   with operands OP0 and OP1.  */

tree
fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary (code, type, op0, op1);
  if (!tem)
    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary (code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
		checksum_before_arglist[16],
		checksum_after_fn[16],
		checksum_after_arglist[16];
  struct md5_ctx ctx;
  htab_t ht;
  int i;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  htab_empty (ht);
#endif

  tem = fold_builtin_call_array (type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  htab_empty (ht);

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  htab_delete (ht);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1 (code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2 (code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
			 tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3 (code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer (tree type, tree fn,
				   int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array (type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
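
/* Illustrative sketch (not part of GCC): when folding a static
   initializer such as

     static double d = 1.0 / 3.0;

   a front end can call fold_build2_initializer (RDIV_EXPR, ...) so
   the division is folded even under -ftrapping-math, since no
   run-time trap can occur when evaluating a static initializer.  */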
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
					     top, bottom, 0));

    default:
      return 0;
    }
}
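
/* Illustrative sketch (not part of GCC): with sizetype trees I and J,

     tree top = size_binop (MULT_EXPR, i,
			    size_binop (MULT_EXPR, j, size_int (8)));
     multiple_of_p (sizetype, top, size_int (8));

   answers 1: the MULT_EXPR case needs only one factor to be a
   multiple, and the inner J * 8 factor in turn matches BOTTOM through
   its constant operand.  */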
/* Return true if `t' is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (t == error_mark_node)
    return false;

  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
	 the range of this object.  */
      return ssa_name_nonnegative_p (t);

    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
	    return true;
	  return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
						 strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						    strict_overflow_p));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (TREE_TYPE (t));
	}
      break;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
      return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
					    strict_overflow_p);

    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
						      strict_overflow_p);
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if ((TREE_CODE (t) == MODIFY_EXPR
	     || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
	    && GENERIC_TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	    CASE_INT_FN (BUILT_IN_FFS):
	    CASE_INT_FN (BUILT_IN_PARITY):
	    CASE_INT_FN (BUILT_IN_POPCOUNT):
	    case BUILT_IN_BSWAP32:
	    case BUILT_IN_BSWAP64:
	      /* Always true.  */
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
		return true;
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_ASINH):
	    CASE_FLT_FN (BUILT_IN_ATAN):
	    CASE_FLT_FN (BUILT_IN_ATANH):
	    CASE_FLT_FN (BUILT_IN_CBRT):
	    CASE_FLT_FN (BUILT_IN_CEIL):
	    CASE_FLT_FN (BUILT_IN_ERF):
	    CASE_FLT_FN (BUILT_IN_EXPM1):
	    CASE_FLT_FN (BUILT_IN_FLOOR):
	    CASE_FLT_FN (BUILT_IN_FMOD):
	    CASE_FLT_FN (BUILT_IN_FREXP):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LDEXP):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLRINT):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	    CASE_FLT_FN (BUILT_IN_LRINT):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_MODF):
	    CASE_FLT_FN (BUILT_IN_NEARBYINT):
	    CASE_FLT_FN (BUILT_IN_RINT):
	    CASE_FLT_FN (BUILT_IN_ROUND):
	    CASE_FLT_FN (BUILT_IN_SCALB):
	    CASE_FLT_FN (BUILT_IN_SCALBLN):
	    CASE_FLT_FN (BUILT_IN_SCALBN):
	    CASE_FLT_FN (BUILT_IN_SIGNBIT):
	    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	    CASE_FLT_FN (BUILT_IN_SINH):
	    CASE_FLT_FN (BUILT_IN_TANH):
	    CASE_FLT_FN (BUILT_IN_TRUNC):
	      /* True if the 1st argument is nonnegative.  */
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_FMAX):
	      /* True if the 1st OR 2nd arguments are nonnegative.  */
	      return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						     strict_overflow_p)
		      || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
							 strict_overflow_p)));

	    CASE_FLT_FN (BUILT_IN_FMIN):
	      /* True if the 1st AND 2nd arguments are nonnegative.  */
	      return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						     strict_overflow_p)
		      && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
							 strict_overflow_p)));

	    CASE_FLT_FN (BUILT_IN_COPYSIGN):
	      /* True if the 2nd argument is nonnegative.  */
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the 1st argument is nonnegative or the second
		 argument is an even integer.  */
	      if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
		{
		  tree arg1 = CALL_EXPR_ARG (t, 1);
		  if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
		    return true;
		}
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the 1st argument is nonnegative or the second
		 argument is an even integer valued real.  */
	      if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
		  n = real_to_integer (&c);
		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n,
					 n < 0 ? -1 : 0, 0);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    default:
	      break;
	    }
      }

      /* ... fall through ...  */

    default:
      {
	tree type = TREE_TYPE (t);
	if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
	    && truth_value_p (TREE_CODE (t)))
	  /* Truth values evaluate to 0 or 1, which is nonnegative unless we
	     have a signed:1 type (where the value is -1 and 0).  */
	  return true;
      }
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
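
/* Illustrative sketch (not part of GCC): for a tree ABS_EXPR <x> of
   signed int type under -fstrict-overflow, the ABS_EXPR case above
   answers true but sets *STRICT_OVERFLOW_P, because ABS_EXPR of
   INT_MIN wraps back to INT_MIN; tree_expr_nonnegative_p then emits
   the -Wstrict-overflow note before returning true.  */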
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  bool sub_strict_overflow_p;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
	 the range of this object.  */
      return ssa_name_nonzero_p (t);

    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case INTEGER_CST:
      return !integer_zerop (t);

    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					      strict_overflow_p));
      }

    case ADDR_EXPR:
      {
	tree base = get_base_address (TREE_OPERAND (t, 0));

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  */
	if (VAR_OR_FUNCTION_DECL_P (base))
	  return !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p));

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
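
/* Illustrative sketch (not part of GCC): the address of an ordinary
   variable is folded to nonzero, so a test like

     int x;
     if (&x != 0) ...

   collapses to the true branch, while the address of a declaration
   marked __attribute__ ((weak)) is left alone because it may
   legitimately compare equal to NULL.  */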
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying
   TYPE, OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
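
/* Illustrative sketch (not part of GCC): fold_binary_to_constant
   (PLUS_EXPR, integer_type_node,
    build_int_cst (integer_type_node, 2),
    build_int_cst (integer_type_node, 3)) yields the INTEGER_CST 5,
   whereas folding 2 + x produces a non-constant tree and therefore
   NULL_TREE from this wrapper.  */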
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, fold_convert (sizetype, low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
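
/* Illustrative sketch (not part of GCC): for the C expression
   "abc"[1], EXP is an ARRAY_REF of a STRING_CST with index 1, and
   this routine folds it to the INTEGER_CST for 'b' (98).  */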
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				   TREE_INT_CST_HIGH (arg0),
				   &low, &high);
	t = force_fit_type_double (type, low, high, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  {
	    TREE_OVERFLOW (t) = 1;
	    TREE_CONSTANT_OVERFLOW (t) = 1;
	  }
	else if (TREE_CONSTANT_OVERFLOW (arg0))
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
	 the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
	t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
	t = arg0;
      /* If the value is negative, then the absolute value is
	 its negation.  */
      else
	{
	  unsigned HOST_WIDE_INT low;
	  HOST_WIDE_INT high;
	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				     TREE_INT_CST_HIGH (arg0),
				     &low, &high);
	  t = force_fit_type_double (type, low, high, -1,
				     overflow | TREE_OVERFLOW (arg0));
	}
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
			     ~TREE_INT_CST_HIGH (arg0), 0,
			     TREE_OVERFLOW (arg0));

  return t;
}
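
/* Illustrative sketch (not part of GCC): constants carry their value
   in a (low, high) pair of HOST_WIDE_INTs.  Negating the 32-bit
   INTEGER_CST INT_MIN via fold_negate_const yields +2147483648, which
   force_fit_type_double cannot represent in the signed 32-bit type,
   so the result comes back with TREE_OVERFLOW set.  */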
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
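
/* Illustrative sketch (not part of GCC): the NaN handling above means
   fold_relational_const (LT_EXPR, boolean_type_node, nan_cst, one_cst)
   returns NULL_TREE under -ftrapping-math, since the comparison must
   raise an invalid-operand exception at run time, but folds to false
   when trapping math is disabled.  */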
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right hand side of the modify expression inside the
     return, has no side effects.  If either doesn't have side effects set,
     we don't need to wrap the expression in a cleanup point expression.
     Note we don't check the left hand side of the modify because it should
     always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
	  && type == TREE_TYPE (TREE_TYPE (op00type)))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, op01))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;

  return build1 (INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;

  return t;
}
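
/* Illustrative sketch (not part of GCC): given "double _Complex c;",
   the tree for *(double *)&c matches the ADDR_EXPR/COMPLEX_TYPE case
   in fold_indirect_ref_1 and folds to __real__ c, while
   *((double *)&c + sizeof (double)) matches the POINTER_PLUS_EXPR
   case and folds to __imag__ c.  */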
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
	  unsigned HOST_WIDE_INT high;
	  bool overflow_p;

	  if ((low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  high = TREE_INT_CST_HIGH (value);
	  low &= ~(divisor - 1);
	  low += divisor;
	  if (low == 0)
	    {
	      high++;
	      if (high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), low, high,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop (PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop (BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
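
/* Illustrative sketch (not part of GCC): for a power-of-two divisor
   the bit-manipulation path above computes

     round_up (value, 8)  ==  (value + 7) & -8

   so a 13-byte size is padded to 16, while round_down below simply
   masks with -8 and yields 8 instead.  */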
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
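
/* Illustrative sketch (not part of GCC): for e1 = &a[3] and
   e2 = &a[1] with 4-byte elements, both addresses split to the same
   core A with bit positions 96 and 32, so *diff becomes
   (96 - 32) / 8 = 8 and the function returns true.  */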
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
			    arg0 ? arg0 : TREE_OPERAND (exp, 1),
			    arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand (TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr (get_callee_fndecl (exp), 1, arg0);