1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 /*@@ This file should be rewritten to use an arbitrary precision
24 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
25 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
26 @@ The routines that translate from the ap rep should
27 @@ warn if precision et. al. is lost.
28 @@ This would also make life easier when this technology is used
29 @@ for cross-compilers. */
31 /* The entry points in this file are fold, size_int_wide, size_binop
32 and force_fit_type_double.
34 fold takes a tree as argument and returns a simplified tree.
36 size_binop takes a tree code for an arithmetic operation
37 and two operands that are trees, and produces a tree for the
38 result, assuming the type comes from `sizetype'.
40 size_int takes an integer value, and creates a tree constant
41 with type from `sizetype'.
43 force_fit_type_double takes a constant, an overflowable flag and a
44 prior overflow indicator. It forces the value to fit the type and
47 Note: Since the folders get called on non-gimple code as well as
48 gimple code, we need to handle GIMPLE tuples as well as their
49 corresponding tree equivalents. */
53 #include "coretypes.h"
64 #include "langhooks.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  Initializer folding is more aggressive than general
   expression folding.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.
   Bit 0 encodes "less than", bit 1 "equal", bit 2 "greater than"
   and bit 3 "unordered", so e.g. LE == LT | EQ.
   NOTE(review): the enumerator list was dropped by the extraction;
   restored from the upstream GCC definition — diff against upstream.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
93 static void encode (HOST_WIDE_INT
*, unsigned HOST_WIDE_INT
, HOST_WIDE_INT
);
94 static void decode (HOST_WIDE_INT
*, unsigned HOST_WIDE_INT
*, HOST_WIDE_INT
*);
95 static bool negate_mathfn_p (enum built_in_function
);
96 static bool negate_expr_p (tree
);
97 static tree
negate_expr (tree
);
98 static tree
split_tree (tree
, enum tree_code
, tree
*, tree
*, tree
*, int);
99 static tree
associate_trees (tree
, tree
, enum tree_code
, tree
);
100 static tree
const_binop (enum tree_code
, tree
, tree
, int);
101 static enum comparison_code
comparison_to_compcode (enum tree_code
);
102 static enum tree_code
compcode_to_comparison (enum comparison_code
);
103 static tree
combine_comparisons (enum tree_code
, enum tree_code
,
104 enum tree_code
, tree
, tree
, tree
);
105 static int truth_value_p (enum tree_code
);
106 static int operand_equal_for_comparison_p (tree
, tree
, tree
);
107 static int twoval_comparison_p (tree
, tree
*, tree
*, int *);
108 static tree
eval_subst (tree
, tree
, tree
, tree
, tree
);
109 static tree
pedantic_omit_one_operand (tree
, tree
, tree
);
110 static tree
distribute_bit_expr (enum tree_code
, tree
, tree
, tree
);
111 static tree
make_bit_field_ref (tree
, tree
, int, int, int);
112 static tree
optimize_bit_field_compare (enum tree_code
, tree
, tree
, tree
);
113 static tree
decode_field_reference (tree
, HOST_WIDE_INT
*, HOST_WIDE_INT
*,
114 enum machine_mode
*, int *, int *,
116 static int all_ones_mask_p (tree
, int);
117 static tree
sign_bit_p (tree
, tree
);
118 static int simple_operand_p (tree
);
119 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
120 static tree
range_predecessor (tree
);
121 static tree
range_successor (tree
);
122 static tree
make_range (tree
, int *, tree
*, tree
*);
123 static tree
build_range_check (tree
, tree
, int, tree
, tree
);
124 static int merge_ranges (int *, tree
*, tree
*, int, tree
, tree
, int, tree
,
126 static tree
fold_range_test (enum tree_code
, tree
, tree
, tree
);
127 static tree
fold_cond_expr_with_comparison (tree
, tree
, tree
, tree
);
128 static tree
unextend (tree
, int, int, tree
);
129 static tree
fold_truthop (enum tree_code
, tree
, tree
, tree
);
130 static tree
optimize_minmax_comparison (enum tree_code
, tree
, tree
, tree
);
131 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
);
132 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
);
133 static int multiple_of_p (tree
, tree
, tree
);
134 static tree
fold_binary_op_with_conditional_arg (enum tree_code
, tree
,
137 static bool fold_real_zero_addition_p (tree
, tree
, int);
138 static tree
fold_mathfn_compare (enum built_in_function
, enum tree_code
,
140 static tree
fold_inf_compare (enum tree_code
, tree
, tree
, tree
);
141 static tree
fold_div_compare (enum tree_code
, tree
, tree
, tree
);
142 static bool reorder_operands_p (tree
, tree
);
143 static tree
fold_negate_const (tree
, tree
);
144 static tree
fold_not_const (tree
, tree
);
145 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
146 static int native_encode_expr (tree
, unsigned char *, int);
147 static tree
native_interpret_expr (tree
, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
171 /* Unpack a two-word integer into 4 words.
172 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
173 WORDS points to the array of HOST_WIDE_INTs. */
176 encode (HOST_WIDE_INT
*words
, unsigned HOST_WIDE_INT low
, HOST_WIDE_INT hi
)
178 words
[0] = LOWPART (low
);
179 words
[1] = HIGHPART (low
);
180 words
[2] = LOWPART (hi
);
181 words
[3] = HIGHPART (hi
);
184 /* Pack an array of 4 words into a two-word integer.
185 WORDS points to the array of words.
186 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
189 decode (HOST_WIDE_INT
*words
, unsigned HOST_WIDE_INT
*low
,
192 *low
= words
[0] + words
[1] * BASE
;
193 *hi
= words
[2] + words
[3] * BASE
;
196 /* Force the double-word integer L1, H1 to be within the range of the
197 integer type TYPE. Stores the properly truncated and sign-extended
198 double-word integer in *LV, *HV. Returns true if the operation
199 overflows, that is, argument and result are different. */
202 fit_double_type (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
203 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
, tree type
)
205 unsigned HOST_WIDE_INT low0
= l1
;
206 HOST_WIDE_INT high0
= h1
;
208 int sign_extended_type
;
210 if (POINTER_TYPE_P (type
)
211 || TREE_CODE (type
) == OFFSET_TYPE
)
214 prec
= TYPE_PRECISION (type
);
216 /* Size types *are* sign extended. */
217 sign_extended_type
= (!TYPE_UNSIGNED (type
)
218 || (TREE_CODE (type
) == INTEGER_TYPE
219 && TYPE_IS_SIZETYPE (type
)));
221 /* First clear all bits that are beyond the type's precision. */
222 if (prec
>= 2 * HOST_BITS_PER_WIDE_INT
)
224 else if (prec
> HOST_BITS_PER_WIDE_INT
)
225 h1
&= ~((HOST_WIDE_INT
) (-1) << (prec
- HOST_BITS_PER_WIDE_INT
));
229 if (prec
< HOST_BITS_PER_WIDE_INT
)
230 l1
&= ~((HOST_WIDE_INT
) (-1) << prec
);
233 /* Then do sign extension if necessary. */
234 if (!sign_extended_type
)
235 /* No sign extension */;
236 else if (prec
>= 2 * HOST_BITS_PER_WIDE_INT
)
237 /* Correct width already. */;
238 else if (prec
> HOST_BITS_PER_WIDE_INT
)
240 /* Sign extend top half? */
241 if (h1
& ((unsigned HOST_WIDE_INT
)1
242 << (prec
- HOST_BITS_PER_WIDE_INT
- 1)))
243 h1
|= (HOST_WIDE_INT
) (-1) << (prec
- HOST_BITS_PER_WIDE_INT
);
245 else if (prec
== HOST_BITS_PER_WIDE_INT
)
247 if ((HOST_WIDE_INT
)l1
< 0)
252 /* Sign extend bottom half? */
253 if (l1
& ((unsigned HOST_WIDE_INT
)1 << (prec
- 1)))
256 l1
|= (HOST_WIDE_INT
)(-1) << prec
;
263 /* If the value didn't fit, signal overflow. */
264 return l1
!= low0
|| h1
!= high0
;
267 /* We force the double-int HIGH:LOW to the range of the type TYPE by
268 sign or zero extending it.
269 OVERFLOWABLE indicates if we are interested
270 in overflow of the value, when >0 we are only interested in signed
271 overflow, for <0 we are interested in any overflow. OVERFLOWED
272 indicates whether overflow has already occurred. CONST_OVERFLOWED
273 indicates whether constant overflow has already occurred. We force
274 T's value to be within range of T's type (by setting to 0 or 1 all
275 the bits outside the type's range). We set TREE_OVERFLOWED if,
276 OVERFLOWED is nonzero,
277 or OVERFLOWABLE is >0 and signed overflow occurs
278 or OVERFLOWABLE is <0 and any overflow occurs
279 We return a new tree node for the extended double-int. The node
280 is shared if no overflow flags are set. */
283 force_fit_type_double (tree type
, unsigned HOST_WIDE_INT low
,
284 HOST_WIDE_INT high
, int overflowable
,
287 int sign_extended_type
;
290 /* Size types *are* sign extended. */
291 sign_extended_type
= (!TYPE_UNSIGNED (type
)
292 || (TREE_CODE (type
) == INTEGER_TYPE
293 && TYPE_IS_SIZETYPE (type
)));
295 overflow
= fit_double_type (low
, high
, &low
, &high
, type
);
297 /* If we need to set overflow flags, return a new unshared node. */
298 if (overflowed
|| overflow
)
302 || (overflowable
> 0 && sign_extended_type
))
304 tree t
= make_node (INTEGER_CST
);
305 TREE_INT_CST_LOW (t
) = low
;
306 TREE_INT_CST_HIGH (t
) = high
;
307 TREE_TYPE (t
) = type
;
308 TREE_OVERFLOW (t
) = 1;
313 /* Else build a shared node. */
314 return build_int_cst_wide (type
, low
, high
);
317 /* Add two doubleword integers with doubleword result.
318 Return nonzero if the operation overflows according to UNSIGNED_P.
319 Each argument is given as two `HOST_WIDE_INT' pieces.
320 One argument is L1 and H1; the other, L2 and H2.
321 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
324 add_double_with_sign (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
325 unsigned HOST_WIDE_INT l2
, HOST_WIDE_INT h2
,
326 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
,
329 unsigned HOST_WIDE_INT l
;
333 h
= h1
+ h2
+ (l
< l1
);
339 return (unsigned HOST_WIDE_INT
) h
< (unsigned HOST_WIDE_INT
) h1
;
341 return OVERFLOW_SUM_SIGN (h1
, h2
, h
);
344 /* Negate a doubleword integer with doubleword result.
345 Return nonzero if the operation overflows, assuming it's signed.
346 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
347 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
350 neg_double (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
351 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
)
357 return (*hv
& h1
) < 0;
367 /* Multiply two doubleword integers with doubleword result.
368 Return nonzero if the operation overflows according to UNSIGNED_P.
369 Each argument is given as two `HOST_WIDE_INT' pieces.
370 One argument is L1 and H1; the other, L2 and H2.
371 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
374 mul_double_with_sign (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
375 unsigned HOST_WIDE_INT l2
, HOST_WIDE_INT h2
,
376 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
,
379 HOST_WIDE_INT arg1
[4];
380 HOST_WIDE_INT arg2
[4];
381 HOST_WIDE_INT prod
[4 * 2];
382 unsigned HOST_WIDE_INT carry
;
384 unsigned HOST_WIDE_INT toplow
, neglow
;
385 HOST_WIDE_INT tophigh
, neghigh
;
387 encode (arg1
, l1
, h1
);
388 encode (arg2
, l2
, h2
);
390 memset (prod
, 0, sizeof prod
);
392 for (i
= 0; i
< 4; i
++)
395 for (j
= 0; j
< 4; j
++)
398 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
399 carry
+= arg1
[i
] * arg2
[j
];
400 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
402 prod
[k
] = LOWPART (carry
);
403 carry
= HIGHPART (carry
);
408 decode (prod
, lv
, hv
);
409 decode (prod
+ 4, &toplow
, &tophigh
);
411 /* Unsigned overflow is immediate. */
413 return (toplow
| tophigh
) != 0;
415 /* Check for signed overflow by calculating the signed representation of the
416 top half of the result; it should agree with the low half's sign bit. */
419 neg_double (l2
, h2
, &neglow
, &neghigh
);
420 add_double (neglow
, neghigh
, toplow
, tophigh
, &toplow
, &tophigh
);
424 neg_double (l1
, h1
, &neglow
, &neghigh
);
425 add_double (neglow
, neghigh
, toplow
, tophigh
, &toplow
, &tophigh
);
427 return (*hv
< 0 ? ~(toplow
& tophigh
) : toplow
| tophigh
) != 0;
430 /* Shift the doubleword integer in L1, H1 left by COUNT places
431 keeping only PREC bits of result.
432 Shift right if COUNT is negative.
433 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
434 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
437 lshift_double (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
438 HOST_WIDE_INT count
, unsigned int prec
,
439 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
, int arith
)
441 unsigned HOST_WIDE_INT signmask
;
445 rshift_double (l1
, h1
, -count
, prec
, lv
, hv
, arith
);
449 if (SHIFT_COUNT_TRUNCATED
)
452 if (count
>= 2 * HOST_BITS_PER_WIDE_INT
)
454 /* Shifting by the host word size is undefined according to the
455 ANSI standard, so we must handle this as a special case. */
459 else if (count
>= HOST_BITS_PER_WIDE_INT
)
461 *hv
= l1
<< (count
- HOST_BITS_PER_WIDE_INT
);
466 *hv
= (((unsigned HOST_WIDE_INT
) h1
<< count
)
467 | (l1
>> (HOST_BITS_PER_WIDE_INT
- count
- 1) >> 1));
471 /* Sign extend all bits that are beyond the precision. */
473 signmask
= -((prec
> HOST_BITS_PER_WIDE_INT
474 ? ((unsigned HOST_WIDE_INT
) *hv
475 >> (prec
- HOST_BITS_PER_WIDE_INT
- 1))
476 : (*lv
>> (prec
- 1))) & 1);
478 if (prec
>= 2 * HOST_BITS_PER_WIDE_INT
)
480 else if (prec
>= HOST_BITS_PER_WIDE_INT
)
482 *hv
&= ~((HOST_WIDE_INT
) (-1) << (prec
- HOST_BITS_PER_WIDE_INT
));
483 *hv
|= signmask
<< (prec
- HOST_BITS_PER_WIDE_INT
);
488 *lv
&= ~((unsigned HOST_WIDE_INT
) (-1) << prec
);
489 *lv
|= signmask
<< prec
;
493 /* Shift the doubleword integer in L1, H1 right by COUNT places
494 keeping only PREC bits of result. COUNT must be positive.
495 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
496 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
499 rshift_double (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
500 HOST_WIDE_INT count
, unsigned int prec
,
501 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
,
504 unsigned HOST_WIDE_INT signmask
;
507 ? -((unsigned HOST_WIDE_INT
) h1
>> (HOST_BITS_PER_WIDE_INT
- 1))
510 if (SHIFT_COUNT_TRUNCATED
)
513 if (count
>= 2 * HOST_BITS_PER_WIDE_INT
)
515 /* Shifting by the host word size is undefined according to the
516 ANSI standard, so we must handle this as a special case. */
520 else if (count
>= HOST_BITS_PER_WIDE_INT
)
523 *lv
= (unsigned HOST_WIDE_INT
) h1
>> (count
- HOST_BITS_PER_WIDE_INT
);
527 *hv
= (unsigned HOST_WIDE_INT
) h1
>> count
;
529 | ((unsigned HOST_WIDE_INT
) h1
<< (HOST_BITS_PER_WIDE_INT
- count
- 1) << 1));
532 /* Zero / sign extend all bits that are beyond the precision. */
534 if (count
>= (HOST_WIDE_INT
)prec
)
539 else if ((prec
- count
) >= 2 * HOST_BITS_PER_WIDE_INT
)
541 else if ((prec
- count
) >= HOST_BITS_PER_WIDE_INT
)
543 *hv
&= ~((HOST_WIDE_INT
) (-1) << (prec
- count
- HOST_BITS_PER_WIDE_INT
));
544 *hv
|= signmask
<< (prec
- count
- HOST_BITS_PER_WIDE_INT
);
549 *lv
&= ~((unsigned HOST_WIDE_INT
) (-1) << (prec
- count
));
550 *lv
|= signmask
<< (prec
- count
);
554 /* Rotate the doubleword integer in L1, H1 left by COUNT places
555 keeping only PREC bits of result.
556 Rotate right if COUNT is negative.
557 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
560 lrotate_double (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
561 HOST_WIDE_INT count
, unsigned int prec
,
562 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
)
564 unsigned HOST_WIDE_INT s1l
, s2l
;
565 HOST_WIDE_INT s1h
, s2h
;
571 lshift_double (l1
, h1
, count
, prec
, &s1l
, &s1h
, 0);
572 rshift_double (l1
, h1
, prec
- count
, prec
, &s2l
, &s2h
, 0);
577 /* Rotate the doubleword integer in L1, H1 left by COUNT places
578 keeping only PREC bits of result. COUNT must be positive.
579 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
582 rrotate_double (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
583 HOST_WIDE_INT count
, unsigned int prec
,
584 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
)
586 unsigned HOST_WIDE_INT s1l
, s2l
;
587 HOST_WIDE_INT s1h
, s2h
;
593 rshift_double (l1
, h1
, count
, prec
, &s1l
, &s1h
, 0);
594 lshift_double (l1
, h1
, prec
- count
, prec
, &s2l
, &s2h
, 0);
599 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
600 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
601 CODE is a tree code for a kind of division, one of
602 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
604 It controls how the quotient is rounded to an integer.
605 Return nonzero if the operation overflows.
606 UNS nonzero says do unsigned division. */
609 div_and_round_double (enum tree_code code
, int uns
,
610 unsigned HOST_WIDE_INT lnum_orig
, /* num == numerator == dividend */
611 HOST_WIDE_INT hnum_orig
,
612 unsigned HOST_WIDE_INT lden_orig
, /* den == denominator == divisor */
613 HOST_WIDE_INT hden_orig
,
614 unsigned HOST_WIDE_INT
*lquo
,
615 HOST_WIDE_INT
*hquo
, unsigned HOST_WIDE_INT
*lrem
,
619 HOST_WIDE_INT num
[4 + 1]; /* extra element for scaling. */
620 HOST_WIDE_INT den
[4], quo
[4];
622 unsigned HOST_WIDE_INT work
;
623 unsigned HOST_WIDE_INT carry
= 0;
624 unsigned HOST_WIDE_INT lnum
= lnum_orig
;
625 HOST_WIDE_INT hnum
= hnum_orig
;
626 unsigned HOST_WIDE_INT lden
= lden_orig
;
627 HOST_WIDE_INT hden
= hden_orig
;
630 if (hden
== 0 && lden
== 0)
631 overflow
= 1, lden
= 1;
633 /* Calculate quotient sign and convert operands to unsigned. */
639 /* (minimum integer) / (-1) is the only overflow case. */
640 if (neg_double (lnum
, hnum
, &lnum
, &hnum
)
641 && ((HOST_WIDE_INT
) lden
& hden
) == -1)
647 neg_double (lden
, hden
, &lden
, &hden
);
651 if (hnum
== 0 && hden
== 0)
652 { /* single precision */
654 /* This unsigned division rounds toward zero. */
660 { /* trivial case: dividend < divisor */
661 /* hden != 0 already checked. */
668 memset (quo
, 0, sizeof quo
);
670 memset (num
, 0, sizeof num
); /* to zero 9th element */
671 memset (den
, 0, sizeof den
);
673 encode (num
, lnum
, hnum
);
674 encode (den
, lden
, hden
);
676 /* Special code for when the divisor < BASE. */
677 if (hden
== 0 && lden
< (unsigned HOST_WIDE_INT
) BASE
)
679 /* hnum != 0 already checked. */
680 for (i
= 4 - 1; i
>= 0; i
--)
682 work
= num
[i
] + carry
* BASE
;
683 quo
[i
] = work
/ lden
;
689 /* Full double precision division,
690 with thanks to Don Knuth's "Seminumerical Algorithms". */
691 int num_hi_sig
, den_hi_sig
;
692 unsigned HOST_WIDE_INT quo_est
, scale
;
694 /* Find the highest nonzero divisor digit. */
695 for (i
= 4 - 1;; i
--)
702 /* Insure that the first digit of the divisor is at least BASE/2.
703 This is required by the quotient digit estimation algorithm. */
705 scale
= BASE
/ (den
[den_hi_sig
] + 1);
707 { /* scale divisor and dividend */
709 for (i
= 0; i
<= 4 - 1; i
++)
711 work
= (num
[i
] * scale
) + carry
;
712 num
[i
] = LOWPART (work
);
713 carry
= HIGHPART (work
);
718 for (i
= 0; i
<= 4 - 1; i
++)
720 work
= (den
[i
] * scale
) + carry
;
721 den
[i
] = LOWPART (work
);
722 carry
= HIGHPART (work
);
723 if (den
[i
] != 0) den_hi_sig
= i
;
730 for (i
= num_hi_sig
- den_hi_sig
- 1; i
>= 0; i
--)
732 /* Guess the next quotient digit, quo_est, by dividing the first
733 two remaining dividend digits by the high order quotient digit.
734 quo_est is never low and is at most 2 high. */
735 unsigned HOST_WIDE_INT tmp
;
737 num_hi_sig
= i
+ den_hi_sig
+ 1;
738 work
= num
[num_hi_sig
] * BASE
+ num
[num_hi_sig
- 1];
739 if (num
[num_hi_sig
] != den
[den_hi_sig
])
740 quo_est
= work
/ den
[den_hi_sig
];
744 /* Refine quo_est so it's usually correct, and at most one high. */
745 tmp
= work
- quo_est
* den
[den_hi_sig
];
747 && (den
[den_hi_sig
- 1] * quo_est
748 > (tmp
* BASE
+ num
[num_hi_sig
- 2])))
751 /* Try QUO_EST as the quotient digit, by multiplying the
752 divisor by QUO_EST and subtracting from the remaining dividend.
753 Keep in mind that QUO_EST is the I - 1st digit. */
756 for (j
= 0; j
<= den_hi_sig
; j
++)
758 work
= quo_est
* den
[j
] + carry
;
759 carry
= HIGHPART (work
);
760 work
= num
[i
+ j
] - LOWPART (work
);
761 num
[i
+ j
] = LOWPART (work
);
762 carry
+= HIGHPART (work
) != 0;
765 /* If quo_est was high by one, then num[i] went negative and
766 we need to correct things. */
767 if (num
[num_hi_sig
] < (HOST_WIDE_INT
) carry
)
770 carry
= 0; /* add divisor back in */
771 for (j
= 0; j
<= den_hi_sig
; j
++)
773 work
= num
[i
+ j
] + den
[j
] + carry
;
774 carry
= HIGHPART (work
);
775 num
[i
+ j
] = LOWPART (work
);
778 num
[num_hi_sig
] += carry
;
781 /* Store the quotient digit. */
786 decode (quo
, lquo
, hquo
);
789 /* If result is negative, make it so. */
791 neg_double (*lquo
, *hquo
, lquo
, hquo
);
793 /* Compute trial remainder: rem = num - (quo * den) */
794 mul_double (*lquo
, *hquo
, lden_orig
, hden_orig
, lrem
, hrem
);
795 neg_double (*lrem
, *hrem
, lrem
, hrem
);
796 add_double (lnum_orig
, hnum_orig
, *lrem
, *hrem
, lrem
, hrem
);
801 case TRUNC_MOD_EXPR
: /* round toward zero */
802 case EXACT_DIV_EXPR
: /* for this one, it shouldn't matter */
806 case FLOOR_MOD_EXPR
: /* round toward negative infinity */
807 if (quo_neg
&& (*lrem
!= 0 || *hrem
!= 0)) /* ratio < 0 && rem != 0 */
810 add_double (*lquo
, *hquo
, (HOST_WIDE_INT
) -1, (HOST_WIDE_INT
) -1,
818 case CEIL_MOD_EXPR
: /* round toward positive infinity */
819 if (!quo_neg
&& (*lrem
!= 0 || *hrem
!= 0)) /* ratio > 0 && rem != 0 */
821 add_double (*lquo
, *hquo
, (HOST_WIDE_INT
) 1, (HOST_WIDE_INT
) 0,
829 case ROUND_MOD_EXPR
: /* round to closest integer */
831 unsigned HOST_WIDE_INT labs_rem
= *lrem
;
832 HOST_WIDE_INT habs_rem
= *hrem
;
833 unsigned HOST_WIDE_INT labs_den
= lden
, ltwice
;
834 HOST_WIDE_INT habs_den
= hden
, htwice
;
836 /* Get absolute values. */
838 neg_double (*lrem
, *hrem
, &labs_rem
, &habs_rem
);
840 neg_double (lden
, hden
, &labs_den
, &habs_den
);
842 /* If (2 * abs (lrem) >= abs (lden)) */
843 mul_double ((HOST_WIDE_INT
) 2, (HOST_WIDE_INT
) 0,
844 labs_rem
, habs_rem
, <wice
, &htwice
);
846 if (((unsigned HOST_WIDE_INT
) habs_den
847 < (unsigned HOST_WIDE_INT
) htwice
)
848 || (((unsigned HOST_WIDE_INT
) habs_den
849 == (unsigned HOST_WIDE_INT
) htwice
)
850 && (labs_den
< ltwice
)))
854 add_double (*lquo
, *hquo
,
855 (HOST_WIDE_INT
) -1, (HOST_WIDE_INT
) -1, lquo
, hquo
);
858 add_double (*lquo
, *hquo
, (HOST_WIDE_INT
) 1, (HOST_WIDE_INT
) 0,
870 /* Compute true remainder: rem = num - (quo * den) */
871 mul_double (*lquo
, *hquo
, lden_orig
, hden_orig
, lrem
, hrem
);
872 neg_double (*lrem
, *hrem
, lrem
, hrem
);
873 add_double (lnum_orig
, hnum_orig
, *lrem
, *hrem
, lrem
, hrem
);
877 /* If ARG2 divides ARG1 with zero remainder, carries out the division
878 of type CODE and returns the quotient.
879 Otherwise returns NULL_TREE. */
882 div_if_zero_remainder (enum tree_code code
, tree arg1
, tree arg2
)
884 unsigned HOST_WIDE_INT int1l
, int2l
;
885 HOST_WIDE_INT int1h
, int2h
;
886 unsigned HOST_WIDE_INT quol
, reml
;
887 HOST_WIDE_INT quoh
, remh
;
888 tree type
= TREE_TYPE (arg1
);
889 int uns
= TYPE_UNSIGNED (type
);
891 int1l
= TREE_INT_CST_LOW (arg1
);
892 int1h
= TREE_INT_CST_HIGH (arg1
);
893 int2l
= TREE_INT_CST_LOW (arg2
);
894 int2h
= TREE_INT_CST_HIGH (arg2
);
896 div_and_round_double (code
, uns
, int1l
, int1h
, int2l
, int2h
,
897 &quol
, &quoh
, &reml
, &remh
);
898 if (remh
!= 0 || reml
!= 0)
901 return build_int_cst_wide (type
, quol
, quoh
);
904 /* Return true if the built-in mathematical function specified by CODE
905 is odd, i.e. -f(x) == f(-x). */
908 negate_mathfn_p (enum built_in_function code
)
912 CASE_FLT_FN (BUILT_IN_ASIN
):
913 CASE_FLT_FN (BUILT_IN_ASINH
):
914 CASE_FLT_FN (BUILT_IN_ATAN
):
915 CASE_FLT_FN (BUILT_IN_ATANH
):
916 CASE_FLT_FN (BUILT_IN_CASIN
):
917 CASE_FLT_FN (BUILT_IN_CASINH
):
918 CASE_FLT_FN (BUILT_IN_CATAN
):
919 CASE_FLT_FN (BUILT_IN_CATANH
):
920 CASE_FLT_FN (BUILT_IN_CBRT
):
921 CASE_FLT_FN (BUILT_IN_CPROJ
):
922 CASE_FLT_FN (BUILT_IN_CSIN
):
923 CASE_FLT_FN (BUILT_IN_CSINH
):
924 CASE_FLT_FN (BUILT_IN_CTAN
):
925 CASE_FLT_FN (BUILT_IN_CTANH
):
926 CASE_FLT_FN (BUILT_IN_ERF
):
927 CASE_FLT_FN (BUILT_IN_LLROUND
):
928 CASE_FLT_FN (BUILT_IN_LROUND
):
929 CASE_FLT_FN (BUILT_IN_ROUND
):
930 CASE_FLT_FN (BUILT_IN_SIN
):
931 CASE_FLT_FN (BUILT_IN_SINH
):
932 CASE_FLT_FN (BUILT_IN_TAN
):
933 CASE_FLT_FN (BUILT_IN_TANH
):
934 CASE_FLT_FN (BUILT_IN_TRUNC
):
937 CASE_FLT_FN (BUILT_IN_LLRINT
):
938 CASE_FLT_FN (BUILT_IN_LRINT
):
939 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
940 CASE_FLT_FN (BUILT_IN_RINT
):
941 return !flag_rounding_math
;
949 /* Check whether we may negate an integer constant T without causing
953 may_negate_without_overflow_p (tree t
)
955 unsigned HOST_WIDE_INT val
;
959 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
961 type
= TREE_TYPE (t
);
962 if (TYPE_UNSIGNED (type
))
965 prec
= TYPE_PRECISION (type
);
966 if (prec
> HOST_BITS_PER_WIDE_INT
)
968 if (TREE_INT_CST_LOW (t
) != 0)
970 prec
-= HOST_BITS_PER_WIDE_INT
;
971 val
= TREE_INT_CST_HIGH (t
);
974 val
= TREE_INT_CST_LOW (t
);
975 if (prec
< HOST_BITS_PER_WIDE_INT
)
976 val
&= ((unsigned HOST_WIDE_INT
) 1 << prec
) - 1;
977 return val
!= ((unsigned HOST_WIDE_INT
) 1 << (prec
- 1));
980 /* Determine whether an expression T can be cheaply negated using
981 the function negate_expr without introducing undefined overflow. */
984 negate_expr_p (tree t
)
991 type
= TREE_TYPE (t
);
994 switch (TREE_CODE (t
))
997 if (TYPE_OVERFLOW_WRAPS (type
))
1000 /* Check that -CST will not overflow type. */
1001 return may_negate_without_overflow_p (t
);
1003 return (INTEGRAL_TYPE_P (type
)
1004 && TYPE_OVERFLOW_WRAPS (type
));
1011 return negate_expr_p (TREE_REALPART (t
))
1012 && negate_expr_p (TREE_IMAGPART (t
));
1015 return negate_expr_p (TREE_OPERAND (t
, 0))
1016 && negate_expr_p (TREE_OPERAND (t
, 1));
1019 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
1020 || HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
1022 /* -(A + B) -> (-B) - A. */
1023 if (negate_expr_p (TREE_OPERAND (t
, 1))
1024 && reorder_operands_p (TREE_OPERAND (t
, 0),
1025 TREE_OPERAND (t
, 1)))
1027 /* -(A + B) -> (-A) - B. */
1028 return negate_expr_p (TREE_OPERAND (t
, 0));
1031 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1032 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
1033 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
1034 && reorder_operands_p (TREE_OPERAND (t
, 0),
1035 TREE_OPERAND (t
, 1));
1038 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
1044 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t
))))
1045 return negate_expr_p (TREE_OPERAND (t
, 1))
1046 || negate_expr_p (TREE_OPERAND (t
, 0));
1049 case TRUNC_DIV_EXPR
:
1050 case ROUND_DIV_EXPR
:
1051 case FLOOR_DIV_EXPR
:
1053 case EXACT_DIV_EXPR
:
1054 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
1055 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
)))
1057 return negate_expr_p (TREE_OPERAND (t
, 1))
1058 || negate_expr_p (TREE_OPERAND (t
, 0));
1061 /* Negate -((double)float) as (double)(-float). */
1062 if (TREE_CODE (type
) == REAL_TYPE
)
1064 tree tem
= strip_float_extensions (t
);
1066 return negate_expr_p (tem
);
1071 /* Negate -f(x) as f(-x). */
1072 if (negate_mathfn_p (builtin_mathfn_code (t
)))
1073 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t
, 1)));
1077 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1078 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
1080 tree op1
= TREE_OPERAND (t
, 1);
1081 if (TREE_INT_CST_HIGH (op1
) == 0
1082 && (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (type
) - 1)
1083 == TREE_INT_CST_LOW (op1
))
1094 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1095 simplification is possible.
1096 If negate_expr_p would return true for T, NULL_TREE will never be
1100 fold_negate_expr (tree t
)
1102 tree type
= TREE_TYPE (t
);
1105 switch (TREE_CODE (t
))
1107 /* Convert - (~A) to A + 1. */
1109 if (INTEGRAL_TYPE_P (type
))
1110 return fold_build2 (PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
1111 build_int_cst (type
, 1));
1115 tem
= fold_negate_const (t
, type
);
1116 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
1117 || !TYPE_OVERFLOW_TRAPS (type
))
1122 tem
= fold_negate_const (t
, type
);
1123 /* Two's complement FP formats, such as c4x, may overflow. */
1124 if (!TREE_OVERFLOW (tem
) || !flag_trapping_math
)
1130 tree rpart
= negate_expr (TREE_REALPART (t
));
1131 tree ipart
= negate_expr (TREE_IMAGPART (t
));
1133 if ((TREE_CODE (rpart
) == REAL_CST
1134 && TREE_CODE (ipart
) == REAL_CST
)
1135 || (TREE_CODE (rpart
) == INTEGER_CST
1136 && TREE_CODE (ipart
) == INTEGER_CST
))
1137 return build_complex (type
, rpart
, ipart
);
1142 if (negate_expr_p (t
))
1143 return fold_build2 (COMPLEX_EXPR
, type
,
1144 fold_negate_expr (TREE_OPERAND (t
, 0)),
1145 fold_negate_expr (TREE_OPERAND (t
, 1)));
1149 return TREE_OPERAND (t
, 0);
1152 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
1153 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
1155 /* -(A + B) -> (-B) - A. */
1156 if (negate_expr_p (TREE_OPERAND (t
, 1))
1157 && reorder_operands_p (TREE_OPERAND (t
, 0),
1158 TREE_OPERAND (t
, 1)))
1160 tem
= negate_expr (TREE_OPERAND (t
, 1));
1161 return fold_build2 (MINUS_EXPR
, type
,
1162 tem
, TREE_OPERAND (t
, 0));
1165 /* -(A + B) -> (-A) - B. */
1166 if (negate_expr_p (TREE_OPERAND (t
, 0)))
1168 tem
= negate_expr (TREE_OPERAND (t
, 0));
1169 return fold_build2 (MINUS_EXPR
, type
,
1170 tem
, TREE_OPERAND (t
, 1));
1176 /* - (A - B) -> B - A */
1177 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
1178 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
1179 && reorder_operands_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1)))
1180 return fold_build2 (MINUS_EXPR
, type
,
1181 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
1185 if (TYPE_UNSIGNED (type
))
1191 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
)))
1193 tem
= TREE_OPERAND (t
, 1);
1194 if (negate_expr_p (tem
))
1195 return fold_build2 (TREE_CODE (t
), type
,
1196 TREE_OPERAND (t
, 0), negate_expr (tem
));
1197 tem
= TREE_OPERAND (t
, 0);
1198 if (negate_expr_p (tem
))
1199 return fold_build2 (TREE_CODE (t
), type
,
1200 negate_expr (tem
), TREE_OPERAND (t
, 1));
1204 case TRUNC_DIV_EXPR
:
1205 case ROUND_DIV_EXPR
:
1206 case FLOOR_DIV_EXPR
:
1208 case EXACT_DIV_EXPR
:
1209 if (!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
1211 tem
= TREE_OPERAND (t
, 1);
1212 if (negate_expr_p (tem
))
1213 return fold_build2 (TREE_CODE (t
), type
,
1214 TREE_OPERAND (t
, 0), negate_expr (tem
));
1215 tem
= TREE_OPERAND (t
, 0);
1216 if (negate_expr_p (tem
))
1217 return fold_build2 (TREE_CODE (t
), type
,
1218 negate_expr (tem
), TREE_OPERAND (t
, 1));
1223 /* Convert -((double)float) into (double)(-float). */
1224 if (TREE_CODE (type
) == REAL_TYPE
)
1226 tem
= strip_float_extensions (t
);
1227 if (tem
!= t
&& negate_expr_p (tem
))
1228 return negate_expr (tem
);
1233 /* Negate -f(x) as f(-x). */
1234 if (negate_mathfn_p (builtin_mathfn_code (t
))
1235 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t
, 1))))
1237 tree fndecl
, arg
, arglist
;
1239 fndecl
= get_callee_fndecl (t
);
1240 arg
= negate_expr (TREE_VALUE (TREE_OPERAND (t
, 1)));
1241 arglist
= build_tree_list (NULL_TREE
, arg
);
1242 return build_function_call_expr (fndecl
, arglist
);
1247 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1248 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
1250 tree op1
= TREE_OPERAND (t
, 1);
1251 if (TREE_INT_CST_HIGH (op1
) == 0
1252 && (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (type
) - 1)
1253 == TREE_INT_CST_LOW (op1
))
1255 tree ntype
= TYPE_UNSIGNED (type
)
1256 ? lang_hooks
.types
.signed_type (type
)
1257 : lang_hooks
.types
.unsigned_type (type
);
1258 tree temp
= fold_convert (ntype
, TREE_OPERAND (t
, 0));
1259 temp
= fold_build2 (RSHIFT_EXPR
, ntype
, temp
, op1
);
1260 return fold_convert (type
, temp
);
1272 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1273 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1274 return NULL_TREE. */
1277 negate_expr (tree t
)
1284 type
= TREE_TYPE (t
);
1285 STRIP_SIGN_NOPS (t
);
1287 tem
= fold_negate_expr (t
);
1289 tem
= build1 (NEGATE_EXPR
, TREE_TYPE (t
), t
);
1290 return fold_convert (type
, tem
);
1293 /* Split a tree IN into a constant, literal and variable parts that could be
1294 combined with CODE to make IN. "constant" means an expression with
1295 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1296 commutative arithmetic operation. Store the constant part into *CONP,
1297 the literal in *LITP and return the variable part. If a part isn't
1298 present, set it to null. If the tree does not decompose in this way,
1299 return the entire tree as the variable part and the other parts as null.
1301 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1302 case, we negate an operand that was subtracted. Except if it is a
1303 literal for which we use *MINUS_LITP instead.
1305 If NEGATE_P is true, we are negating all of IN, again except a literal
1306 for which we use *MINUS_LITP instead.
1308 If IN is itself a literal or constant, return it as appropriate.
1310 Note that we do not guarantee that any of the three values will be the
1311 same type as IN, but they will have the same signedness and mode. */
1314 split_tree (tree in
, enum tree_code code
, tree
*conp
, tree
*litp
,
1315 tree
*minus_litp
, int negate_p
)
1323 /* Strip any conversions that don't change the machine mode or signedness. */
1324 STRIP_SIGN_NOPS (in
);
1326 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
)
1328 else if (TREE_CODE (in
) == code
1329 || (! FLOAT_TYPE_P (TREE_TYPE (in
))
1330 /* We can associate addition and subtraction together (even
1331 though the C standard doesn't say so) for integers because
1332 the value is not affected. For reals, the value might be
1333 affected, so we can't. */
1334 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
1335 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
1337 tree op0
= TREE_OPERAND (in
, 0);
1338 tree op1
= TREE_OPERAND (in
, 1);
1339 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
1340 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
1342 /* First see if either of the operands is a literal, then a constant. */
1343 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
)
1344 *litp
= op0
, op0
= 0;
1345 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
)
1346 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
1348 if (op0
!= 0 && TREE_CONSTANT (op0
))
1349 *conp
= op0
, op0
= 0;
1350 else if (op1
!= 0 && TREE_CONSTANT (op1
))
1351 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
1353 /* If we haven't dealt with either operand, this is not a case we can
1354 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1355 if (op0
!= 0 && op1
!= 0)
1360 var
= op1
, neg_var_p
= neg1_p
;
1362 /* Now do any needed negations. */
1364 *minus_litp
= *litp
, *litp
= 0;
1366 *conp
= negate_expr (*conp
);
1368 var
= negate_expr (var
);
1370 else if (TREE_CONSTANT (in
))
1378 *minus_litp
= *litp
, *litp
= 0;
1379 else if (*minus_litp
)
1380 *litp
= *minus_litp
, *minus_litp
= 0;
1381 *conp
= negate_expr (*conp
);
1382 var
= negate_expr (var
);
1388 /* Re-associate trees split by the above function. T1 and T2 are either
1389 expressions to associate or null. Return the new expression, if any. If
1390 we build an operation, do it in TYPE and with CODE. */
1393 associate_trees (tree t1
, tree t2
, enum tree_code code
, tree type
)
1400 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1401 try to fold this since we will have infinite recursion. But do
1402 deal with any NEGATE_EXPRs. */
1403 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
1404 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
1406 if (code
== PLUS_EXPR
)
1408 if (TREE_CODE (t1
) == NEGATE_EXPR
)
1409 return build2 (MINUS_EXPR
, type
, fold_convert (type
, t2
),
1410 fold_convert (type
, TREE_OPERAND (t1
, 0)));
1411 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
1412 return build2 (MINUS_EXPR
, type
, fold_convert (type
, t1
),
1413 fold_convert (type
, TREE_OPERAND (t2
, 0)));
1414 else if (integer_zerop (t2
))
1415 return fold_convert (type
, t1
);
1417 else if (code
== MINUS_EXPR
)
1419 if (integer_zerop (t2
))
1420 return fold_convert (type
, t1
);
1423 return build2 (code
, type
, fold_convert (type
, t1
),
1424 fold_convert (type
, t2
));
1427 return fold_build2 (code
, type
, fold_convert (type
, t1
),
1428 fold_convert (type
, t2
));
1431 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1432 for use in int_const_binop, size_binop and size_diffop. */
1435 int_binop_types_match_p (enum tree_code code
, tree type1
, tree type2
)
1437 if (TREE_CODE (type1
) != INTEGER_TYPE
&& !POINTER_TYPE_P (type1
))
1439 if (TREE_CODE (type2
) != INTEGER_TYPE
&& !POINTER_TYPE_P (type2
))
1454 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
1455 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
1456 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
1460 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1461 to produce a new constant. Return NULL_TREE if we don't know how
1462 to evaluate CODE at compile-time.
1464 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1467 int_const_binop (enum tree_code code
, tree arg1
, tree arg2
, int notrunc
)
1469 unsigned HOST_WIDE_INT int1l
, int2l
;
1470 HOST_WIDE_INT int1h
, int2h
;
1471 unsigned HOST_WIDE_INT low
;
1473 unsigned HOST_WIDE_INT garbagel
;
1474 HOST_WIDE_INT garbageh
;
1476 tree type
= TREE_TYPE (arg1
);
1477 int uns
= TYPE_UNSIGNED (type
);
1479 = (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (type
));
1482 int1l
= TREE_INT_CST_LOW (arg1
);
1483 int1h
= TREE_INT_CST_HIGH (arg1
);
1484 int2l
= TREE_INT_CST_LOW (arg2
);
1485 int2h
= TREE_INT_CST_HIGH (arg2
);
1490 low
= int1l
| int2l
, hi
= int1h
| int2h
;
1494 low
= int1l
^ int2l
, hi
= int1h
^ int2h
;
1498 low
= int1l
& int2l
, hi
= int1h
& int2h
;
1504 /* It's unclear from the C standard whether shifts can overflow.
1505 The following code ignores overflow; perhaps a C standard
1506 interpretation ruling is needed. */
1507 lshift_double (int1l
, int1h
, int2l
, TYPE_PRECISION (type
),
1514 lrotate_double (int1l
, int1h
, int2l
, TYPE_PRECISION (type
),
1519 overflow
= add_double (int1l
, int1h
, int2l
, int2h
, &low
, &hi
);
1523 neg_double (int2l
, int2h
, &low
, &hi
);
1524 add_double (int1l
, int1h
, low
, hi
, &low
, &hi
);
1525 overflow
= OVERFLOW_SUM_SIGN (hi
, int2h
, int1h
);
1529 overflow
= mul_double (int1l
, int1h
, int2l
, int2h
, &low
, &hi
);
1532 case TRUNC_DIV_EXPR
:
1533 case FLOOR_DIV_EXPR
: case CEIL_DIV_EXPR
:
1534 case EXACT_DIV_EXPR
:
1535 /* This is a shortcut for a common special case. */
1536 if (int2h
== 0 && (HOST_WIDE_INT
) int2l
> 0
1537 && !TREE_OVERFLOW (arg1
)
1538 && !TREE_OVERFLOW (arg2
)
1539 && int1h
== 0 && (HOST_WIDE_INT
) int1l
>= 0)
1541 if (code
== CEIL_DIV_EXPR
)
1544 low
= int1l
/ int2l
, hi
= 0;
1548 /* ... fall through ... */
1550 case ROUND_DIV_EXPR
:
1551 if (int2h
== 0 && int2l
== 0)
1553 if (int2h
== 0 && int2l
== 1)
1555 low
= int1l
, hi
= int1h
;
1558 if (int1l
== int2l
&& int1h
== int2h
1559 && ! (int1l
== 0 && int1h
== 0))
1564 overflow
= div_and_round_double (code
, uns
, int1l
, int1h
, int2l
, int2h
,
1565 &low
, &hi
, &garbagel
, &garbageh
);
1568 case TRUNC_MOD_EXPR
:
1569 case FLOOR_MOD_EXPR
: case CEIL_MOD_EXPR
:
1570 /* This is a shortcut for a common special case. */
1571 if (int2h
== 0 && (HOST_WIDE_INT
) int2l
> 0
1572 && !TREE_OVERFLOW (arg1
)
1573 && !TREE_OVERFLOW (arg2
)
1574 && int1h
== 0 && (HOST_WIDE_INT
) int1l
>= 0)
1576 if (code
== CEIL_MOD_EXPR
)
1578 low
= int1l
% int2l
, hi
= 0;
1582 /* ... fall through ... */
1584 case ROUND_MOD_EXPR
:
1585 if (int2h
== 0 && int2l
== 0)
1587 overflow
= div_and_round_double (code
, uns
,
1588 int1l
, int1h
, int2l
, int2h
,
1589 &garbagel
, &garbageh
, &low
, &hi
);
1595 low
= (((unsigned HOST_WIDE_INT
) int1h
1596 < (unsigned HOST_WIDE_INT
) int2h
)
1597 || (((unsigned HOST_WIDE_INT
) int1h
1598 == (unsigned HOST_WIDE_INT
) int2h
)
1601 low
= (int1h
< int2h
1602 || (int1h
== int2h
&& int1l
< int2l
));
1604 if (low
== (code
== MIN_EXPR
))
1605 low
= int1l
, hi
= int1h
;
1607 low
= int2l
, hi
= int2h
;
1616 t
= build_int_cst_wide (TREE_TYPE (arg1
), low
, hi
);
1618 /* Propagate overflow flags ourselves. */
1619 if (((!uns
|| is_sizetype
) && overflow
)
1620 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1623 TREE_OVERFLOW (t
) = 1;
1627 t
= force_fit_type_double (TREE_TYPE (arg1
), low
, hi
, 1,
1628 ((!uns
|| is_sizetype
) && overflow
)
1629 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
));
1634 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1635 constant. We assume ARG1 and ARG2 have the same data type, or at least
1636 are the same kind of constant and the same machine mode. Return zero if
1637 combining the constants is not allowed in the current operating mode.
1639 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1642 const_binop (enum tree_code code
, tree arg1
, tree arg2
, int notrunc
)
1644 /* Sanity check for the recursive cases. */
1651 if (TREE_CODE (arg1
) == INTEGER_CST
)
1652 return int_const_binop (code
, arg1
, arg2
, notrunc
);
1654 if (TREE_CODE (arg1
) == REAL_CST
)
1656 enum machine_mode mode
;
1659 REAL_VALUE_TYPE value
;
1660 REAL_VALUE_TYPE result
;
1664 /* The following codes are handled by real_arithmetic. */
1679 d1
= TREE_REAL_CST (arg1
);
1680 d2
= TREE_REAL_CST (arg2
);
1682 type
= TREE_TYPE (arg1
);
1683 mode
= TYPE_MODE (type
);
1685 /* Don't perform operation if we honor signaling NaNs and
1686 either operand is a NaN. */
1687 if (HONOR_SNANS (mode
)
1688 && (REAL_VALUE_ISNAN (d1
) || REAL_VALUE_ISNAN (d2
)))
1691 /* Don't perform operation if it would raise a division
1692 by zero exception. */
1693 if (code
== RDIV_EXPR
1694 && REAL_VALUES_EQUAL (d2
, dconst0
)
1695 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1698 /* If either operand is a NaN, just return it. Otherwise, set up
1699 for floating-point trap; we return an overflow. */
1700 if (REAL_VALUE_ISNAN (d1
))
1702 else if (REAL_VALUE_ISNAN (d2
))
1705 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1706 real_convert (&result
, mode
, &value
);
1708 /* Don't constant fold this floating point operation if
1709 the result has overflowed and flag_trapping_math. */
1710 if (flag_trapping_math
1711 && MODE_HAS_INFINITIES (mode
)
1712 && REAL_VALUE_ISINF (result
)
1713 && !REAL_VALUE_ISINF (d1
)
1714 && !REAL_VALUE_ISINF (d2
))
1717 /* Don't constant fold this floating point operation if the
1718 result may dependent upon the run-time rounding mode and
1719 flag_rounding_math is set, or if GCC's software emulation
1720 is unable to accurately represent the result. */
1721 if ((flag_rounding_math
1722 || (REAL_MODE_FORMAT_COMPOSITE_P (mode
)
1723 && !flag_unsafe_math_optimizations
))
1724 && (inexact
|| !real_identical (&result
, &value
)))
1727 t
= build_real (type
, result
);
1729 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1733 if (TREE_CODE (arg1
) == COMPLEX_CST
)
1735 tree type
= TREE_TYPE (arg1
);
1736 tree r1
= TREE_REALPART (arg1
);
1737 tree i1
= TREE_IMAGPART (arg1
);
1738 tree r2
= TREE_REALPART (arg2
);
1739 tree i2
= TREE_IMAGPART (arg2
);
1746 real
= const_binop (code
, r1
, r2
, notrunc
);
1747 imag
= const_binop (code
, i1
, i2
, notrunc
);
1751 real
= const_binop (MINUS_EXPR
,
1752 const_binop (MULT_EXPR
, r1
, r2
, notrunc
),
1753 const_binop (MULT_EXPR
, i1
, i2
, notrunc
),
1755 imag
= const_binop (PLUS_EXPR
,
1756 const_binop (MULT_EXPR
, r1
, i2
, notrunc
),
1757 const_binop (MULT_EXPR
, i1
, r2
, notrunc
),
1764 = const_binop (PLUS_EXPR
,
1765 const_binop (MULT_EXPR
, r2
, r2
, notrunc
),
1766 const_binop (MULT_EXPR
, i2
, i2
, notrunc
),
1769 = const_binop (PLUS_EXPR
,
1770 const_binop (MULT_EXPR
, r1
, r2
, notrunc
),
1771 const_binop (MULT_EXPR
, i1
, i2
, notrunc
),
1774 = const_binop (MINUS_EXPR
,
1775 const_binop (MULT_EXPR
, i1
, r2
, notrunc
),
1776 const_binop (MULT_EXPR
, r1
, i2
, notrunc
),
1779 if (INTEGRAL_TYPE_P (TREE_TYPE (r1
)))
1780 code
= TRUNC_DIV_EXPR
;
1782 real
= const_binop (code
, t1
, magsquared
, notrunc
);
1783 imag
= const_binop (code
, t2
, magsquared
, notrunc
);
1792 return build_complex (type
, real
, imag
);
1798 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1799 indicates which particular sizetype to create. */
1802 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
1804 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1807 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1808 is a tree code. The type of the result is taken from the operands.
1809 Both must be equivalent integer types, ala int_binop_types_match_p.
1810 If the operands are constant, so is the result. */
1813 size_binop (enum tree_code code
, tree arg0
, tree arg1
)
1815 tree type
= TREE_TYPE (arg0
);
1817 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1818 return error_mark_node
;
1820 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1823 /* Handle the special case of two integer constants faster. */
1824 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1826 /* And some specific cases even faster than that. */
1827 if (code
== PLUS_EXPR
)
1829 if (integer_zerop (arg0
) && !TREE_OVERFLOW (arg0
))
1831 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1834 else if (code
== MINUS_EXPR
)
1836 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1839 else if (code
== MULT_EXPR
)
1841 if (integer_onep (arg0
) && !TREE_OVERFLOW (arg0
))
1845 /* Handle general case of two integer constants. */
1846 return int_const_binop (code
, arg0
, arg1
, 0);
1849 return fold_build2 (code
, type
, arg0
, arg1
);
1852 /* Given two values, either both of sizetype or both of bitsizetype,
1853 compute the difference between the two values. Return the value
1854 in signed type corresponding to the type of the operands. */
1857 size_diffop (tree arg0
, tree arg1
)
1859 tree type
= TREE_TYPE (arg0
);
1862 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1865 /* If the type is already signed, just do the simple thing. */
1866 if (!TYPE_UNSIGNED (type
))
1867 return size_binop (MINUS_EXPR
, arg0
, arg1
);
1869 if (type
== sizetype
)
1871 else if (type
== bitsizetype
)
1872 ctype
= sbitsizetype
;
1874 ctype
= lang_hooks
.types
.signed_type (type
);
1876 /* If either operand is not a constant, do the conversions to the signed
1877 type and subtract. The hardware will do the right thing with any
1878 overflow in the subtraction. */
1879 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1880 return size_binop (MINUS_EXPR
, fold_convert (ctype
, arg0
),
1881 fold_convert (ctype
, arg1
));
1883 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1884 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1885 overflow) and negate (which can't either). Special-case a result
1886 of zero while we're here. */
1887 if (tree_int_cst_equal (arg0
, arg1
))
1888 return build_int_cst (ctype
, 0);
1889 else if (tree_int_cst_lt (arg1
, arg0
))
1890 return fold_convert (ctype
, size_binop (MINUS_EXPR
, arg0
, arg1
));
1892 return size_binop (MINUS_EXPR
, build_int_cst (ctype
, 0),
1893 fold_convert (ctype
, size_binop (MINUS_EXPR
,
1897 /* A subroutine of fold_convert_const handling conversions of an
1898 INTEGER_CST to another integer type. */
1901 fold_convert_const_int_from_int (tree type
, tree arg1
)
1905 /* Given an integer constant, make new constant with new type,
1906 appropriately sign-extended or truncated. */
1907 t
= force_fit_type_double (type
, TREE_INT_CST_LOW (arg1
),
1908 TREE_INT_CST_HIGH (arg1
),
1909 /* Don't set the overflow when
1910 converting a pointer */
1911 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
1912 (TREE_INT_CST_HIGH (arg1
) < 0
1913 && (TYPE_UNSIGNED (type
)
1914 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1915 | TREE_OVERFLOW (arg1
));
1920 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1921 to an integer type. */
1924 fold_convert_const_int_from_real (enum tree_code code
, tree type
, tree arg1
)
1929 /* The following code implements the floating point to integer
1930 conversion rules required by the Java Language Specification,
1931 that IEEE NaNs are mapped to zero and values that overflow
1932 the target precision saturate, i.e. values greater than
1933 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1934 are mapped to INT_MIN. These semantics are allowed by the
1935 C and C++ standards that simply state that the behavior of
1936 FP-to-integer conversion is unspecified upon overflow. */
1938 HOST_WIDE_INT high
, low
;
1940 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
1944 case FIX_TRUNC_EXPR
:
1945 real_trunc (&r
, VOIDmode
, &x
);
1952 /* If R is NaN, return zero and show we have an overflow. */
1953 if (REAL_VALUE_ISNAN (r
))
1960 /* See if R is less than the lower bound or greater than the
1965 tree lt
= TYPE_MIN_VALUE (type
);
1966 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
1967 if (REAL_VALUES_LESS (r
, l
))
1970 high
= TREE_INT_CST_HIGH (lt
);
1971 low
= TREE_INT_CST_LOW (lt
);
1977 tree ut
= TYPE_MAX_VALUE (type
);
1980 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
1981 if (REAL_VALUES_LESS (u
, r
))
1984 high
= TREE_INT_CST_HIGH (ut
);
1985 low
= TREE_INT_CST_LOW (ut
);
1991 REAL_VALUE_TO_INT (&low
, &high
, r
);
1993 t
= force_fit_type_double (type
, low
, high
, -1,
1994 overflow
| TREE_OVERFLOW (arg1
));
1998 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1999 to another floating point type. */
2002 fold_convert_const_real_from_real (tree type
, tree arg1
)
2004 REAL_VALUE_TYPE value
;
2007 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
2008 t
= build_real (type
, value
);
2010 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2014 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2015 type TYPE. If no simplification can be done return NULL_TREE. */
2018 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
2020 if (TREE_TYPE (arg1
) == type
)
2023 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
))
2025 if (TREE_CODE (arg1
) == INTEGER_CST
)
2026 return fold_convert_const_int_from_int (type
, arg1
);
2027 else if (TREE_CODE (arg1
) == REAL_CST
)
2028 return fold_convert_const_int_from_real (code
, type
, arg1
);
2030 else if (TREE_CODE (type
) == REAL_TYPE
)
2032 if (TREE_CODE (arg1
) == INTEGER_CST
)
2033 return build_real_from_int_cst (type
, arg1
);
2034 if (TREE_CODE (arg1
) == REAL_CST
)
2035 return fold_convert_const_real_from_real (type
, arg1
);
2040 /* Construct a vector of zero elements of vector type TYPE. */
2043 build_zero_vector (tree type
)
2048 elem
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
2049 units
= TYPE_VECTOR_SUBPARTS (type
);
2052 for (i
= 0; i
< units
; i
++)
2053 list
= tree_cons (NULL_TREE
, elem
, list
);
2054 return build_vector (type
, list
);
2057 /* Convert expression ARG to type TYPE. Used by the middle-end for
2058 simple conversions in preference to calling the front-end's convert. */
2061 fold_convert (tree type
, tree arg
)
2063 tree orig
= TREE_TYPE (arg
);
2069 if (TREE_CODE (arg
) == ERROR_MARK
2070 || TREE_CODE (type
) == ERROR_MARK
2071 || TREE_CODE (orig
) == ERROR_MARK
)
2072 return error_mark_node
;
2074 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
)
2075 || lang_hooks
.types_compatible_p (TYPE_MAIN_VARIANT (type
),
2076 TYPE_MAIN_VARIANT (orig
)))
2077 return fold_build1 (NOP_EXPR
, type
, arg
);
2079 switch (TREE_CODE (type
))
2081 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2082 case POINTER_TYPE
: case REFERENCE_TYPE
:
2084 if (TREE_CODE (arg
) == INTEGER_CST
)
2086 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2087 if (tem
!= NULL_TREE
)
2090 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2091 || TREE_CODE (orig
) == OFFSET_TYPE
)
2092 return fold_build1 (NOP_EXPR
, type
, arg
);
2093 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
2095 tem
= fold_build1 (REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2096 return fold_convert (type
, tem
);
2098 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
2099 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2100 return fold_build1 (NOP_EXPR
, type
, arg
);
2103 if (TREE_CODE (arg
) == INTEGER_CST
)
2105 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
2106 if (tem
!= NULL_TREE
)
2109 else if (TREE_CODE (arg
) == REAL_CST
)
2111 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2112 if (tem
!= NULL_TREE
)
2116 switch (TREE_CODE (orig
))
2119 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2120 case POINTER_TYPE
: case REFERENCE_TYPE
:
2121 return fold_build1 (FLOAT_EXPR
, type
, arg
);
2124 return fold_build1 (NOP_EXPR
, type
, arg
);
2127 tem
= fold_build1 (REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2128 return fold_convert (type
, tem
);
2135 switch (TREE_CODE (orig
))
2138 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2139 case POINTER_TYPE
: case REFERENCE_TYPE
:
2141 return build2 (COMPLEX_EXPR
, type
,
2142 fold_convert (TREE_TYPE (type
), arg
),
2143 fold_convert (TREE_TYPE (type
), integer_zero_node
));
2148 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
2150 rpart
= fold_convert (TREE_TYPE (type
), TREE_OPERAND (arg
, 0));
2151 ipart
= fold_convert (TREE_TYPE (type
), TREE_OPERAND (arg
, 1));
2152 return fold_build2 (COMPLEX_EXPR
, type
, rpart
, ipart
);
2155 arg
= save_expr (arg
);
2156 rpart
= fold_build1 (REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2157 ipart
= fold_build1 (IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2158 rpart
= fold_convert (TREE_TYPE (type
), rpart
);
2159 ipart
= fold_convert (TREE_TYPE (type
), ipart
);
2160 return fold_build2 (COMPLEX_EXPR
, type
, rpart
, ipart
);
2168 if (integer_zerop (arg
))
2169 return build_zero_vector (type
);
2170 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2171 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2172 || TREE_CODE (orig
) == VECTOR_TYPE
);
2173 return fold_build1 (VIEW_CONVERT_EXPR
, type
, arg
);
2176 tem
= fold_ignored_result (arg
);
2177 if (TREE_CODE (tem
) == GIMPLE_MODIFY_STMT
)
2179 return fold_build1 (NOP_EXPR
, type
, tem
);
2186 /* Return false if expr can be assumed not to be an lvalue, true
2190 maybe_lvalue_p (tree x
)
2192 /* We only need to wrap lvalue tree codes. */
2193 switch (TREE_CODE (x
))
2204 case ALIGN_INDIRECT_REF
:
2205 case MISALIGNED_INDIRECT_REF
:
2207 case ARRAY_RANGE_REF
:
2213 case PREINCREMENT_EXPR
:
2214 case PREDECREMENT_EXPR
:
2216 case TRY_CATCH_EXPR
:
2217 case WITH_CLEANUP_EXPR
:
2220 case GIMPLE_MODIFY_STMT
:
2229 /* Assume the worst for front-end tree codes. */
2230 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2238 /* Return an expr equal to X but certainly not valid as an lvalue. */
2243 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2248 if (! maybe_lvalue_p (x
))
2250 return build1 (NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2253 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2254 Zero means allow extended lvalues. */
2256 int pedantic_lvalues
;
2258 /* When pedantic, return an expr equal to X but certainly not valid as a
2259 pedantic lvalue. Otherwise, return X. */
2262 pedantic_non_lvalue (tree x
)
2264 if (pedantic_lvalues
)
2265 return non_lvalue (x
);
2270 /* Given a tree comparison code, return the code that is the logical inverse
2271 of the given code. It is not safe to do this for floating-point
2272 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2273 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2276 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2278 if (honor_nans
&& flag_trapping_math
)
2288 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2290 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2292 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2294 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2308 return UNORDERED_EXPR
;
2309 case UNORDERED_EXPR
:
2310 return ORDERED_EXPR
;
2316 /* Similar, but return the comparison that results if the operands are
2317 swapped. This is safe for floating-point. */
2320 swap_tree_comparison (enum tree_code code
)
2327 case UNORDERED_EXPR
:
2353 /* Convert a comparison tree code from an enum tree_code representation
2354 into a compcode bit-based encoding. This function is the inverse of
2355 compcode_to_comparison. */
2357 static enum comparison_code
2358 comparison_to_compcode (enum tree_code code
)
2375 return COMPCODE_ORD
;
2376 case UNORDERED_EXPR
:
2377 return COMPCODE_UNORD
;
2379 return COMPCODE_UNLT
;
2381 return COMPCODE_UNEQ
;
2383 return COMPCODE_UNLE
;
2385 return COMPCODE_UNGT
;
2387 return COMPCODE_LTGT
;
2389 return COMPCODE_UNGE
;
2395 /* Convert a compcode bit-based encoding of a comparison operator back
2396 to GCC's enum tree_code representation. This function is the
2397 inverse of comparison_to_compcode. */
2399 static enum tree_code
2400 compcode_to_comparison (enum comparison_code code
)
2417 return ORDERED_EXPR
;
2418 case COMPCODE_UNORD
:
2419 return UNORDERED_EXPR
;
2437 /* Return a tree for the comparison which is the combination of
2438 doing the AND or OR (depending on CODE) of the two operations LCODE
2439 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2440 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2441 if this makes the transformation invalid. */
2444 combine_comparisons (enum tree_code code
, enum tree_code lcode
,
2445 enum tree_code rcode
, tree truth_type
,
2446 tree ll_arg
, tree lr_arg
)
2448 bool honor_nans
= HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg
)));
2449 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2450 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2451 enum comparison_code compcode
;
2455 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2456 compcode
= lcompcode
& rcompcode
;
2459 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2460 compcode
= lcompcode
| rcompcode
;
2469 /* Eliminate unordered comparisons, as well as LTGT and ORD
2470 which are not used unless the mode has NaNs. */
2471 compcode
&= ~COMPCODE_UNORD
;
2472 if (compcode
== COMPCODE_LTGT
)
2473 compcode
= COMPCODE_NE
;
2474 else if (compcode
== COMPCODE_ORD
)
2475 compcode
= COMPCODE_TRUE
;
2477 else if (flag_trapping_math
)
2479 /* Check that the original operation and the optimized ones will trap
2480 under the same condition. */
2481 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2482 && (lcompcode
!= COMPCODE_EQ
)
2483 && (lcompcode
!= COMPCODE_ORD
);
2484 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2485 && (rcompcode
!= COMPCODE_EQ
)
2486 && (rcompcode
!= COMPCODE_ORD
);
2487 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2488 && (compcode
!= COMPCODE_EQ
)
2489 && (compcode
!= COMPCODE_ORD
);
2491 /* In a short-circuited boolean expression the LHS might be
2492 such that the RHS, if evaluated, will never trap. For
2493 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2494 if neither x nor y is NaN. (This is a mixed blessing: for
2495 example, the expression above will never trap, hence
2496 optimizing it to x < y would be invalid). */
2497 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2498 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2501 /* If the comparison was short-circuited, and only the RHS
2502 trapped, we may now generate a spurious trap. */
2504 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2507 /* If we changed the conditions that cause a trap, we lose. */
2508 if ((ltrap
|| rtrap
) != trap
)
2512 if (compcode
== COMPCODE_TRUE
)
2513 return constant_boolean_node (true, truth_type
);
2514 else if (compcode
== COMPCODE_FALSE
)
2515 return constant_boolean_node (false, truth_type
);
2517 return fold_build2 (compcode_to_comparison (compcode
),
2518 truth_type
, ll_arg
, lr_arg
);
2521 /* Return nonzero if CODE is a tree code that represents a truth value. */
2524 truth_value_p (enum tree_code code
)
2526 return (TREE_CODE_CLASS (code
) == tcc_comparison
2527 || code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
2528 || code
== TRUTH_OR_EXPR
|| code
== TRUTH_ORIF_EXPR
2529 || code
== TRUTH_XOR_EXPR
|| code
== TRUTH_NOT_EXPR
);
2532 /* Return nonzero if two operands (typically of the same tree node)
2533 are necessarily equal. If either argument has side-effects this
2534 function returns zero. FLAGS modifies behavior as follows:
2536 If OEP_ONLY_CONST is set, only return nonzero for constants.
2537 This function tests whether the operands are indistinguishable;
2538 it does not test whether they are equal using C's == operation.
2539 The distinction is important for IEEE floating point, because
2540 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2541 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2543 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2544 even though it may hold multiple values during a function.
2545 This is because a GCC tree node guarantees that nothing else is
2546 executed between the evaluation of its "operands" (which may often
2547 be evaluated in arbitrary order). Hence if the operands themselves
2548 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2549 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2550 unset means assuming isochronic (or instantaneous) tree equivalence.
2551 Unless comparing arbitrary expression trees, such as from different
2552 statements, this flag can usually be left unset.
2554 If OEP_PURE_SAME is set, then pure functions with identical arguments
2555 are considered the same. It is used when the caller has other ways
2556 to ensure that global memory is unchanged in between. */
/* Test whether ARG0 and ARG1 are necessarily equal operands (see the
   block comment above for the OEP_* flag semantics).  Returns nonzero
   only when the operands are indistinguishable; any side effect on
   either side (outside a shared SAVE_EXPR) forces a zero result.
   NOTE(review): this chunk is a lossy extraction -- the return type,
   braces, several `return 0;` paths and some case labels are missing
   below; reconcile against the complete source before compiling.  */
2559 operand_equal_p (tree arg0
, tree arg1
, unsigned int flags
)
2561 /* If either is ERROR_MARK, they aren't equal. */
2562 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
)
2565 /* If both types don't have the same signedness, then we can't consider
2566 them equal. We must check this before the STRIP_NOPS calls
2567 because they may change the signedness of the arguments. */
2568 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2571 /* If both types don't have the same precision, then it is not safe
2573 if (TYPE_PRECISION (TREE_TYPE (arg0
)) != TYPE_PRECISION (TREE_TYPE (arg1
)))
2579 /* In case both args are comparisons but with different comparison
2580 code, try to swap the comparison operands of one arg to produce
2581 a match and compare that variant. */
2582 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2583 && COMPARISON_CLASS_P (arg0
)
2584 && COMPARISON_CLASS_P (arg1
))
2586 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2588 if (TREE_CODE (arg0
) == swap_code
)
/* Swapped comparison matched: compare operands cross-wise.  */
2589 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2590 TREE_OPERAND (arg1
, 1), flags
)
2591 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2592 TREE_OPERAND (arg1
, 0), flags
);
2595 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2596 /* This is needed for conversions and for COMPONENT_REF.
2597 Might as well play it safe and always test this. */
2598 || TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2599 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2600 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2603 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2604 We don't care about side effects in that case because the SAVE_EXPR
2605 takes care of that for us. In all other cases, two expressions are
2606 equal if they have no side effects. If we have two identical
2607 expressions with side effects that should be treated the same due
2608 to the only side effects being identical SAVE_EXPR's, that will
2609 be detected in the recursive calls below. */
2610 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2611 && (TREE_CODE (arg0
) == SAVE_EXPR
2612 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2615 /* Next handle constant cases, those for which we can return 1 even
2616 if ONLY_CONST is set. */
2617 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2618 switch (TREE_CODE (arg0
))
2621 return tree_int_cst_equal (arg0
, arg1
);
2624 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2625 TREE_REAL_CST (arg1
)))
2629 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
))))
2631 /* If we do not distinguish between signed and unsigned zero,
2632 consider them equal. */
2633 if (real_zerop (arg0
) && real_zerop (arg1
))
/* VECTOR_CST: walk the two element chains in lockstep.  */
2642 v1
= TREE_VECTOR_CST_ELTS (arg0
);
2643 v2
= TREE_VECTOR_CST_ELTS (arg1
);
2646 if (!operand_equal_p (TREE_VALUE (v1
), TREE_VALUE (v2
),
2649 v1
= TREE_CHAIN (v1
);
2650 v2
= TREE_CHAIN (v2
);
/* COMPLEX_CST: both real and imaginary parts must match.  */
2657 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2659 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
/* STRING_CST: same length and byte-wise identical contents.  */
2663 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2664 && ! memcmp (TREE_STRING_POINTER (arg0
),
2665 TREE_STRING_POINTER (arg1
),
2666 TREE_STRING_LENGTH (arg0
)));
2669 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
/* Past the constant cases: OEP_ONLY_CONST means we stop here.  */
2675 if (flags
& OEP_ONLY_CONST
)
2678 /* Define macros to test an operand from arg0 and arg1 for equality and a
2679 variant that allows null and views null as being different from any
2680 non-null value. In the latter case, if either is null, both
2681 must be; otherwise, do the normal comparison. */
2682 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2683 TREE_OPERAND (arg1, N), flags)
2685 #define OP_SAME_WITH_NULL(N) \
2686 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2687 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2689 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2692 /* Two conversions are equal only if signedness and modes match. */
2693 switch (TREE_CODE (arg0
))
2697 case FIX_TRUNC_EXPR
:
2698 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2699 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2709 case tcc_comparison
:
2711 if (OP_SAME (0) && OP_SAME (1))
2714 /* For commutative ops, allow the other order. */
2715 return (commutative_tree_code (TREE_CODE (arg0
))
2716 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2717 TREE_OPERAND (arg1
, 1), flags
)
2718 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2719 TREE_OPERAND (arg1
, 0), flags
));
2722 /* If either of the pointer (or reference) expressions we are
2723 dereferencing contain a side effect, these cannot be equal. */
2724 if (TREE_SIDE_EFFECTS (arg0
)
2725 || TREE_SIDE_EFFECTS (arg1
))
2728 switch (TREE_CODE (arg0
))
2731 case ALIGN_INDIRECT_REF
:
2732 case MISALIGNED_INDIRECT_REF
:
2738 case ARRAY_RANGE_REF
:
2739 /* Operands 2 and 3 may be null. */
2742 && OP_SAME_WITH_NULL (2)
2743 && OP_SAME_WITH_NULL (3));
2746 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2747 may be NULL when we're called to compare MEM_EXPRs. */
2748 return OP_SAME_WITH_NULL (0)
2750 && OP_SAME_WITH_NULL (2);
2753 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2759 case tcc_expression
:
2760 switch (TREE_CODE (arg0
))
2763 case TRUTH_NOT_EXPR
:
2766 case TRUTH_ANDIF_EXPR
:
2767 case TRUTH_ORIF_EXPR
:
2768 return OP_SAME (0) && OP_SAME (1);
2770 case TRUTH_AND_EXPR
:
2772 case TRUTH_XOR_EXPR
:
2773 if (OP_SAME (0) && OP_SAME (1))
2776 /* Otherwise take into account this is a commutative operation. */
2777 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2778 TREE_OPERAND (arg1
, 1), flags
)
2779 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2780 TREE_OPERAND (arg1
, 0), flags
));
2783 /* If the CALL_EXPRs call different functions, then they
2784 clearly can not be equal. */
2789 unsigned int cef
= call_expr_flags (arg0
);
2790 if (flags
& OEP_PURE_SAME
)
/* With OEP_PURE_SAME, accept pure as well as const calls.  */
2791 cef
&= ECF_CONST
| ECF_PURE
;
2798 /* Now see if all the arguments are the same. operand_equal_p
2799 does not handle TREE_LIST, so we walk the operands here
2800 feeding them to operand_equal_p. */
2801 arg0
= TREE_OPERAND (arg0
, 1);
2802 arg1
= TREE_OPERAND (arg1
, 1);
2803 while (arg0
&& arg1
)
2805 if (! operand_equal_p (TREE_VALUE (arg0
), TREE_VALUE (arg1
),
2809 arg0
= TREE_CHAIN (arg0
);
2810 arg1
= TREE_CHAIN (arg1
);
2813 /* If we get here and both argument lists are exhausted
2814 then the CALL_EXPRs are equal. */
2815 return ! (arg0
|| arg1
);
2821 case tcc_declaration
:
2822 /* Consider __builtin_sqrt equal to sqrt. */
2823 return (TREE_CODE (arg0
) == FUNCTION_DECL
2824 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
2825 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
2826 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
2833 #undef OP_SAME_WITH_NULL
2836 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2837 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2839 When in doubt, return 0. */
/* Test whether ARG0 could have been produced by shorten_compare from
   ARG1 while ARG1 was being compared against OTHER (see block comment
   above).  Nonzero means "equal for comparison purposes".
   NOTE(review): lossy extraction -- return type, braces and some
   `return` lines are missing below.  */
2842 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
2844 int unsignedp1
, unsignedpo
;
2845 tree primarg0
, primarg1
, primother
;
2846 unsigned int correct_width
;
/* Trivially equal operands need no further work.  */
2848 if (operand_equal_p (arg0
, arg1
, 0))
/* The shorten_compare transformation only applies to integral types.  */
2851 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
2852 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
2855 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2856 and see if the inner values are the same. This removes any
2857 signedness comparison, which doesn't matter here. */
2858 primarg0
= arg0
, primarg1
= arg1
;
2859 STRIP_NOPS (primarg0
);
2860 STRIP_NOPS (primarg1
);
2861 if (operand_equal_p (primarg0
, primarg1
, 0))
2864 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2865 actual comparison operand, ARG0.
2867 First throw away any conversions to wider types
2868 already present in the operands. */
2870 primarg1
= get_narrower (arg1
, &unsignedp1
);
2871 primother
= get_narrower (other
, &unsignedpo
);
2873 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
/* Only proceed when both narrowed operands agree in signedness and
   are strictly narrower than the comparison width.  */
2874 if (unsignedp1
== unsignedpo
2875 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
2876 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
2878 tree type
= TREE_TYPE (arg0
);
2880 /* Make sure shorter operand is extended the right way
2881 to match the longer operand. */
2882 primarg1
= fold_convert (lang_hooks
.types
.signed_or_unsigned_type
2883 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
2885 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
2892 /* See if ARG is an expression that is either a comparison or is performing
2893 arithmetic on comparisons. The comparisons must only be comparing
2894 two different values, which will be stored in *CVAL1 and *CVAL2; if
2895 they are nonzero it means that some operands have already been found.
2896 No variables may be used anywhere else in the expression except in the
2897 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2898 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2900 If this is true, return 1. Otherwise, return zero. */
/* Recursively check that ARG is built only from comparisons of at most
   two distinct values, recorded through *CVAL1/*CVAL2; *SAVE_P is set
   when a SAVE_EXPR was stripped (see block comment above).
   NOTE(review): lossy extraction -- return type, braces, `return 0;`
   paths and several case labels are missing below.  */
2903 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
2905 enum tree_code code
= TREE_CODE (arg
);
2906 enum tree_code_class
class = TREE_CODE_CLASS (code
);
2908 /* We can handle some of the tcc_expression cases here. */
2909 if (class == tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2911 else if (class == tcc_expression
2912 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
2913 || code
== COMPOUND_EXPR
))
2916 else if (class == tcc_expression
&& code
== SAVE_EXPR
2917 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
2919 /* If we've already found a CVAL1 or CVAL2, this expression is
2920 too complex to handle. */
2921 if (*cval1
|| *cval2
)
/* Unary: recurse into the single operand.  */
2931 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
/* Binary: both operands must satisfy the predicate.  */
2934 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
2935 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2936 cval1
, cval2
, save_p
));
2941 case tcc_expression
:
2942 if (code
== COND_EXPR
)
2943 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
2944 cval1
, cval2
, save_p
)
2945 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2946 cval1
, cval2
, save_p
)
2947 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
2948 cval1
, cval2
, save_p
));
2951 case tcc_comparison
:
2952 /* First see if we can handle the first operand, then the second. For
2953 the second operand, we know *CVAL1 can't be zero. It must be that
2954 one side of the comparison is each of the values; test for the
2955 case where this isn't true by failing if the two operands
2958 if (operand_equal_p (TREE_OPERAND (arg
, 0),
2959 TREE_OPERAND (arg
, 1), 0))
/* Record operand 0 into whichever slot is free, or match an
   already-recorded value.  */
2963 *cval1
= TREE_OPERAND (arg
, 0);
2964 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
2966 else if (*cval2
== 0)
2967 *cval2
= TREE_OPERAND (arg
, 0);
2968 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
/* Same slot logic for operand 1; *CVAL1 is known nonzero here.  */
2973 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
2975 else if (*cval2
== 0)
2976 *cval2
= TREE_OPERAND (arg
, 1);
2977 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
2989 /* ARG is a tree that is known to contain just arithmetic operations and
2990 comparisons. Evaluate the operations in the tree substituting NEW0 for
2991 any occurrence of OLD0 as an operand of a comparison and likewise for
/* Rebuild ARG while substituting NEW0 for OLD0 and NEW1 for OLD1 at
   comparison operand positions (see block comment above).
   NOTE(review): lossy extraction -- return type, braces, `switch`
   header and several case labels are missing below.  */
2995 eval_subst (tree arg
, tree old0
, tree new0
, tree old1
, tree new1
)
2997 tree type
= TREE_TYPE (arg
);
2998 enum tree_code code
= TREE_CODE (arg
);
2999 enum tree_code_class
class = TREE_CODE_CLASS (code
);
3001 /* We can handle some of the tcc_expression cases here. */
3002 if (class == tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3004 else if (class == tcc_expression
3005 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
/* Unary node: substitute within the single operand.  */
3011 return fold_build1 (code
, type
,
3012 eval_subst (TREE_OPERAND (arg
, 0),
3013 old0
, new0
, old1
, new1
));
/* Binary node: substitute within both operands.  */
3016 return fold_build2 (code
, type
,
3017 eval_subst (TREE_OPERAND (arg
, 0),
3018 old0
, new0
, old1
, new1
),
3019 eval_subst (TREE_OPERAND (arg
, 1),
3020 old0
, new0
, old1
, new1
));
3022 case tcc_expression
:
3026 return eval_subst (TREE_OPERAND (arg
, 0), old0
, new0
, old1
, new1
);
3029 return eval_subst (TREE_OPERAND (arg
, 1), old0
, new0
, old1
, new1
);
/* Ternary (COND_EXPR-style) node: substitute in all three operands.  */
3032 return fold_build3 (code
, type
,
3033 eval_subst (TREE_OPERAND (arg
, 0),
3034 old0
, new0
, old1
, new1
),
3035 eval_subst (TREE_OPERAND (arg
, 1),
3036 old0
, new0
, old1
, new1
),
3037 eval_subst (TREE_OPERAND (arg
, 2),
3038 old0
, new0
, old1
, new1
));
3042 /* Fall through - ??? */
3044 case tcc_comparison
:
3046 tree arg0
= TREE_OPERAND (arg
, 0);
3047 tree arg1
= TREE_OPERAND (arg
, 1);
3049 /* We need to check both for exact equality and tree equality. The
3050 former will be true if the operand has a side-effect. In that
3051 case, we know the operand occurred exactly once. */
3053 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
3055 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
3058 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
3060 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
/* Rebuild the comparison with the (possibly replaced) operands.  */
3063 return fold_build2 (code
, type
, arg0
, arg1
);
3071 /* Return a tree for the case when the result of an expression is RESULT
3072 converted to TYPE and OMITTED was previously an operand of the expression
3073 but is now not needed (e.g., we folded OMITTED * 0).
3075 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3076 the conversion of RESULT to TYPE. */
/* Build RESULT converted to TYPE while preserving any side effects of
   the dropped operand OMITTED via a COMPOUND_EXPR (see comment above).
   NOTE(review): lossy extraction -- return type and braces missing.  */
3079 omit_one_operand (tree type
, tree result
, tree omitted
)
3081 tree t
= fold_convert (type
, result
);
/* OMITTED must still be evaluated if it has side effects.  */
3083 if (TREE_SIDE_EFFECTS (omitted
))
3084 return build2 (COMPOUND_EXPR
, type
, fold_ignored_result (omitted
), t
);
3086 return non_lvalue (t
);
3089 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
/* Like omit_one_operand, but wraps the result with pedantic_non_lvalue
   instead of non_lvalue (see comment above).
   NOTE(review): lossy extraction -- return type and braces missing.  */
3092 pedantic_omit_one_operand (tree type
, tree result
, tree omitted
)
3094 tree t
= fold_convert (type
, result
);
/* Keep OMITTED's side effects alive in a COMPOUND_EXPR.  */
3096 if (TREE_SIDE_EFFECTS (omitted
))
3097 return build2 (COMPOUND_EXPR
, type
, fold_ignored_result (omitted
), t
);
3099 return pedantic_non_lvalue (t
);
3102 /* Return a tree for the case when the result of an expression is RESULT
3103 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3104 of the expression but are now not needed.
3106 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3107 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3108 evaluated before OMITTED2. Otherwise, if neither has side effects,
3109 just do the conversion of RESULT to TYPE. */
/* Build RESULT converted to TYPE, preserving side effects of the two
   dropped operands; OMITTED1 is evaluated before OMITTED2 (see comment
   above).  NOTE(review): lossy extraction -- return type and braces
   are missing below.  */
3112 omit_two_operands (tree type
, tree result
, tree omitted1
, tree omitted2
)
3114 tree t
= fold_convert (type
, result
);
/* Wrap innermost first so OMITTED1 ends up evaluated before OMITTED2.  */
3116 if (TREE_SIDE_EFFECTS (omitted2
))
3117 t
= build2 (COMPOUND_EXPR
, type
, omitted2
, t
);
3118 if (TREE_SIDE_EFFECTS (omitted1
))
3119 t
= build2 (COMPOUND_EXPR
, type
, omitted1
, t
);
/* Only apply non_lvalue when no COMPOUND_EXPR wrapper was added.  */
3121 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue (t
) : t
;
3125 /* Return a simplified tree node for the truth-negation of ARG. This
3126 never alters ARG itself. We assume that ARG is an operation that
3127 returns a truth value (0 or 1).
3129 FIXME: one would think we would fold the result, but it causes
3130 problems with the dominator optimizer. */
/* Build the truth-negation of ARG without altering ARG (see comment
   above).  NOTE(review): lossy extraction -- return type, braces, the
   `switch (code)` header and several case labels are missing below;
   the failure/NULL return paths are also not visible.  */
3133 fold_truth_not_expr (tree arg
)
3135 tree type
= TREE_TYPE (arg
);
3136 enum tree_code code
= TREE_CODE (arg
);
3138 /* If this is a comparison, we can simply invert it, except for
3139 floating-point non-equality comparisons, in which case we just
3140 enclose a TRUTH_NOT_EXPR around what we have. */
3142 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3144 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
/* Trapping FP ordered comparisons cannot simply be inverted.  */
3145 if (FLOAT_TYPE_P (op_type
)
3146 && flag_trapping_math
3147 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3148 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3152 code
= invert_tree_comparison (code
,
3153 HONOR_NANS (TYPE_MODE (op_type
)));
3154 if (code
== ERROR_MARK
)
3157 return build2 (code
, type
,
3158 TREE_OPERAND (arg
, 0), TREE_OPERAND (arg
, 1));
/* Constant: !0 -> 1, !nonzero -> 0.  */
3165 return constant_boolean_node (integer_zerop (arg
), type
);
3167 case TRUTH_AND_EXPR
:
/* De Morgan: !(a && b) == !a || !b.  */
3168 return build2 (TRUTH_OR_EXPR
, type
,
3169 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3170 invert_truthvalue (TREE_OPERAND (arg
, 1)));
/* De Morgan: !(a || b) == !a && !b.  */
3173 return build2 (TRUTH_AND_EXPR
, type
,
3174 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3175 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3177 case TRUTH_XOR_EXPR
:
3178 /* Here we can invert either operand. We invert the first operand
3179 unless the second operand is a TRUTH_NOT_EXPR in which case our
3180 result is the XOR of the first operand with the inside of the
3181 negation of the second operand. */
3183 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3184 return build2 (TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3185 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3187 return build2 (TRUTH_XOR_EXPR
, type
,
3188 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3189 TREE_OPERAND (arg
, 1));
3191 case TRUTH_ANDIF_EXPR
:
/* Short-circuit forms invert the same way as their strict variants.  */
3192 return build2 (TRUTH_ORIF_EXPR
, type
,
3193 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3194 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3196 case TRUTH_ORIF_EXPR
:
3197 return build2 (TRUTH_ANDIF_EXPR
, type
,
3198 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3199 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3201 case TRUTH_NOT_EXPR
:
/* Double negation: return the inner operand unchanged.  */
3202 return TREE_OPERAND (arg
, 0);
3206 tree arg1
= TREE_OPERAND (arg
, 1);
3207 tree arg2
= TREE_OPERAND (arg
, 2);
3208 /* A COND_EXPR may have a throw as one operand, which
3209 then has void type. Just leave void operands
3211 return build3 (COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3212 VOID_TYPE_P (TREE_TYPE (arg1
))
3213 ? arg1
: invert_truthvalue (arg1
),
3214 VOID_TYPE_P (TREE_TYPE (arg2
))
3215 ? arg2
: invert_truthvalue (arg2
));
/* COMPOUND_EXPR: keep the first operand, invert the second.  */
3219 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3220 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3222 case NON_LVALUE_EXPR
:
3223 return invert_truthvalue (TREE_OPERAND (arg
, 0));
3226 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3227 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
/* Conversion-like unary node: push the negation inside.  */
3231 return build1 (TREE_CODE (arg
), type
,
3232 invert_truthvalue (TREE_OPERAND (arg
, 0)));
/* BIT_AND_EXPR with 1: !(x & 1) becomes x == 0.  */
3235 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3237 return build2 (EQ_EXPR
, type
, arg
,
3238 build_int_cst (type
, 0));
3241 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
3243 case CLEANUP_POINT_EXPR
:
3244 return build1 (CLEANUP_POINT_EXPR
, type
,
3245 invert_truthvalue (TREE_OPERAND (arg
, 0)));
3254 /* Return a simplified tree node for the truth-negation of ARG. This
3255 never alters ARG itself. We assume that ARG is an operation that
3256 returns a truth value (0 or 1).
3258 FIXME: one would think we would fold the result, but it causes
3259 problems with the dominator optimizer. */
/* Public wrapper: try fold_truth_not_expr first; if it cannot simplify,
   fall back to an explicit TRUTH_NOT_EXPR node (see comment above).
   NOTE(review): lossy extraction -- return type, braces, `tem`'s
   declaration and the final return are missing below.  */
3262 invert_truthvalue (tree arg
)
3266 if (TREE_CODE (arg
) == ERROR_MARK
)
3269 tem
= fold_truth_not_expr (arg
);
/* Fallback when no simplification was found.  */
3271 tem
= build1 (TRUTH_NOT_EXPR
, TREE_TYPE (arg
), arg
);
3276 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3277 operands are another bit-wise operation with a common input. If so,
3278 distribute the bit operations to save an operation and possibly two if
3279 constants are involved. For example, convert
3280 (A | B) & (A | C) into A | (B & C)
3281 Further simplification will occur if B and C are constants.
3283 If this optimization cannot be done, 0 will be returned. */
/* Factor a common operand out of two bit-wise sub-expressions, e.g.
   (A | B) & (A | C) -> A | (B & C) (see comment above).  Returns 0 if
   the transformation does not apply.
   NOTE(review): lossy extraction -- return type, braces, declarations
   of common/left/right and the failing return are missing below.  */
3286 distribute_bit_expr (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
/* Both operands must be the same BIT_AND/BIT_IOR code, distinct from
   the outer CODE.  */
3291 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3292 || TREE_CODE (arg0
) == code
3293 || (TREE_CODE (arg0
) != BIT_AND_EXPR
3294 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
/* Try all four pairings to locate the shared operand.  */
3297 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
3299 common
= TREE_OPERAND (arg0
, 0);
3300 left
= TREE_OPERAND (arg0
, 1);
3301 right
= TREE_OPERAND (arg1
, 1);
3303 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
3305 common
= TREE_OPERAND (arg0
, 0);
3306 left
= TREE_OPERAND (arg0
, 1);
3307 right
= TREE_OPERAND (arg1
, 0);
3309 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
3311 common
= TREE_OPERAND (arg0
, 1);
3312 left
= TREE_OPERAND (arg0
, 0);
3313 right
= TREE_OPERAND (arg1
, 1);
3315 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
3317 common
= TREE_OPERAND (arg0
, 1);
3318 left
= TREE_OPERAND (arg0
, 0);
3319 right
= TREE_OPERAND (arg1
, 0);
/* Rebuild as COMMON op (LEFT code RIGHT).  */
3324 return fold_build2 (TREE_CODE (arg0
), type
, common
,
3325 fold_build2 (code
, type
, left
, right
));
3328 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3329 with code CODE. This optimization is unsafe. */
/* Distribute CODE over two RDIV_EXPRs (unsafe FP transform, see comment
   above): (A/C) +- (B/C) -> (A +- B)/C, and
   (A/C1) +- (A/C2) -> A * (1/C1 +- 1/C2) for REAL_CST divisors.
   NOTE(review): lossy extraction -- return type, braces, the guard
   using mul0/mul1 and the failing return are missing below.  */
3331 distribute_real_division (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
3333 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3334 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3336 /* (A / C) +- (B / C) -> (A +- B) / C. */
3338 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3339 TREE_OPERAND (arg1
, 1), 0))
3340 return fold_build2 (mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3341 fold_build2 (code
, type
,
3342 TREE_OPERAND (arg0
, 0),
3343 TREE_OPERAND (arg1
, 0)),
3344 TREE_OPERAND (arg0
, 1));
3346 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3347 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3348 TREE_OPERAND (arg1
, 0), 0)
3349 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3350 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3352 REAL_VALUE_TYPE r0
, r1
;
3353 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3354 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
/* Compute 1/C1 and 1/C2, then combine them with CODE.  */
3356 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3358 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3359 real_arithmetic (&r0
, code
, &r0
, &r1
);
3360 return fold_build2 (MULT_EXPR
, type
,
3361 TREE_OPERAND (arg0
, 0),
3362 build_real (type
, r0
));
3368 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3369 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
/* Build a BIT_FIELD_REF of TYPE covering BITSIZE bits of INNER at
   BITPOS; UNSIGNEDP marks the field unsigned (see comment above).
   NOTE(review): lossy extraction -- return type, braces, the unsignedp
   parameter line, `result` declaration and final return are missing.  */
3372 make_bit_field_ref (tree inner
, tree type
, int bitsize
, int bitpos
,
3379 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
/* If the requested field is exactly the whole (integral or pointer)
   object, a plain conversion suffices.  */
3380 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3381 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3382 && host_integerp (size
, 0)
3383 && tree_low_cst (size
, 0) == bitsize
)
3384 return fold_convert (type
, inner
);
3387 result
= build3 (BIT_FIELD_REF
, type
, inner
,
3388 size_int (bitsize
), bitsize_int (bitpos
));
3390 BIT_FIELD_REF_UNSIGNED (result
) = unsignedp
;
3395 /* Optimize a bit-field compare.
3397 There are two cases: First is a compare against a constant and the
3398 second is a comparison of two items where the fields are at the same
3399 bit position relative to the start of a chunk (byte, halfword, word)
3400 large enough to contain it. In these cases we can avoid the shift
3401 implicit in bitfield extractions.
3403 For constants, we emit a compare of the shifted constant with the
3404 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3405 compared. For two fields at the same position, we do the ANDs with the
3406 similar mask and compare the result of the ANDs.
3408 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3409 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3410 are the left and right operands of the comparison, respectively.
3412 If the optimization described above can be done, we return the resulting
3413 tree. Otherwise we return zero. */
/* Optimize an EQ_EXPR/NE_EXPR bit-field comparison (see the long block
   comment above for the two handled cases).  Returns the replacement
   tree, or zero when no optimization applies.
   NOTE(review): lossy extraction -- return type, braces, the lhs/rhs
   parameter line, offset/mask declarations and many early-return
   statements are missing below.  */
3416 optimize_bit_field_compare (enum tree_code code
, tree compare_type
,
3419 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3420 tree type
= TREE_TYPE (lhs
);
3421 tree signed_type
, unsigned_type
;
3422 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3423 enum machine_mode lmode
, rmode
, nmode
;
3424 int lunsignedp
, runsignedp
;
3425 int lvolatilep
= 0, rvolatilep
= 0;
3426 tree linner
, rinner
= NULL_TREE
;
3430 /* Get all the information about the extractions being done. If the bit size
3431 is the same as the size of the underlying object, we aren't doing an
3432 extraction at all and so can do nothing. We also don't want to
3433 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3434 then will no longer be able to replace it. */
3435 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3436 &lunsignedp
, &lvolatilep
, false);
3437 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3438 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
)
3443 /* If this is not a constant, we can only do something if bit positions,
3444 sizes, and signedness are the same. */
3445 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3446 &runsignedp
, &rvolatilep
, false);
3448 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3449 || lunsignedp
!= runsignedp
|| offset
!= 0
3450 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
)
3454 /* See if we can find a mode to refer to this field. We should be able to,
3455 but fail if we can't. */
3456 nmode
= get_best_mode (lbitsize
, lbitpos
,
3457 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3458 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3459 TYPE_ALIGN (TREE_TYPE (rinner
))),
3460 word_mode
, lvolatilep
|| rvolatilep
);
3461 if (nmode
== VOIDmode
)
3464 /* Set signed and unsigned types of the precision of this mode for the
3466 signed_type
= lang_hooks
.types
.type_for_mode (nmode
, 0);
3467 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3469 /* Compute the bit position and size for the new reference and our offset
3470 within it. If the new reference is the same size as the original, we
3471 won't optimize anything, so return zero. */
3472 nbitsize
= GET_MODE_BITSIZE (nmode
);
3473 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3475 if (nbitsize
== lbitsize
)
/* Bit positions count from the other end on big-endian targets.  */
3478 if (BYTES_BIG_ENDIAN
)
3479 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3481 /* Make the mask to be used against the extracted field. */
3482 mask
= build_int_cst_type (unsigned_type
, -1);
3483 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
), 0);
3484 mask
= const_binop (RSHIFT_EXPR
, mask
,
3485 size_int (nbitsize
- lbitsize
- lbitpos
), 0);
3488 /* If not comparing with constant, just rework the comparison
3490 return fold_build2 (code
, compare_type
,
3491 fold_build2 (BIT_AND_EXPR
, unsigned_type
,
3492 make_bit_field_ref (linner
,
3497 fold_build2 (BIT_AND_EXPR
, unsigned_type
,
3498 make_bit_field_ref (rinner
,
3504 /* Otherwise, we are handling the constant case. See if the constant is too
3505 big for the field. Warn and return a tree for 0 (false) if so. We do
3506 this not only for its own sake, but to avoid having to test for this
3507 error case below. If we didn't, we might generate wrong code.
3509 For unsigned fields, the constant shifted right by the field length should
3510 be all zero. For signed fields, the high-order bits should agree with
3515 if (! integer_zerop (const_binop (RSHIFT_EXPR
,
3516 fold_convert (unsigned_type
, rhs
),
3517 size_int (lbitsize
), 0)))
3519 warning (0, "comparison is always %d due to width of bit-field",
3521 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
/* Signed case: the bits above the field must be a sign copy.  */
3526 tree tem
= const_binop (RSHIFT_EXPR
, fold_convert (signed_type
, rhs
),
3527 size_int (lbitsize
- 1), 0);
3528 if (! integer_zerop (tem
) && ! integer_all_onesp (tem
))
3530 warning (0, "comparison is always %d due to width of bit-field",
3532 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3536 /* Single-bit compares should always be against zero. */
3537 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3539 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3540 rhs
= build_int_cst (type
, 0);
3543 /* Make a new bitfield reference, shift the constant over the
3544 appropriate number of bits and mask it with the computed mask
3545 (in case this was a signed field). If we changed it, make a new one. */
3546 lhs
= make_bit_field_ref (linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3549 TREE_SIDE_EFFECTS (lhs
) = 1;
3550 TREE_THIS_VOLATILE (lhs
) = 1;
3553 rhs
= const_binop (BIT_AND_EXPR
,
3554 const_binop (LSHIFT_EXPR
,
3555 fold_convert (unsigned_type
, rhs
),
3556 size_int (lbitpos
), 0),
3559 return build2 (code
, compare_type
,
3560 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
),
3564 /* Subroutine for fold_truthop: decode a field reference.
3566 If EXP is a comparison reference, we return the innermost reference.
3568 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3569 set to the starting bit number.
3571 If the innermost field can be completely contained in a mode-sized
3572 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3574 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3575 otherwise it is not changed.
3577 *PUNSIGNEDP is set to the signedness of the field.
3579 *PMASK is set to the mask used. This is either contained in a
3580 BIT_AND_EXPR or derived from the width of the field.
3582 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3584 Return 0 if this is not a component reference or is one that we can't
3585 do anything with. */
/* Decode a field reference for fold_truthop; see the block comment
   above for the meaning of each out-parameter.  Returns the innermost
   reference, or 0 when EXP cannot be handled.
   NOTE(review): lossy extraction -- return type, braces, the
   and_mask/unsigned_type declarations and several early returns are
   missing below.  */
3588 decode_field_reference (tree exp
, HOST_WIDE_INT
*pbitsize
,
3589 HOST_WIDE_INT
*pbitpos
, enum machine_mode
*pmode
,
3590 int *punsignedp
, int *pvolatilep
,
3591 tree
*pmask
, tree
*pand_mask
)
3593 tree outer_type
= 0;
3595 tree mask
, inner
, offset
;
3597 unsigned int precision
;
3599 /* All the optimizations using this function assume integer fields.
3600 There are problems with FP fields since the type_for_size call
3601 below can fail for, e.g., XFmode. */
3602 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3605 /* We are interested in the bare arrangement of bits, so strip everything
3606 that doesn't affect the machine mode. However, record the type of the
3607 outermost expression if it may matter below. */
3608 if (TREE_CODE (exp
) == NOP_EXPR
3609 || TREE_CODE (exp
) == CONVERT_EXPR
3610 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3611 outer_type
= TREE_TYPE (exp
);
/* Peel an outer BIT_AND_EXPR with a constant mask, remembering it.  */
3614 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3616 and_mask
= TREE_OPERAND (exp
, 1);
3617 exp
= TREE_OPERAND (exp
, 0);
3618 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3619 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3623 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3624 punsignedp
, pvolatilep
, false);
3625 if ((inner
== exp
&& and_mask
== 0)
3626 || *pbitsize
< 0 || offset
!= 0
3627 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3630 /* If the number of bits in the reference is the same as the bitsize of
3631 the outer type, then the outer type gives the signedness. Otherwise
3632 (in case of a small bitfield) the signedness is unchanged. */
3633 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3634 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3636 /* Compute the mask to access the bitfield. */
3637 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3638 precision
= TYPE_PRECISION (unsigned_type
);
/* All-ones value, then shift left/right to keep *PBITSIZE low bits.  */
3640 mask
= build_int_cst_type (unsigned_type
, -1);
3642 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
), 0);
3643 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
), 0);
3645 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3647 mask
= fold_build2 (BIT_AND_EXPR
, unsigned_type
,
3648 fold_convert (unsigned_type
, and_mask
), mask
);
3651 *pand_mask
= and_mask
;
3655 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
/* Return nonzero if MASK is a constant with SIZE low-order one bits
   (see comment above).  NOTE(review): lossy extraction -- return type,
   braces, the tmask declaration and the surrounding return expression
   are partly missing below.  */
3659 all_ones_mask_p (tree mask
, int size
)
3661 tree type
= TREE_TYPE (mask
);
3662 unsigned int precision
= TYPE_PRECISION (type
);
/* Build an all-ones value of the signed variant of TYPE.  */
3665 tmask
= build_int_cst_type (lang_hooks
.types
.signed_type (type
), -1);
/* Shift left then (arithmetic) right to isolate SIZE low bits and
   compare against MASK.  */
3668 tree_int_cst_equal (mask
,
3669 const_binop (RSHIFT_EXPR
,
3670 const_binop (LSHIFT_EXPR
, tmask
,
3671 size_int (precision
- size
),
3673 size_int (precision
- size
), 0));
3676 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3677 represents the sign bit of EXP's type. If EXP represents a sign
3678 or zero extension, also test VAL against the unextended type.
3679 The return value is the (sub)expression whose sign bit is VAL,
3680 or NULL_TREE otherwise. */
3683 sign_bit_p (tree exp
, tree val
)
3685 unsigned HOST_WIDE_INT mask_lo
, lo
;
3686 HOST_WIDE_INT mask_hi
, hi
;
3690 /* Tree EXP must have an integral type. */
3691 t
= TREE_TYPE (exp
);
3692 if (! INTEGRAL_TYPE_P (t
))
3695 /* Tree VAL must be an integer constant. */
3696 if (TREE_CODE (val
) != INTEGER_CST
3697 || TREE_OVERFLOW (val
))
3700 width
= TYPE_PRECISION (t
);
3701 if (width
> HOST_BITS_PER_WIDE_INT
)
3703 hi
= (unsigned HOST_WIDE_INT
) 1 << (width
- HOST_BITS_PER_WIDE_INT
- 1);
3706 mask_hi
= ((unsigned HOST_WIDE_INT
) -1
3707 >> (2 * HOST_BITS_PER_WIDE_INT
- width
));
3713 lo
= (unsigned HOST_WIDE_INT
) 1 << (width
- 1);
3716 mask_lo
= ((unsigned HOST_WIDE_INT
) -1
3717 >> (HOST_BITS_PER_WIDE_INT
- width
));
3720 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3721 treat VAL as if it were unsigned. */
3722 if ((TREE_INT_CST_HIGH (val
) & mask_hi
) == hi
3723 && (TREE_INT_CST_LOW (val
) & mask_lo
) == lo
)
3726 /* Handle extension from a narrower type. */
3727 if (TREE_CODE (exp
) == NOP_EXPR
3728 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
3729 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
3734 /* Subroutine for fold_truthop: determine if an operand is simple enough
3735 to be evaluated unconditionally. */
3738 simple_operand_p (tree exp
)
3740 /* Strip any conversions that don't change the machine mode. */
3743 return (CONSTANT_CLASS_P (exp
)
3744 || TREE_CODE (exp
) == SSA_NAME
3746 && ! TREE_ADDRESSABLE (exp
)
3747 && ! TREE_THIS_VOLATILE (exp
)
3748 && ! DECL_NONLOCAL (exp
)
3749 /* Don't regard global variables as simple. They may be
3750 allocated in ways unknown to the compiler (shared memory,
3751 #pragma weak, etc). */
3752 && ! TREE_PUBLIC (exp
)
3753 && ! DECL_EXTERNAL (exp
)
3754 /* Loading a static variable is unduly expensive, but global
3755 registers aren't expensive. */
3756 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
3759 /* The following functions are subroutines to fold_range_test and allow it to
3760 try to change a logical combination of comparisons into a range test.
3763 X == 2 || X == 3 || X == 4 || X == 5
3767 (unsigned) (X - 2) <= 3
3769 We describe each set of comparisons as being either inside or outside
3770 a range, using a variable named like IN_P, and then describe the
3771 range with a lower and upper bound. If one of the bounds is omitted,
3772 it represents either the highest or lowest value of the type.
3774 In the comments below, we represent a range by two numbers in brackets
3775 preceded by a "+" to designate being inside that range, or a "-" to
3776 designate being outside that range, so the condition can be inverted by
3777 flipping the prefix. An omitted bound is represented by a "-". For
3778 example, "- [-, 10]" means being outside the range starting at the lowest
3779 possible value and ending at 10, in other words, being greater than 10.
3780 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3783 We set up things so that the missing bounds are handled in a consistent
3784 manner so neither a missing bound nor "true" and "false" need to be
3785 handled using a special case. */
3787 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3788 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3789 and UPPER1_P are nonzero if the respective argument is an upper bound
3790 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3791 must be specified for a comparison. ARG1 will be converted to ARG0's
3792 type if both are specified. */
3795 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
3796 tree arg1
, int upper1_p
)
3802 /* If neither arg represents infinity, do the normal operation.
3803 Else, if not a comparison, return infinity. Else handle the special
3804 comparison rules. Note that most of the cases below won't occur, but
3805 are handled for consistency. */
3807 if (arg0
!= 0 && arg1
!= 0)
3809 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
3810 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
3812 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
3815 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
3818 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3819 for neither. In real maths, we cannot assume open ended ranges are
3820 the same. But, this is computer arithmetic, where numbers are finite.
3821 We can therefore make the transformation of any unbounded range with
3822 the value Z, Z being greater than any representable number. This permits
3823 us to treat unbounded ranges as equal. */
3824 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
3825 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
3829 result
= sgn0
== sgn1
;
3832 result
= sgn0
!= sgn1
;
3835 result
= sgn0
< sgn1
;
3838 result
= sgn0
<= sgn1
;
3841 result
= sgn0
> sgn1
;
3844 result
= sgn0
>= sgn1
;
3850 return constant_boolean_node (result
, type
);
3853 /* Given EXP, a logical expression, set the range it is testing into
3854 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3855 actually being tested. *PLOW and *PHIGH will be made of the same type
3856 as the returned expression. If EXP is not a comparison, we will most
3857 likely not be returning a useful value and range. */
3860 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
)
3862 enum tree_code code
;
3863 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
;
3864 tree exp_type
= NULL_TREE
, arg0_type
= NULL_TREE
;
3866 tree low
, high
, n_low
, n_high
;
3868 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3869 and see if we can refine the range. Some of the cases below may not
3870 happen, but it doesn't seem worth worrying about this. We "continue"
3871 the outer loop when we've changed something; otherwise we "break"
3872 the switch, which will "break" the while. */
3875 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
3879 code
= TREE_CODE (exp
);
3880 exp_type
= TREE_TYPE (exp
);
3882 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
3884 if (TREE_CODE_LENGTH (code
) > 0)
3885 arg0
= TREE_OPERAND (exp
, 0);
3886 if (TREE_CODE_CLASS (code
) == tcc_comparison
3887 || TREE_CODE_CLASS (code
) == tcc_unary
3888 || TREE_CODE_CLASS (code
) == tcc_binary
)
3889 arg0_type
= TREE_TYPE (arg0
);
3890 if (TREE_CODE_CLASS (code
) == tcc_binary
3891 || TREE_CODE_CLASS (code
) == tcc_comparison
3892 || (TREE_CODE_CLASS (code
) == tcc_expression
3893 && TREE_CODE_LENGTH (code
) > 1))
3894 arg1
= TREE_OPERAND (exp
, 1);
3899 case TRUTH_NOT_EXPR
:
3900 in_p
= ! in_p
, exp
= arg0
;
3903 case EQ_EXPR
: case NE_EXPR
:
3904 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
3905 /* We can only do something if the range is testing for zero
3906 and if the second operand is an integer constant. Note that
3907 saying something is "in" the range we make is done by
3908 complementing IN_P since it will set in the initial case of
3909 being not equal to zero; "out" is leaving it alone. */
3910 if (low
== 0 || high
== 0
3911 || ! integer_zerop (low
) || ! integer_zerop (high
)
3912 || TREE_CODE (arg1
) != INTEGER_CST
)
3917 case NE_EXPR
: /* - [c, c] */
3920 case EQ_EXPR
: /* + [c, c] */
3921 in_p
= ! in_p
, low
= high
= arg1
;
3923 case GT_EXPR
: /* - [-, c] */
3924 low
= 0, high
= arg1
;
3926 case GE_EXPR
: /* + [c, -] */
3927 in_p
= ! in_p
, low
= arg1
, high
= 0;
3929 case LT_EXPR
: /* - [c, -] */
3930 low
= arg1
, high
= 0;
3932 case LE_EXPR
: /* + [-, c] */
3933 in_p
= ! in_p
, low
= 0, high
= arg1
;
3939 /* If this is an unsigned comparison, we also know that EXP is
3940 greater than or equal to zero. We base the range tests we make
3941 on that fact, so we record it here so we can parse existing
3942 range tests. We test arg0_type since often the return type
3943 of, e.g. EQ_EXPR, is boolean. */
3944 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
3946 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
3948 build_int_cst (arg0_type
, 0),
3952 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
3954 /* If the high bound is missing, but we have a nonzero low
3955 bound, reverse the range so it goes from zero to the low bound
3957 if (high
== 0 && low
&& ! integer_zerop (low
))
3960 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
3961 integer_one_node
, 0);
3962 low
= build_int_cst (arg0_type
, 0);
3970 /* (-x) IN [a,b] -> x in [-b, -a] */
3971 n_low
= range_binop (MINUS_EXPR
, exp_type
,
3972 build_int_cst (exp_type
, 0),
3974 n_high
= range_binop (MINUS_EXPR
, exp_type
,
3975 build_int_cst (exp_type
, 0),
3977 low
= n_low
, high
= n_high
;
3983 exp
= build2 (MINUS_EXPR
, exp_type
, negate_expr (arg0
),
3984 build_int_cst (exp_type
, 1));
3987 case PLUS_EXPR
: case MINUS_EXPR
:
3988 if (TREE_CODE (arg1
) != INTEGER_CST
)
3991 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3992 move a constant to the other side. */
3993 if (!TYPE_UNSIGNED (arg0_type
)
3994 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
3997 /* If EXP is signed, any overflow in the computation is undefined,
3998 so we don't worry about it so long as our computations on
3999 the bounds don't overflow. For unsigned, overflow is defined
4000 and this is exactly the right thing. */
4001 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4002 arg0_type
, low
, 0, arg1
, 0);
4003 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4004 arg0_type
, high
, 1, arg1
, 0);
4005 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
4006 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
4009 /* Check for an unsigned range which has wrapped around the maximum
4010 value thus making n_high < n_low, and normalize it. */
4011 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
4013 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
4014 integer_one_node
, 0);
4015 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
4016 integer_one_node
, 0);
4018 /* If the range is of the form +/- [ x+1, x ], we won't
4019 be able to normalize it. But then, it represents the
4020 whole range or the empty set, so make it
4022 if (tree_int_cst_equal (n_low
, low
)
4023 && tree_int_cst_equal (n_high
, high
))
4029 low
= n_low
, high
= n_high
;
4034 case NOP_EXPR
: case NON_LVALUE_EXPR
: case CONVERT_EXPR
:
4035 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
4038 if (! INTEGRAL_TYPE_P (arg0_type
)
4039 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
4040 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
4043 n_low
= low
, n_high
= high
;
4046 n_low
= fold_convert (arg0_type
, n_low
);
4049 n_high
= fold_convert (arg0_type
, n_high
);
4052 /* If we're converting arg0 from an unsigned type, to exp,
4053 a signed type, we will be doing the comparison as unsigned.
4054 The tests above have already verified that LOW and HIGH
4057 So we have to ensure that we will handle large unsigned
4058 values the same way that the current signed bounds treat
4061 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
4064 tree equiv_type
= lang_hooks
.types
.type_for_mode
4065 (TYPE_MODE (arg0_type
), 1);
4067 /* A range without an upper bound is, naturally, unbounded.
4068 Since convert would have cropped a very large value, use
4069 the max value for the destination type. */
4071 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
4072 : TYPE_MAX_VALUE (arg0_type
);
4074 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
4075 high_positive
= fold_build2 (RSHIFT_EXPR
, arg0_type
,
4076 fold_convert (arg0_type
,
4078 build_int_cst (arg0_type
, 1));
4080 /* If the low bound is specified, "and" the range with the
4081 range for which the original unsigned value will be
4085 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4086 1, n_low
, n_high
, 1,
4087 fold_convert (arg0_type
,
4092 in_p
= (n_in_p
== in_p
);
4096 /* Otherwise, "or" the range with the range of the input
4097 that will be interpreted as negative. */
4098 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4099 0, n_low
, n_high
, 1,
4100 fold_convert (arg0_type
,
4105 in_p
= (in_p
!= n_in_p
);
4110 low
= n_low
, high
= n_high
;
4120 /* If EXP is a constant, we can evaluate whether this is true or false. */
4121 if (TREE_CODE (exp
) == INTEGER_CST
)
4123 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
4125 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4131 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4135 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4136 type, TYPE, return an expression to test if EXP is in (or out of, depending
4137 on IN_P) the range. Return 0 if the test couldn't be created. */
4140 build_range_check (tree type
, tree exp
, int in_p
, tree low
, tree high
)
4142 tree etype
= TREE_TYPE (exp
);
4145 #ifdef HAVE_canonicalize_funcptr_for_compare
4146 /* Disable this optimization for function pointer expressions
4147 on targets that require function pointer canonicalization. */
4148 if (HAVE_canonicalize_funcptr_for_compare
4149 && TREE_CODE (etype
) == POINTER_TYPE
4150 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4156 value
= build_range_check (type
, exp
, 1, low
, high
);
4158 return invert_truthvalue (value
);
4163 if (low
== 0 && high
== 0)
4164 return build_int_cst (type
, 1);
4167 return fold_build2 (LE_EXPR
, type
, exp
,
4168 fold_convert (etype
, high
));
4171 return fold_build2 (GE_EXPR
, type
, exp
,
4172 fold_convert (etype
, low
));
4174 if (operand_equal_p (low
, high
, 0))
4175 return fold_build2 (EQ_EXPR
, type
, exp
,
4176 fold_convert (etype
, low
));
4178 if (integer_zerop (low
))
4180 if (! TYPE_UNSIGNED (etype
))
4182 etype
= lang_hooks
.types
.unsigned_type (etype
);
4183 high
= fold_convert (etype
, high
);
4184 exp
= fold_convert (etype
, exp
);
4186 return build_range_check (type
, exp
, 1, 0, high
);
4189 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4190 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4192 unsigned HOST_WIDE_INT lo
;
4196 prec
= TYPE_PRECISION (etype
);
4197 if (prec
<= HOST_BITS_PER_WIDE_INT
)
4200 lo
= ((unsigned HOST_WIDE_INT
) 1 << (prec
- 1)) - 1;
4204 hi
= ((HOST_WIDE_INT
) 1 << (prec
- HOST_BITS_PER_WIDE_INT
- 1)) - 1;
4205 lo
= (unsigned HOST_WIDE_INT
) -1;
4208 if (TREE_INT_CST_HIGH (high
) == hi
&& TREE_INT_CST_LOW (high
) == lo
)
4210 if (TYPE_UNSIGNED (etype
))
4212 etype
= lang_hooks
.types
.signed_type (etype
);
4213 exp
= fold_convert (etype
, exp
);
4215 return fold_build2 (GT_EXPR
, type
, exp
,
4216 build_int_cst (etype
, 0));
4220 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4221 This requires wrap-around arithmetics for the type of the expression. */
4222 switch (TREE_CODE (etype
))
4225 /* There is no requirement that LOW be within the range of ETYPE
4226 if the latter is a subtype. It must, however, be within the base
4227 type of ETYPE. So be sure we do the subtraction in that type. */
4228 if (TREE_TYPE (etype
))
4229 etype
= TREE_TYPE (etype
);
4234 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
),
4235 TYPE_UNSIGNED (etype
));
4242 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4243 if (TREE_CODE (etype
) == INTEGER_TYPE
4244 && !TYPE_OVERFLOW_WRAPS (etype
))
4246 tree utype
, minv
, maxv
;
4248 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4249 for the type in question, as we rely on this here. */
4250 utype
= lang_hooks
.types
.unsigned_type (etype
);
4251 maxv
= fold_convert (utype
, TYPE_MAX_VALUE (etype
));
4252 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4253 integer_one_node
, 1);
4254 minv
= fold_convert (utype
, TYPE_MIN_VALUE (etype
));
4256 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4263 high
= fold_convert (etype
, high
);
4264 low
= fold_convert (etype
, low
);
4265 exp
= fold_convert (etype
, exp
);
4267 value
= const_binop (MINUS_EXPR
, high
, low
, 0);
4269 if (value
!= 0 && !TREE_OVERFLOW (value
))
4270 return build_range_check (type
,
4271 fold_build2 (MINUS_EXPR
, etype
, exp
, low
),
4272 1, build_int_cst (etype
, 0), value
);
4277 /* Return the predecessor of VAL in its type, handling the infinite case. */
4280 range_predecessor (tree val
)
4282 tree type
= TREE_TYPE (val
);
4284 if (INTEGRAL_TYPE_P (type
)
4285 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
4288 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0, integer_one_node
, 0);
4291 /* Return the successor of VAL in its type, handling the infinite case. */
4294 range_successor (tree val
)
4296 tree type
= TREE_TYPE (val
);
4298 if (INTEGRAL_TYPE_P (type
)
4299 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
4302 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0, integer_one_node
, 0);
4305 /* Given two ranges, see if we can merge them into one. Return 1 if we
4306 can, 0 if we can't. Set the output range into the specified parameters. */
4309 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4310 tree high0
, int in1_p
, tree low1
, tree high1
)
4318 int lowequal
= ((low0
== 0 && low1
== 0)
4319 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4320 low0
, 0, low1
, 0)));
4321 int highequal
= ((high0
== 0 && high1
== 0)
4322 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4323 high0
, 1, high1
, 1)));
4325 /* Make range 0 be the range that starts first, or ends last if they
4326 start at the same value. Swap them if it isn't. */
4327 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4330 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4331 high1
, 1, high0
, 1))))
4333 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4334 tem
= low0
, low0
= low1
, low1
= tem
;
4335 tem
= high0
, high0
= high1
, high1
= tem
;
4338 /* Now flag two cases, whether the ranges are disjoint or whether the
4339 second range is totally subsumed in the first. Note that the tests
4340 below are simplified by the ones above. */
4341 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4342 high0
, 1, low1
, 0));
4343 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4344 high1
, 1, high0
, 1));
4346 /* We now have four cases, depending on whether we are including or
4347 excluding the two ranges. */
4350 /* If they don't overlap, the result is false. If the second range
4351 is a subset it is the result. Otherwise, the range is from the start
4352 of the second to the end of the first. */
4354 in_p
= 0, low
= high
= 0;
4356 in_p
= 1, low
= low1
, high
= high1
;
4358 in_p
= 1, low
= low1
, high
= high0
;
4361 else if (in0_p
&& ! in1_p
)
4363 /* If they don't overlap, the result is the first range. If they are
4364 equal, the result is false. If the second range is a subset of the
4365 first, and the ranges begin at the same place, we go from just after
4366 the end of the second range to the end of the first. If the second
4367 range is not a subset of the first, or if it is a subset and both
4368 ranges end at the same place, the range starts at the start of the
4369 first range and ends just before the second range.
4370 Otherwise, we can't describe this as a single range. */
4372 in_p
= 1, low
= low0
, high
= high0
;
4373 else if (lowequal
&& highequal
)
4374 in_p
= 0, low
= high
= 0;
4375 else if (subset
&& lowequal
)
4377 low
= range_successor (high1
);
4381 else if (! subset
|| highequal
)
4384 high
= range_predecessor (low1
);
4391 else if (! in0_p
&& in1_p
)
4393 /* If they don't overlap, the result is the second range. If the second
4394 is a subset of the first, the result is false. Otherwise,
4395 the range starts just after the first range and ends at the
4396 end of the second. */
4398 in_p
= 1, low
= low1
, high
= high1
;
4399 else if (subset
|| highequal
)
4400 in_p
= 0, low
= high
= 0;
4403 low
= range_successor (high0
);
4411 /* The case where we are excluding both ranges. Here the complex case
4412 is if they don't overlap. In that case, the only time we have a
4413 range is if they are adjacent. If the second is a subset of the
4414 first, the result is the first. Otherwise, the range to exclude
4415 starts at the beginning of the first range and ends at the end of the
4419 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4420 range_successor (high0
),
4422 in_p
= 0, low
= low0
, high
= high1
;
4425 /* Canonicalize - [min, x] into - [-, x]. */
4426 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4427 switch (TREE_CODE (TREE_TYPE (low0
)))
4430 if (TYPE_PRECISION (TREE_TYPE (low0
))
4431 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4435 if (tree_int_cst_equal (low0
,
4436 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4440 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4441 && integer_zerop (low0
))
4448 /* Canonicalize - [x, max] into - [x, -]. */
4449 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4450 switch (TREE_CODE (TREE_TYPE (high1
)))
4453 if (TYPE_PRECISION (TREE_TYPE (high1
))
4454 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4458 if (tree_int_cst_equal (high1
,
4459 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4463 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4464 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4466 integer_one_node
, 1)))
4473 /* The ranges might be also adjacent between the maximum and
4474 minimum values of the given type. For
4475 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4476 return + [x + 1, y - 1]. */
4477 if (low0
== 0 && high1
== 0)
4479 low
= range_successor (high0
);
4480 high
= range_predecessor (low1
);
4481 if (low
== 0 || high
== 0)
4491 in_p
= 0, low
= low0
, high
= high0
;
4493 in_p
= 0, low
= low0
, high
= high1
;
4496 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4501 /* Subroutine of fold, looking inside expressions of the form
4502 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4503 of the COND_EXPR. This function is being used also to optimize
4504 A op B ? C : A, by reversing the comparison first.
4506 Return a folded expression whose code is not a COND_EXPR
4507 anymore, or NULL_TREE if no folding opportunity is found. */
4510 fold_cond_expr_with_comparison (tree type
, tree arg0
, tree arg1
, tree arg2
)
4512 enum tree_code comp_code
= TREE_CODE (arg0
);
4513 tree arg00
= TREE_OPERAND (arg0
, 0);
4514 tree arg01
= TREE_OPERAND (arg0
, 1);
4515 tree arg1_type
= TREE_TYPE (arg1
);
4521 /* If we have A op 0 ? A : -A, consider applying the following
4524 A == 0? A : -A same as -A
4525 A != 0? A : -A same as A
4526 A >= 0? A : -A same as abs (A)
4527 A > 0? A : -A same as abs (A)
4528 A <= 0? A : -A same as -abs (A)
4529 A < 0? A : -A same as -abs (A)
4531 None of these transformations work for modes with signed
4532 zeros. If A is +/-0, the first two transformations will
4533 change the sign of the result (from +0 to -0, or vice
4534 versa). The last four will fix the sign of the result,
4535 even though the original expressions could be positive or
4536 negative, depending on the sign of A.
4538 Note that all these transformations are correct if A is
4539 NaN, since the two alternatives (A and -A) are also NaNs. */
4540 if ((FLOAT_TYPE_P (TREE_TYPE (arg01
))
4541 ? real_zerop (arg01
)
4542 : integer_zerop (arg01
))
4543 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4544 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4545 /* In the case that A is of the form X-Y, '-A' (arg2) may
4546 have already been folded to Y-X, check for that. */
4547 || (TREE_CODE (arg1
) == MINUS_EXPR
4548 && TREE_CODE (arg2
) == MINUS_EXPR
4549 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4550 TREE_OPERAND (arg2
, 1), 0)
4551 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4552 TREE_OPERAND (arg2
, 0), 0))))
4557 tem
= fold_convert (arg1_type
, arg1
);
4558 return pedantic_non_lvalue (fold_convert (type
, negate_expr (tem
)));
4561 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4564 if (flag_trapping_math
)
4569 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4570 arg1
= fold_convert (lang_hooks
.types
.signed_type
4571 (TREE_TYPE (arg1
)), arg1
);
4572 tem
= fold_build1 (ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4573 return pedantic_non_lvalue (fold_convert (type
, tem
));
4576 if (flag_trapping_math
)
4580 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4581 arg1
= fold_convert (lang_hooks
.types
.signed_type
4582 (TREE_TYPE (arg1
)), arg1
);
4583 tem
= fold_build1 (ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4584 return negate_expr (fold_convert (type
, tem
));
4586 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4590 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4591 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4592 both transformations are correct when A is NaN: A != 0
4593 is then true, and A == 0 is false. */
4595 if (integer_zerop (arg01
) && integer_zerop (arg2
))
4597 if (comp_code
== NE_EXPR
)
4598 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4599 else if (comp_code
== EQ_EXPR
)
4600 return build_int_cst (type
, 0);
4603 /* Try some transformations of A op B ? A : B.
4605 A == B? A : B same as B
4606 A != B? A : B same as A
4607 A >= B? A : B same as max (A, B)
4608 A > B? A : B same as max (B, A)
4609 A <= B? A : B same as min (A, B)
4610 A < B? A : B same as min (B, A)
4612 As above, these transformations don't work in the presence
4613 of signed zeros. For example, if A and B are zeros of
4614 opposite sign, the first two transformations will change
4615 the sign of the result. In the last four, the original
4616 expressions give different results for (A=+0, B=-0) and
4617 (A=-0, B=+0), but the transformed expressions do not.
4619 The first two transformations are correct if either A or B
4620 is a NaN. In the first transformation, the condition will
4621 be false, and B will indeed be chosen. In the case of the
4622 second transformation, the condition A != B will be true,
4623 and A will be chosen.
4625 The conversions to max() and min() are not correct if B is
4626 a number and A is not. The conditions in the original
4627 expressions will be false, so all four give B. The min()
4628 and max() versions would give a NaN instead. */
4629 if (operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
4630 /* Avoid these transformations if the COND_EXPR may be used
4631 as an lvalue in the C++ front-end. PR c++/19199. */
4633 || (strcmp (lang_hooks
.name
, "GNU C++") != 0
4634 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
4635 || ! maybe_lvalue_p (arg1
)
4636 || ! maybe_lvalue_p (arg2
)))
4638 tree comp_op0
= arg00
;
4639 tree comp_op1
= arg01
;
4640 tree comp_type
= TREE_TYPE (comp_op0
);
4642 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4643 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
4653 return pedantic_non_lvalue (fold_convert (type
, arg2
));
4655 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4660 /* In C++ a ?: expression can be an lvalue, so put the
4661 operand which will be used if they are equal first
4662 so that we can convert this back to the
4663 corresponding COND_EXPR. */
4664 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4666 comp_op0
= fold_convert (comp_type
, comp_op0
);
4667 comp_op1
= fold_convert (comp_type
, comp_op1
);
4668 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
4669 ? fold_build2 (MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
4670 : fold_build2 (MIN_EXPR
, comp_type
, comp_op1
, comp_op0
);
4671 return pedantic_non_lvalue (fold_convert (type
, tem
));
4678 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4680 comp_op0
= fold_convert (comp_type
, comp_op0
);
4681 comp_op1
= fold_convert (comp_type
, comp_op1
);
4682 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
4683 ? fold_build2 (MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
4684 : fold_build2 (MAX_EXPR
, comp_type
, comp_op1
, comp_op0
);
4685 return pedantic_non_lvalue (fold_convert (type
, tem
));
4689 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4690 return pedantic_non_lvalue (fold_convert (type
, arg2
));
4693 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4694 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4697 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4702 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4703 we might still be able to simplify this. For example,
4704 if C1 is one less or one more than C2, this might have started
4705 out as a MIN or MAX and been transformed by this function.
4706 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4708 if (INTEGRAL_TYPE_P (type
)
4709 && TREE_CODE (arg01
) == INTEGER_CST
4710 && TREE_CODE (arg2
) == INTEGER_CST
)
4714 /* We can replace A with C1 in this case. */
4715 arg1
= fold_convert (type
, arg01
);
4716 return fold_build3 (COND_EXPR
, type
, arg0
, arg1
, arg2
);
4719 /* If C1 is C2 + 1, this is min(A, C2). */
4720 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4722 && operand_equal_p (arg01
,
4723 const_binop (PLUS_EXPR
, arg2
,
4724 build_int_cst (type
, 1), 0),
4726 return pedantic_non_lvalue (fold_build2 (MIN_EXPR
,
4731 /* If C1 is C2 - 1, this is min(A, C2). */
4732 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4734 && operand_equal_p (arg01
,
4735 const_binop (MINUS_EXPR
, arg2
,
4736 build_int_cst (type
, 1), 0),
4738 return pedantic_non_lvalue (fold_build2 (MIN_EXPR
,
4743 /* If C1 is C2 - 1, this is max(A, C2). */
4744 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4746 && operand_equal_p (arg01
,
4747 const_binop (MINUS_EXPR
, arg2
,
4748 build_int_cst (type
, 1), 0),
4750 return pedantic_non_lvalue (fold_build2 (MAX_EXPR
,
4755 /* If C1 is C2 + 1, this is max(A, C2). */
4756 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4758 && operand_equal_p (arg01
,
4759 const_binop (PLUS_EXPR
, arg2
,
4760 build_int_cst (type
, 1), 0),
4762 return pedantic_non_lvalue (fold_build2 (MAX_EXPR
,
4776 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4777 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4780 /* EXP is some logical combination of boolean tests. See if we can
4781 merge it into some range test. Return the new tree if so. */
4784 fold_range_test (enum tree_code code
, tree type
, tree op0
, tree op1
)
4786 int or_op
= (code
== TRUTH_ORIF_EXPR
4787 || code
== TRUTH_OR_EXPR
);
4788 int in0_p
, in1_p
, in_p
;
4789 tree low0
, low1
, low
, high0
, high1
, high
;
4790 tree lhs
= make_range (op0
, &in0_p
, &low0
, &high0
);
4791 tree rhs
= make_range (op1
, &in1_p
, &low1
, &high1
);
4794 /* If this is an OR operation, invert both sides; we will invert
4795 again at the end. */
4797 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
4799 /* If both expressions are the same, if we can merge the ranges, and we
4800 can build the range test, return it or it inverted. If one of the
4801 ranges is always true or always false, consider it to be the same
4802 expression as the other. */
4803 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
4804 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
4806 && 0 != (tem
= (build_range_check (type
,
4808 : rhs
!= 0 ? rhs
: integer_zero_node
,
4810 return or_op
? invert_truthvalue (tem
) : tem
;
4812 /* On machines where the branch cost is expensive, if this is a
4813 short-circuited branch and the underlying object on both sides
4814 is the same, make a non-short-circuit operation. */
4815 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4816 && lhs
!= 0 && rhs
!= 0
4817 && (code
== TRUTH_ANDIF_EXPR
4818 || code
== TRUTH_ORIF_EXPR
)
4819 && operand_equal_p (lhs
, rhs
, 0))
4821 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4822 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4823 which cases we can't do this. */
4824 if (simple_operand_p (lhs
))
4825 return build2 (code
== TRUTH_ANDIF_EXPR
4826 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4829 else if (lang_hooks
.decls
.global_bindings_p () == 0
4830 && ! CONTAINS_PLACEHOLDER_P (lhs
))
4832 tree common
= save_expr (lhs
);
4834 if (0 != (lhs
= build_range_check (type
, common
,
4835 or_op
? ! in0_p
: in0_p
,
4837 && (0 != (rhs
= build_range_check (type
, common
,
4838 or_op
? ! in1_p
: in1_p
,
4840 return build2 (code
== TRUTH_ANDIF_EXPR
4841 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4849 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4850 bit value. Arrange things so the extra bits will be set to zero if and
4851 only if C is signed-extended to its full width. If MASK is nonzero,
4852 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4855 unextend (tree c
, int p
, int unsignedp
, tree mask
)
4857 tree type
= TREE_TYPE (c
);
4858 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
4861 if (p
== modesize
|| unsignedp
)
4864 /* We work by getting just the sign bit into the low-order bit, then
4865 into the high-order bit, then sign-extend. We then XOR that value
4867 temp
= const_binop (RSHIFT_EXPR
, c
, size_int (p
- 1), 0);
4868 temp
= const_binop (BIT_AND_EXPR
, temp
, size_int (1), 0);
4870 /* We must use a signed type in order to get an arithmetic right shift.
4871 However, we must also avoid introducing accidental overflows, so that
4872 a subsequent call to integer_zerop will work. Hence we must
4873 do the type conversion here. At this point, the constant is either
4874 zero or one, and the conversion to a signed type can never overflow.
4875 We could get an overflow if this conversion is done anywhere else. */
4876 if (TYPE_UNSIGNED (type
))
4877 temp
= fold_convert (lang_hooks
.types
.signed_type (type
), temp
);
4879 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1), 0);
4880 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1), 0);
4882 temp
= const_binop (BIT_AND_EXPR
, temp
,
4883 fold_convert (TREE_TYPE (c
), mask
), 0);
4884 /* If necessary, convert the type back to match the type of C. */
4885 if (TYPE_UNSIGNED (type
))
4886 temp
= fold_convert (type
, temp
);
4888 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
, 0));
4891 /* Find ways of folding logical expressions of LHS and RHS:
4892 Try to merge two comparisons to the same innermost item.
4893 Look for range tests like "ch >= '0' && ch <= '9'".
4894 Look for combinations of simple terms on machines with expensive branches
4895 and evaluate the RHS unconditionally.
4897 For example, if we have p->a == 2 && p->b == 4 and we can make an
4898 object large enough to span both A and B, we can do this with a comparison
4899 against the object ANDed with the a mask.
4901 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4902 operations to do this with one comparison.
4904 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4905 function and the one above.
4907 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4908 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4910 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4913 We return the simplified tree or 0 if no optimization is possible. */
4916 fold_truthop (enum tree_code code
, tree truth_type
, tree lhs
, tree rhs
)
4918 /* If this is the "or" of two comparisons, we can do something if
4919 the comparisons are NE_EXPR. If this is the "and", we can do something
4920 if the comparisons are EQ_EXPR. I.e.,
4921 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4923 WANTED_CODE is this operation code. For single bit fields, we can
4924 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4925 comparison for one-bit fields. */
4927 enum tree_code wanted_code
;
4928 enum tree_code lcode
, rcode
;
4929 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
4930 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
4931 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
4932 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
4933 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
4934 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
4935 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
4936 enum machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
4937 enum machine_mode lnmode
, rnmode
;
4938 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
4939 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
4940 tree l_const
, r_const
;
4941 tree lntype
, rntype
, result
;
4942 int first_bit
, end_bit
;
4944 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
4945 enum tree_code orig_code
= code
;
4947 /* Start by getting the comparison codes. Fail if anything is volatile.
4948 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4949 it were surrounded with a NE_EXPR. */
4951 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
4954 lcode
= TREE_CODE (lhs
);
4955 rcode
= TREE_CODE (rhs
);
4957 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
4959 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
4960 build_int_cst (TREE_TYPE (lhs
), 0));
4964 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
4966 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
4967 build_int_cst (TREE_TYPE (rhs
), 0));
4971 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
4972 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
4975 ll_arg
= TREE_OPERAND (lhs
, 0);
4976 lr_arg
= TREE_OPERAND (lhs
, 1);
4977 rl_arg
= TREE_OPERAND (rhs
, 0);
4978 rr_arg
= TREE_OPERAND (rhs
, 1);
4980 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4981 if (simple_operand_p (ll_arg
)
4982 && simple_operand_p (lr_arg
))
4985 if (operand_equal_p (ll_arg
, rl_arg
, 0)
4986 && operand_equal_p (lr_arg
, rr_arg
, 0))
4988 result
= combine_comparisons (code
, lcode
, rcode
,
4989 truth_type
, ll_arg
, lr_arg
);
4993 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
4994 && operand_equal_p (lr_arg
, rl_arg
, 0))
4996 result
= combine_comparisons (code
, lcode
,
4997 swap_tree_comparison (rcode
),
4998 truth_type
, ll_arg
, lr_arg
);
5004 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
5005 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
5007 /* If the RHS can be evaluated unconditionally and its operands are
5008 simple, it wins to evaluate the RHS unconditionally on machines
5009 with expensive branches. In this case, this isn't a comparison
5010 that can be merged. Avoid doing this if the RHS is a floating-point
5011 comparison since those can trap. */
5013 if (BRANCH_COST
>= 2
5014 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
5015 && simple_operand_p (rl_arg
)
5016 && simple_operand_p (rr_arg
))
5018 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5019 if (code
== TRUTH_OR_EXPR
5020 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
5021 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
5022 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
))
5023 return build2 (NE_EXPR
, truth_type
,
5024 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5026 build_int_cst (TREE_TYPE (ll_arg
), 0));
5028 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5029 if (code
== TRUTH_AND_EXPR
5030 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5031 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5032 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
))
5033 return build2 (EQ_EXPR
, truth_type
,
5034 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5036 build_int_cst (TREE_TYPE (ll_arg
), 0));
5038 if (LOGICAL_OP_NON_SHORT_CIRCUIT
)
5040 if (code
!= orig_code
|| lhs
!= orig_lhs
|| rhs
!= orig_rhs
)
5041 return build2 (code
, truth_type
, lhs
, rhs
);
5046 /* See if the comparisons can be merged. Then get all the parameters for
5049 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5050 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
5054 ll_inner
= decode_field_reference (ll_arg
,
5055 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5056 &ll_unsignedp
, &volatilep
, &ll_mask
,
5058 lr_inner
= decode_field_reference (lr_arg
,
5059 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5060 &lr_unsignedp
, &volatilep
, &lr_mask
,
5062 rl_inner
= decode_field_reference (rl_arg
,
5063 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5064 &rl_unsignedp
, &volatilep
, &rl_mask
,
5066 rr_inner
= decode_field_reference (rr_arg
,
5067 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5068 &rr_unsignedp
, &volatilep
, &rr_mask
,
5071 /* It must be true that the inner operation on the lhs of each
5072 comparison must be the same if we are to be able to do anything.
5073 Then see if we have constants. If not, the same must be true for
5075 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
5076 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5079 if (TREE_CODE (lr_arg
) == INTEGER_CST
5080 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5081 l_const
= lr_arg
, r_const
= rr_arg
;
5082 else if (lr_inner
== 0 || rr_inner
== 0
5083 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5086 l_const
= r_const
= 0;
5088 /* If either comparison code is not correct for our logical operation,
5089 fail. However, we can convert a one-bit comparison against zero into
5090 the opposite comparison against that bit being set in the field. */
5092 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5093 if (lcode
!= wanted_code
)
5095 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5097 /* Make the left operand unsigned, since we are only interested
5098 in the value of one bit. Otherwise we are doing the wrong
5107 /* This is analogous to the code for l_const above. */
5108 if (rcode
!= wanted_code
)
5110 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5119 /* See if we can find a mode that contains both fields being compared on
5120 the left. If we can't, fail. Otherwise, update all constants and masks
5121 to be relative to a field of that size. */
5122 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5123 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5124 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
5125 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5127 if (lnmode
== VOIDmode
)
5130 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5131 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5132 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5133 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5135 if (BYTES_BIG_ENDIAN
)
5137 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5138 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5141 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert (lntype
, ll_mask
),
5142 size_int (xll_bitpos
), 0);
5143 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert (lntype
, rl_mask
),
5144 size_int (xrl_bitpos
), 0);
5148 l_const
= fold_convert (lntype
, l_const
);
5149 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5150 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
), 0);
5151 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5152 fold_build1 (BIT_NOT_EXPR
,
5156 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5158 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5163 r_const
= fold_convert (lntype
, r_const
);
5164 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5165 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
), 0);
5166 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5167 fold_build1 (BIT_NOT_EXPR
,
5171 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5173 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5177 /* If the right sides are not constant, do the same for it. Also,
5178 disallow this optimization if a size or signedness mismatch occurs
5179 between the left and right sides. */
5182 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5183 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5184 /* Make sure the two fields on the right
5185 correspond to the left without being swapped. */
5186 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5189 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5190 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5191 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
5192 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
5194 if (rnmode
== VOIDmode
)
5197 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5198 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5199 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5200 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5202 if (BYTES_BIG_ENDIAN
)
5204 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5205 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5208 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert (rntype
, lr_mask
),
5209 size_int (xlr_bitpos
), 0);
5210 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert (rntype
, rr_mask
),
5211 size_int (xrr_bitpos
), 0);
5213 /* Make a mask that corresponds to both fields being compared.
5214 Do this for both items being compared. If the operands are the
5215 same size and the bits being compared are in the same position
5216 then we can do this by masking both and comparing the masked
5218 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
, 0);
5219 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
, 0);
5220 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5222 lhs
= make_bit_field_ref (ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5223 ll_unsignedp
|| rl_unsignedp
);
5224 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5225 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5227 rhs
= make_bit_field_ref (lr_inner
, rntype
, rnbitsize
, rnbitpos
,
5228 lr_unsignedp
|| rr_unsignedp
);
5229 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5230 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5232 return build2 (wanted_code
, truth_type
, lhs
, rhs
);
5235 /* There is still another way we can do something: If both pairs of
5236 fields being compared are adjacent, we may be able to make a wider
5237 field containing them both.
5239 Note that we still must mask the lhs/rhs expressions. Furthermore,
5240 the mask must be shifted to account for the shift done by
5241 make_bit_field_ref. */
5242 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5243 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5244 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5245 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5249 lhs
= make_bit_field_ref (ll_inner
, lntype
, ll_bitsize
+ rl_bitsize
,
5250 MIN (ll_bitpos
, rl_bitpos
), ll_unsignedp
);
5251 rhs
= make_bit_field_ref (lr_inner
, rntype
, lr_bitsize
+ rr_bitsize
,
5252 MIN (lr_bitpos
, rr_bitpos
), lr_unsignedp
);
5254 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5255 size_int (MIN (xll_bitpos
, xrl_bitpos
)), 0);
5256 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5257 size_int (MIN (xlr_bitpos
, xrr_bitpos
)), 0);
5259 /* Convert to the smaller type before masking out unwanted bits. */
5261 if (lntype
!= rntype
)
5263 if (lnbitsize
> rnbitsize
)
5265 lhs
= fold_convert (rntype
, lhs
);
5266 ll_mask
= fold_convert (rntype
, ll_mask
);
5269 else if (lnbitsize
< rnbitsize
)
5271 rhs
= fold_convert (lntype
, rhs
);
5272 lr_mask
= fold_convert (lntype
, lr_mask
);
5277 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5278 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5280 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5281 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5283 return build2 (wanted_code
, truth_type
, lhs
, rhs
);
5289 /* Handle the case of comparisons with constants. If there is something in
5290 common between the masks, those bits of the constants must be the same.
5291 If not, the condition is always false. Test for this to avoid generating
5292 incorrect code below. */
5293 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
, 0);
5294 if (! integer_zerop (result
)
5295 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
, 0),
5296 const_binop (BIT_AND_EXPR
, result
, r_const
, 0)) != 1)
5298 if (wanted_code
== NE_EXPR
)
5300 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5301 return constant_boolean_node (true, truth_type
);
5305 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5306 return constant_boolean_node (false, truth_type
);
5310 /* Construct the expression we will return. First get the component
5311 reference we will make. Unless the mask is all ones the width of
5312 that field, perform the mask operation. Then compare with the
5314 result
= make_bit_field_ref (ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5315 ll_unsignedp
|| rl_unsignedp
);
5317 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
, 0);
5318 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5319 result
= build2 (BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5321 return build2 (wanted_code
, truth_type
, result
,
5322 const_binop (BIT_IOR_EXPR
, l_const
, r_const
, 0));
5325 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5329 optimize_minmax_comparison (enum tree_code code
, tree type
, tree op0
, tree op1
)
5332 enum tree_code op_code
;
5333 tree comp_const
= op1
;
5335 int consts_equal
, consts_lt
;
5338 STRIP_SIGN_NOPS (arg0
);
5340 op_code
= TREE_CODE (arg0
);
5341 minmax_const
= TREE_OPERAND (arg0
, 1);
5342 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5343 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5344 inner
= TREE_OPERAND (arg0
, 0);
5346 /* If something does not permit us to optimize, return the original tree. */
5347 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5348 || TREE_CODE (comp_const
) != INTEGER_CST
5349 || TREE_OVERFLOW (comp_const
)
5350 || TREE_CODE (minmax_const
) != INTEGER_CST
5351 || TREE_OVERFLOW (minmax_const
))
5354 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5355 and GT_EXPR, doing the rest with recursive calls using logical
5359 case NE_EXPR
: case LT_EXPR
: case LE_EXPR
:
5361 tree tem
= optimize_minmax_comparison (invert_tree_comparison (code
, false),
5364 return invert_truthvalue (tem
);
5370 fold_build2 (TRUTH_ORIF_EXPR
, type
,
5371 optimize_minmax_comparison
5372 (EQ_EXPR
, type
, arg0
, comp_const
),
5373 optimize_minmax_comparison
5374 (GT_EXPR
, type
, arg0
, comp_const
));
5377 if (op_code
== MAX_EXPR
&& consts_equal
)
5378 /* MAX (X, 0) == 0 -> X <= 0 */
5379 return fold_build2 (LE_EXPR
, type
, inner
, comp_const
);
5381 else if (op_code
== MAX_EXPR
&& consts_lt
)
5382 /* MAX (X, 0) == 5 -> X == 5 */
5383 return fold_build2 (EQ_EXPR
, type
, inner
, comp_const
);
5385 else if (op_code
== MAX_EXPR
)
5386 /* MAX (X, 0) == -1 -> false */
5387 return omit_one_operand (type
, integer_zero_node
, inner
);
5389 else if (consts_equal
)
5390 /* MIN (X, 0) == 0 -> X >= 0 */
5391 return fold_build2 (GE_EXPR
, type
, inner
, comp_const
);
5394 /* MIN (X, 0) == 5 -> false */
5395 return omit_one_operand (type
, integer_zero_node
, inner
);
5398 /* MIN (X, 0) == -1 -> X == -1 */
5399 return fold_build2 (EQ_EXPR
, type
, inner
, comp_const
);
5402 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5403 /* MAX (X, 0) > 0 -> X > 0
5404 MAX (X, 0) > 5 -> X > 5 */
5405 return fold_build2 (GT_EXPR
, type
, inner
, comp_const
);
5407 else if (op_code
== MAX_EXPR
)
5408 /* MAX (X, 0) > -1 -> true */
5409 return omit_one_operand (type
, integer_one_node
, inner
);
5411 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5412 /* MIN (X, 0) > 0 -> false
5413 MIN (X, 0) > 5 -> false */
5414 return omit_one_operand (type
, integer_zero_node
, inner
);
5417 /* MIN (X, 0) > -1 -> X > -1 */
5418 return fold_build2 (GT_EXPR
, type
, inner
, comp_const
);
5425 /* T is an integer expression that is being multiplied, divided, or taken a
5426 modulus (CODE says which and what kind of divide or modulus) by a
5427 constant C. See if we can eliminate that operation by folding it with
5428 other operations already in T. WIDE_TYPE, if non-null, is a type that
5429 should be used for the computation if wider than our type.
5431 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5432 (X * 2) + (Y * 4). We must, however, be assured that either the original
5433 expression would not overflow or that overflow is undefined for the type
5434 in the language in question.
5436 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5437 the machine has a multiply-accumulate insn or that this is part of an
5438 addressing calculation.
5440 If we return a non-null expression, it is an equivalent form of the
5441 original computation, but need not be in the original type. */
5444 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
)
5446 /* To avoid exponential search depth, refuse to allow recursion past
5447 three levels. Beyond that (1) it's highly unlikely that we'll find
5448 something interesting and (2) we've probably processed it before
5449 when we built the inner expression. */
5458 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
);
5465 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
)
5467 tree type
= TREE_TYPE (t
);
5468 enum tree_code tcode
= TREE_CODE (t
);
5469 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5470 > GET_MODE_SIZE (TYPE_MODE (type
)))
5471 ? wide_type
: type
);
5473 int same_p
= tcode
== code
;
5474 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5476 /* Don't deal with constants of zero here; they confuse the code below. */
5477 if (integer_zerop (c
))
5480 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5481 op0
= TREE_OPERAND (t
, 0);
5483 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5484 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5486 /* Note that we need not handle conditional operations here since fold
5487 already handles those cases. So just do arithmetic here. */
5491 /* For a constant, we can always simplify if we are a multiply
5492 or (for divide and modulus) if it is a multiple of our constant. */
5493 if (code
== MULT_EXPR
5494 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, t
, c
, 0)))
5495 return const_binop (code
, fold_convert (ctype
, t
),
5496 fold_convert (ctype
, c
), 0);
5499 case CONVERT_EXPR
: case NON_LVALUE_EXPR
: case NOP_EXPR
:
5500 /* If op0 is an expression ... */
5501 if ((COMPARISON_CLASS_P (op0
)
5502 || UNARY_CLASS_P (op0
)
5503 || BINARY_CLASS_P (op0
)
5504 || EXPRESSION_CLASS_P (op0
))
5505 /* ... and is unsigned, and its type is smaller than ctype,
5506 then we cannot pass through as widening. */
5507 && ((TYPE_UNSIGNED (TREE_TYPE (op0
))
5508 && ! (TREE_CODE (TREE_TYPE (op0
)) == INTEGER_TYPE
5509 && TYPE_IS_SIZETYPE (TREE_TYPE (op0
)))
5510 && (GET_MODE_SIZE (TYPE_MODE (ctype
))
5511 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0
)))))
5512 /* ... or this is a truncation (t is narrower than op0),
5513 then we cannot pass through this narrowing. */
5514 || (GET_MODE_SIZE (TYPE_MODE (type
))
5515 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0
))))
5516 /* ... or signedness changes for division or modulus,
5517 then we cannot pass through this conversion. */
5518 || (code
!= MULT_EXPR
5519 && (TYPE_UNSIGNED (ctype
)
5520 != TYPE_UNSIGNED (TREE_TYPE (op0
))))))
5523 /* Pass the constant down and see if we can make a simplification. If
5524 we can, replace this expression with the inner simplification for
5525 possible later conversion to our or some other type. */
5526 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
5527 && TREE_CODE (t2
) == INTEGER_CST
5528 && !TREE_OVERFLOW (t2
)
5529 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
5531 ? ctype
: NULL_TREE
))))
5536 /* If widening the type changes it from signed to unsigned, then we
5537 must avoid building ABS_EXPR itself as unsigned. */
5538 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
5540 tree cstype
= (*lang_hooks
.types
.signed_type
) (ctype
);
5541 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
)) != 0)
5543 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
5544 return fold_convert (ctype
, t1
);
5550 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
)) != 0)
5551 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
5554 case MIN_EXPR
: case MAX_EXPR
:
5555 /* If widening the type changes the signedness, then we can't perform
5556 this optimization as that changes the result. */
5557 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
5560 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5561 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
)) != 0
5562 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
)) != 0)
5564 if (tree_int_cst_sgn (c
) < 0)
5565 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
5567 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5568 fold_convert (ctype
, t2
));
5572 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
5573 /* If the second operand is constant, this is a multiplication
5574 or floor division, by a power of two, so we can treat it that
5575 way unless the multiplier or divisor overflows. Signed
5576 left-shift overflow is implementation-defined rather than
5577 undefined in C90, so do not convert signed left shift into
5579 if (TREE_CODE (op1
) == INTEGER_CST
5580 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
5581 /* const_binop may not detect overflow correctly,
5582 so check for it explicitly here. */
5583 && TYPE_PRECISION (TREE_TYPE (size_one_node
)) > TREE_INT_CST_LOW (op1
)
5584 && TREE_INT_CST_HIGH (op1
) == 0
5585 && 0 != (t1
= fold_convert (ctype
,
5586 const_binop (LSHIFT_EXPR
,
5589 && !TREE_OVERFLOW (t1
))
5590 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
5591 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
5592 ctype
, fold_convert (ctype
, op0
), t1
),
5593 c
, code
, wide_type
);
5596 case PLUS_EXPR
: case MINUS_EXPR
:
5597 /* See if we can eliminate the operation on both sides. If we can, we
5598 can return a new PLUS or MINUS. If we can't, the only remaining
5599 cases where we can do anything are if the second operand is a
5601 t1
= extract_muldiv (op0
, c
, code
, wide_type
);
5602 t2
= extract_muldiv (op1
, c
, code
, wide_type
);
5603 if (t1
!= 0 && t2
!= 0
5604 && (code
== MULT_EXPR
5605 /* If not multiplication, we can only do this if both operands
5606 are divisible by c. */
5607 || (multiple_of_p (ctype
, op0
, c
)
5608 && multiple_of_p (ctype
, op1
, c
))))
5609 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5610 fold_convert (ctype
, t2
));
5612 /* If this was a subtraction, negate OP1 and set it to be an addition.
5613 This simplifies the logic below. */
5614 if (tcode
== MINUS_EXPR
)
5615 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
5617 if (TREE_CODE (op1
) != INTEGER_CST
)
5620 /* If either OP1 or C are negative, this optimization is not safe for
5621 some of the division and remainder types while for others we need
5622 to change the code. */
5623 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
5625 if (code
== CEIL_DIV_EXPR
)
5626 code
= FLOOR_DIV_EXPR
;
5627 else if (code
== FLOOR_DIV_EXPR
)
5628 code
= CEIL_DIV_EXPR
;
5629 else if (code
!= MULT_EXPR
5630 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
5634 /* If it's a multiply or a division/modulus operation of a multiple
5635 of our constant, do the operation and verify it doesn't overflow. */
5636 if (code
== MULT_EXPR
5637 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
5639 op1
= const_binop (code
, fold_convert (ctype
, op1
),
5640 fold_convert (ctype
, c
), 0);
5641 /* We allow the constant to overflow with wrapping semantics. */
5643 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
5649 /* If we have an unsigned type is not a sizetype, we cannot widen
5650 the operation since it will change the result if the original
5651 computation overflowed. */
5652 if (TYPE_UNSIGNED (ctype
)
5653 && ! (TREE_CODE (ctype
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (ctype
))
5657 /* If we were able to eliminate our operation from the first side,
5658 apply our operation to the second side and reform the PLUS. */
5659 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
5660 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
5662 /* The last case is if we are a multiply. In that case, we can
5663 apply the distributive law to commute the multiply and addition
5664 if the multiplication of the constants doesn't overflow. */
5665 if (code
== MULT_EXPR
)
5666 return fold_build2 (tcode
, ctype
,
5667 fold_build2 (code
, ctype
,
5668 fold_convert (ctype
, op0
),
5669 fold_convert (ctype
, c
)),
5675 /* We have a special case here if we are doing something like
5676 (C * 8) % 4 since we know that's zero. */
5677 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
5678 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
5679 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
5680 && integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
5681 return omit_one_operand (type
, integer_zero_node
, op0
);
5683 /* ... fall through ... */
5685 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
5686 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
5687 /* If we can extract our operation from the LHS, do so and return a
5688 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5689 do something only if the second operand is a constant. */
5691 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
)) != 0)
5692 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5693 fold_convert (ctype
, op1
));
5694 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
5695 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
)) != 0)
5696 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5697 fold_convert (ctype
, t1
));
5698 else if (TREE_CODE (op1
) != INTEGER_CST
)
5701 /* If these are the same operation types, we can associate them
5702 assuming no overflow. */
5704 && 0 != (t1
= const_binop (MULT_EXPR
, fold_convert (ctype
, op1
),
5705 fold_convert (ctype
, c
), 0))
5706 && !TREE_OVERFLOW (t1
))
5707 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
), t1
);
5709 /* If these operations "cancel" each other, we have the main
5710 optimizations of this pass, which occur when either constant is a
5711 multiple of the other, in which case we replace this with either an
5712 operation or CODE or TCODE.
5714 If we have an unsigned type that is not a sizetype, we cannot do
5715 this since it will change the result if the original computation
5717 if ((TYPE_OVERFLOW_UNDEFINED (ctype
)
5718 || (TREE_CODE (ctype
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (ctype
)))
5719 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
5720 || (tcode
== MULT_EXPR
5721 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
5722 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
)))
5724 if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
5725 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5726 fold_convert (ctype
,
5727 const_binop (TRUNC_DIV_EXPR
,
5729 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, c
, op1
, 0)))
5730 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
5731 fold_convert (ctype
,
5732 const_binop (TRUNC_DIV_EXPR
,
5744 /* Return a node which has the indicated constant VALUE (either 0 or
5745 1), and is of the indicated TYPE. */
5748 constant_boolean_node (int value
, tree type
)
5750 if (type
== integer_type_node
)
5751 return value
? integer_one_node
: integer_zero_node
;
5752 else if (type
== boolean_type_node
)
5753 return value
? boolean_true_node
: boolean_false_node
;
5755 return build_int_cst (type
, value
);
5759 /* Return true if expr looks like an ARRAY_REF and set base and
5760 offset to the appropriate trees. If there is no offset,
5761 offset is set to NULL_TREE. Base will be canonicalized to
5762 something you can get the element type from using
5763 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5764 in bytes to the base. */
5767 extract_array_ref (tree expr
, tree
*base
, tree
*offset
)
5769 /* One canonical form is a PLUS_EXPR with the first
5770 argument being an ADDR_EXPR with a possible NOP_EXPR
5772 if (TREE_CODE (expr
) == PLUS_EXPR
)
5774 tree op0
= TREE_OPERAND (expr
, 0);
5775 tree inner_base
, dummy1
;
5776 /* Strip NOP_EXPRs here because the C frontends and/or
5777 folders present us (int *)&x.a + 4B possibly. */
5779 if (extract_array_ref (op0
, &inner_base
, &dummy1
))
5782 if (dummy1
== NULL_TREE
)
5783 *offset
= TREE_OPERAND (expr
, 1);
5785 *offset
= fold_build2 (PLUS_EXPR
, TREE_TYPE (expr
),
5786 dummy1
, TREE_OPERAND (expr
, 1));
5790 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5791 which we transform into an ADDR_EXPR with appropriate
5792 offset. For other arguments to the ADDR_EXPR we assume
5793 zero offset and as such do not care about the ADDR_EXPR
5794 type and strip possible nops from it. */
5795 else if (TREE_CODE (expr
) == ADDR_EXPR
)
5797 tree op0
= TREE_OPERAND (expr
, 0);
5798 if (TREE_CODE (op0
) == ARRAY_REF
)
5800 tree idx
= TREE_OPERAND (op0
, 1);
5801 *base
= TREE_OPERAND (op0
, 0);
5802 *offset
= fold_build2 (MULT_EXPR
, TREE_TYPE (idx
), idx
,
5803 array_ref_element_size (op0
));
5807 /* Handle array-to-pointer decay as &a. */
5808 if (TREE_CODE (TREE_TYPE (op0
)) == ARRAY_TYPE
)
5809 *base
= TREE_OPERAND (expr
, 0);
5812 *offset
= NULL_TREE
;
5816 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5817 else if (SSA_VAR_P (expr
)
5818 && TREE_CODE (TREE_TYPE (expr
)) == POINTER_TYPE
)
5821 *offset
= NULL_TREE
;
5829 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5830 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5831 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5832 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5833 COND is the first argument to CODE; otherwise (as in the example
5834 given here), it is the second argument. TYPE is the type of the
5835 original expression. Return NULL_TREE if no simplification is
5839 fold_binary_op_with_conditional_arg (enum tree_code code
,
5840 tree type
, tree op0
, tree op1
,
5841 tree cond
, tree arg
, int cond_first_p
)
5843 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
5844 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
5845 tree test
, true_value
, false_value
;
5846 tree lhs
= NULL_TREE
;
5847 tree rhs
= NULL_TREE
;
5849 /* This transformation is only worthwhile if we don't have to wrap
5850 arg in a SAVE_EXPR, and the operation can be simplified on at least
5851 one of the branches once its pushed inside the COND_EXPR. */
5852 if (!TREE_CONSTANT (arg
))
5855 if (TREE_CODE (cond
) == COND_EXPR
)
5857 test
= TREE_OPERAND (cond
, 0);
5858 true_value
= TREE_OPERAND (cond
, 1);
5859 false_value
= TREE_OPERAND (cond
, 2);
5860 /* If this operand throws an expression, then it does not make
5861 sense to try to perform a logical or arithmetic operation
5863 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
5865 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
5870 tree testtype
= TREE_TYPE (cond
);
5872 true_value
= constant_boolean_node (true, testtype
);
5873 false_value
= constant_boolean_node (false, testtype
);
5876 arg
= fold_convert (arg_type
, arg
);
5879 true_value
= fold_convert (cond_type
, true_value
);
5881 lhs
= fold_build2 (code
, type
, true_value
, arg
);
5883 lhs
= fold_build2 (code
, type
, arg
, true_value
);
5887 false_value
= fold_convert (cond_type
, false_value
);
5889 rhs
= fold_build2 (code
, type
, false_value
, arg
);
5891 rhs
= fold_build2 (code
, type
, arg
, false_value
);
5894 test
= fold_build3 (COND_EXPR
, type
, test
, lhs
, rhs
);
5895 return fold_convert (type
, test
);
5899 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5901 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5902 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5903 ADDEND is the same as X.
5905 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5906 and finite. The problematic cases are when X is zero, and its mode
5907 has signed zeros. In the case of rounding towards -infinity,
5908 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5909 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5912 fold_real_zero_addition_p (tree type
, tree addend
, int negate
)
5914 if (!real_zerop (addend
))
5917 /* Don't allow the fold with -fsignaling-nans. */
5918 if (HONOR_SNANS (TYPE_MODE (type
)))
5921 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5922 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
5925 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5926 if (TREE_CODE (addend
) == REAL_CST
5927 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
5930 /* The mode has signed zeros, and we have to honor their sign.
5931 In this situation, there is only one case we can return true for.
5932 X - 0 is the same as X unless rounding towards -infinity is
5934 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
));
5937 /* Subroutine of fold() that checks comparisons of built-in math
5938 functions against real constants.
5940 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5941 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5942 is the type of the result and ARG0 and ARG1 are the operands of the
5943 comparison. ARG1 must be a TREE_REAL_CST.
5945 The function returns the constant folded tree if a simplification
5946 can be made, and NULL_TREE otherwise. */
5949 fold_mathfn_compare (enum built_in_function fcode
, enum tree_code code
,
5950 tree type
, tree arg0
, tree arg1
)
5954 if (BUILTIN_SQRT_P (fcode
))
5956 tree arg
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
5957 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (arg0
));
5959 c
= TREE_REAL_CST (arg1
);
5960 if (REAL_VALUE_NEGATIVE (c
))
5962 /* sqrt(x) < y is always false, if y is negative. */
5963 if (code
== EQ_EXPR
|| code
== LT_EXPR
|| code
== LE_EXPR
)
5964 return omit_one_operand (type
, integer_zero_node
, arg
);
5966 /* sqrt(x) > y is always true, if y is negative and we
5967 don't care about NaNs, i.e. negative values of x. */
5968 if (code
== NE_EXPR
|| !HONOR_NANS (mode
))
5969 return omit_one_operand (type
, integer_one_node
, arg
);
5971 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5972 return fold_build2 (GE_EXPR
, type
, arg
,
5973 build_real (TREE_TYPE (arg
), dconst0
));
5975 else if (code
== GT_EXPR
|| code
== GE_EXPR
)
5979 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
5980 real_convert (&c2
, mode
, &c2
);
5982 if (REAL_VALUE_ISINF (c2
))
5984 /* sqrt(x) > y is x == +Inf, when y is very large. */
5985 if (HONOR_INFINITIES (mode
))
5986 return fold_build2 (EQ_EXPR
, type
, arg
,
5987 build_real (TREE_TYPE (arg
), c2
));
5989 /* sqrt(x) > y is always false, when y is very large
5990 and we don't care about infinities. */
5991 return omit_one_operand (type
, integer_zero_node
, arg
);
5994 /* sqrt(x) > c is the same as x > c*c. */
5995 return fold_build2 (code
, type
, arg
,
5996 build_real (TREE_TYPE (arg
), c2
));
5998 else if (code
== LT_EXPR
|| code
== LE_EXPR
)
6002 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6003 real_convert (&c2
, mode
, &c2
);
6005 if (REAL_VALUE_ISINF (c2
))
6007 /* sqrt(x) < y is always true, when y is a very large
6008 value and we don't care about NaNs or Infinities. */
6009 if (! HONOR_NANS (mode
) && ! HONOR_INFINITIES (mode
))
6010 return omit_one_operand (type
, integer_one_node
, arg
);
6012 /* sqrt(x) < y is x != +Inf when y is very large and we
6013 don't care about NaNs. */
6014 if (! HONOR_NANS (mode
))
6015 return fold_build2 (NE_EXPR
, type
, arg
,
6016 build_real (TREE_TYPE (arg
), c2
));
6018 /* sqrt(x) < y is x >= 0 when y is very large and we
6019 don't care about Infinities. */
6020 if (! HONOR_INFINITIES (mode
))
6021 return fold_build2 (GE_EXPR
, type
, arg
,
6022 build_real (TREE_TYPE (arg
), dconst0
));
6024 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6025 if (lang_hooks
.decls
.global_bindings_p () != 0
6026 || CONTAINS_PLACEHOLDER_P (arg
))
6029 arg
= save_expr (arg
);
6030 return fold_build2 (TRUTH_ANDIF_EXPR
, type
,
6031 fold_build2 (GE_EXPR
, type
, arg
,
6032 build_real (TREE_TYPE (arg
),
6034 fold_build2 (NE_EXPR
, type
, arg
,
6035 build_real (TREE_TYPE (arg
),
6039 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6040 if (! HONOR_NANS (mode
))
6041 return fold_build2 (code
, type
, arg
,
6042 build_real (TREE_TYPE (arg
), c2
));
6044 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6045 if (lang_hooks
.decls
.global_bindings_p () == 0
6046 && ! CONTAINS_PLACEHOLDER_P (arg
))
6048 arg
= save_expr (arg
);
6049 return fold_build2 (TRUTH_ANDIF_EXPR
, type
,
6050 fold_build2 (GE_EXPR
, type
, arg
,
6051 build_real (TREE_TYPE (arg
),
6053 fold_build2 (code
, type
, arg
,
6054 build_real (TREE_TYPE (arg
),
6063 /* Subroutine of fold() that optimizes comparisons against Infinities,
6064 either +Inf or -Inf.
6066 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6067 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6068 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6070 The function returns the constant folded tree if a simplification
6071 can be made, and NULL_TREE otherwise. */
6074 fold_inf_compare (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6076 enum machine_mode mode
;
6077 REAL_VALUE_TYPE max
;
6081 mode
= TYPE_MODE (TREE_TYPE (arg0
));
6083 /* For negative infinity swap the sense of the comparison. */
6084 neg
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
));
6086 code
= swap_tree_comparison (code
);
6091 /* x > +Inf is always false, if with ignore sNANs. */
6092 if (HONOR_SNANS (mode
))
6094 return omit_one_operand (type
, integer_zero_node
, arg0
);
6097 /* x <= +Inf is always true, if we don't case about NaNs. */
6098 if (! HONOR_NANS (mode
))
6099 return omit_one_operand (type
, integer_one_node
, arg0
);
6101 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6102 if (lang_hooks
.decls
.global_bindings_p () == 0
6103 && ! CONTAINS_PLACEHOLDER_P (arg0
))
6105 arg0
= save_expr (arg0
);
6106 return fold_build2 (EQ_EXPR
, type
, arg0
, arg0
);
6112 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6113 real_maxval (&max
, neg
, mode
);
6114 return fold_build2 (neg
? LT_EXPR
: GT_EXPR
, type
,
6115 arg0
, build_real (TREE_TYPE (arg0
), max
));
6118 /* x < +Inf is always equal to x <= DBL_MAX. */
6119 real_maxval (&max
, neg
, mode
);
6120 return fold_build2 (neg
? GE_EXPR
: LE_EXPR
, type
,
6121 arg0
, build_real (TREE_TYPE (arg0
), max
));
6124 /* x != +Inf is always equal to !(x > DBL_MAX). */
6125 real_maxval (&max
, neg
, mode
);
6126 if (! HONOR_NANS (mode
))
6127 return fold_build2 (neg
? GE_EXPR
: LE_EXPR
, type
,
6128 arg0
, build_real (TREE_TYPE (arg0
), max
));
6130 temp
= fold_build2 (neg
? LT_EXPR
: GT_EXPR
, type
,
6131 arg0
, build_real (TREE_TYPE (arg0
), max
));
6132 return fold_build1 (TRUTH_NOT_EXPR
, type
, temp
);
6141 /* Subroutine of fold() that optimizes comparisons of a division by
6142 a nonzero integer constant against an integer constant, i.e.
6145 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6146 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6147 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6149 The function returns the constant folded tree if a simplification
6150 can be made, and NULL_TREE otherwise. */
6153 fold_div_compare (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6155 tree prod
, tmp
, hi
, lo
;
6156 tree arg00
= TREE_OPERAND (arg0
, 0);
6157 tree arg01
= TREE_OPERAND (arg0
, 1);
6158 unsigned HOST_WIDE_INT lpart
;
6159 HOST_WIDE_INT hpart
;
6160 bool unsigned_p
= TYPE_UNSIGNED (TREE_TYPE (arg0
));
6164 /* We have to do this the hard way to detect unsigned overflow.
6165 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6166 overflow
= mul_double_with_sign (TREE_INT_CST_LOW (arg01
),
6167 TREE_INT_CST_HIGH (arg01
),
6168 TREE_INT_CST_LOW (arg1
),
6169 TREE_INT_CST_HIGH (arg1
),
6170 &lpart
, &hpart
, unsigned_p
);
6171 prod
= force_fit_type_double (TREE_TYPE (arg00
), lpart
, hpart
,
6173 neg_overflow
= false;
6177 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6178 build_int_cst (TREE_TYPE (arg01
), 1), 0);
6181 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6182 overflow
= add_double_with_sign (TREE_INT_CST_LOW (prod
),
6183 TREE_INT_CST_HIGH (prod
),
6184 TREE_INT_CST_LOW (tmp
),
6185 TREE_INT_CST_HIGH (tmp
),
6186 &lpart
, &hpart
, unsigned_p
);
6187 hi
= force_fit_type_double (TREE_TYPE (arg00
), lpart
, hpart
,
6188 -1, overflow
| TREE_OVERFLOW (prod
));
6190 else if (tree_int_cst_sgn (arg01
) >= 0)
6192 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6193 build_int_cst (TREE_TYPE (arg01
), 1), 0);
6194 switch (tree_int_cst_sgn (arg1
))
6197 neg_overflow
= true;
6198 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
, 0);
6203 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6208 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
, 0);
6218 /* A negative divisor reverses the relational operators. */
6219 code
= swap_tree_comparison (code
);
6221 tmp
= int_const_binop (PLUS_EXPR
, arg01
,
6222 build_int_cst (TREE_TYPE (arg01
), 1), 0);
6223 switch (tree_int_cst_sgn (arg1
))
6226 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
, 0);
6231 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6236 neg_overflow
= true;
6237 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
, 0);
6249 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6250 return omit_one_operand (type
, integer_zero_node
, arg00
);
6251 if (TREE_OVERFLOW (hi
))
6252 return fold_build2 (GE_EXPR
, type
, arg00
, lo
);
6253 if (TREE_OVERFLOW (lo
))
6254 return fold_build2 (LE_EXPR
, type
, arg00
, hi
);
6255 return build_range_check (type
, arg00
, 1, lo
, hi
);
6258 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6259 return omit_one_operand (type
, integer_one_node
, arg00
);
6260 if (TREE_OVERFLOW (hi
))
6261 return fold_build2 (LT_EXPR
, type
, arg00
, lo
);
6262 if (TREE_OVERFLOW (lo
))
6263 return fold_build2 (GT_EXPR
, type
, arg00
, hi
);
6264 return build_range_check (type
, arg00
, 0, lo
, hi
);
6267 if (TREE_OVERFLOW (lo
))
6269 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6270 return omit_one_operand (type
, tmp
, arg00
);
6272 return fold_build2 (LT_EXPR
, type
, arg00
, lo
);
6275 if (TREE_OVERFLOW (hi
))
6277 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6278 return omit_one_operand (type
, tmp
, arg00
);
6280 return fold_build2 (LE_EXPR
, type
, arg00
, hi
);
6283 if (TREE_OVERFLOW (hi
))
6285 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6286 return omit_one_operand (type
, tmp
, arg00
);
6288 return fold_build2 (GT_EXPR
, type
, arg00
, hi
);
6291 if (TREE_OVERFLOW (lo
))
6293 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6294 return omit_one_operand (type
, tmp
, arg00
);
6296 return fold_build2 (GE_EXPR
, type
, arg00
, lo
);
6306 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6307 equality/inequality test, then return a simplified form of the test
6308 using a sign testing. Otherwise return NULL. TYPE is the desired
6312 fold_single_bit_test_into_sign_test (enum tree_code code
, tree arg0
, tree arg1
,
6315 /* If this is testing a single bit, we can optimize the test. */
6316 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6317 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6318 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6320 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6321 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6322 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6324 if (arg00
!= NULL_TREE
6325 /* This is only a win if casting to a signed type is cheap,
6326 i.e. when arg00's type is not a partial mode. */
6327 && TYPE_PRECISION (TREE_TYPE (arg00
))
6328 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00
))))
6330 tree stype
= lang_hooks
.types
.signed_type (TREE_TYPE (arg00
));
6331 return fold_build2 (code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6332 result_type
, fold_convert (stype
, arg00
),
6333 build_int_cst (stype
, 0));
6340 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6341 equality/inequality test, then return a simplified form of
6342 the test using shifts and logical operations. Otherwise return
6343 NULL. TYPE is the desired result type. */
6346 fold_single_bit_test (enum tree_code code
, tree arg0
, tree arg1
,
6349 /* If this is testing a single bit, we can optimize the test. */
6350 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6351 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6352 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6354 tree inner
= TREE_OPERAND (arg0
, 0);
6355 tree type
= TREE_TYPE (arg0
);
6356 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6357 enum machine_mode operand_mode
= TYPE_MODE (type
);
6359 tree signed_type
, unsigned_type
, intermediate_type
;
6362 /* First, see if we can fold the single bit test into a sign-bit
6364 tem
= fold_single_bit_test_into_sign_test (code
, arg0
, arg1
,
6369 /* Otherwise we have (A & C) != 0 where C is a single bit,
6370 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6371 Similarly for (A & C) == 0. */
6373 /* If INNER is a right shift of a constant and it plus BITNUM does
6374 not overflow, adjust BITNUM and INNER. */
6375 if (TREE_CODE (inner
) == RSHIFT_EXPR
6376 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6377 && TREE_INT_CST_HIGH (TREE_OPERAND (inner
, 1)) == 0
6378 && bitnum
< TYPE_PRECISION (type
)
6379 && 0 > compare_tree_int (TREE_OPERAND (inner
, 1),
6380 bitnum
- TYPE_PRECISION (type
)))
6382 bitnum
+= TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1));
6383 inner
= TREE_OPERAND (inner
, 0);
6386 /* If we are going to be able to omit the AND below, we must do our
6387 operations as unsigned. If we must use the AND, we have a choice.
6388 Normally unsigned is faster, but for some machines signed is. */
6389 #ifdef LOAD_EXTEND_OP
6390 ops_unsigned
= (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
6391 && !flag_syntax_only
) ? 0 : 1;
6396 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6397 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6398 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6399 inner
= fold_convert (intermediate_type
, inner
);
6402 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6403 inner
, size_int (bitnum
));
6405 one
= build_int_cst (intermediate_type
, 1);
6407 if (code
== EQ_EXPR
)
6408 inner
= fold_build2 (BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6410 /* Put the AND last so it can combine with more things. */
6411 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6413 /* Make sure to return the proper type. */
6414 inner
= fold_convert (result_type
, inner
);
6421 /* Check whether we are allowed to reorder operands arg0 and arg1,
6422 such that the evaluation of arg1 occurs before arg0. */
6425 reorder_operands_p (tree arg0
, tree arg1
)
6427 if (! flag_evaluation_order
)
6429 if (TREE_CONSTANT (arg0
) || TREE_CONSTANT (arg1
))
6431 return ! TREE_SIDE_EFFECTS (arg0
)
6432 && ! TREE_SIDE_EFFECTS (arg1
);
6435 /* Test whether it is preferable two swap two operands, ARG0 and
6436 ARG1, for example because ARG0 is an integer constant and ARG1
6437 isn't. If REORDER is true, only recommend swapping if we can
6438 evaluate the operands in reverse order. */
6441 tree_swap_operands_p (tree arg0
, tree arg1
, bool reorder
)
6443 STRIP_SIGN_NOPS (arg0
);
6444 STRIP_SIGN_NOPS (arg1
);
6446 if (TREE_CODE (arg1
) == INTEGER_CST
)
6448 if (TREE_CODE (arg0
) == INTEGER_CST
)
6451 if (TREE_CODE (arg1
) == REAL_CST
)
6453 if (TREE_CODE (arg0
) == REAL_CST
)
6456 if (TREE_CODE (arg1
) == COMPLEX_CST
)
6458 if (TREE_CODE (arg0
) == COMPLEX_CST
)
6461 if (TREE_CONSTANT (arg1
))
6463 if (TREE_CONSTANT (arg0
))
6469 if (reorder
&& flag_evaluation_order
6470 && (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
)))
6478 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6479 for commutative and comparison operators. Ensuring a canonical
6480 form allows the optimizers to find additional redundancies without
6481 having to explicitly check for both orderings. */
6482 if (TREE_CODE (arg0
) == SSA_NAME
6483 && TREE_CODE (arg1
) == SSA_NAME
6484 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6490 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6491 ARG0 is extended to a wider type. */
6494 fold_widened_comparison (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6496 tree arg0_unw
= get_unwidened (arg0
, NULL_TREE
);
6498 tree shorter_type
, outer_type
;
6502 if (arg0_unw
== arg0
)
6504 shorter_type
= TREE_TYPE (arg0_unw
);
6506 #ifdef HAVE_canonicalize_funcptr_for_compare
6507 /* Disable this optimization if we're casting a function pointer
6508 type on targets that require function pointer canonicalization. */
6509 if (HAVE_canonicalize_funcptr_for_compare
6510 && TREE_CODE (shorter_type
) == POINTER_TYPE
6511 && TREE_CODE (TREE_TYPE (shorter_type
)) == FUNCTION_TYPE
)
6515 if (TYPE_PRECISION (TREE_TYPE (arg0
)) <= TYPE_PRECISION (shorter_type
))
6518 arg1_unw
= get_unwidened (arg1
, shorter_type
);
6520 /* If possible, express the comparison in the shorter mode. */
6521 if ((code
== EQ_EXPR
|| code
== NE_EXPR
6522 || TYPE_UNSIGNED (TREE_TYPE (arg0
)) == TYPE_UNSIGNED (shorter_type
))
6523 && (TREE_TYPE (arg1_unw
) == shorter_type
6524 || (TREE_CODE (arg1_unw
) == INTEGER_CST
6525 && (TREE_CODE (shorter_type
) == INTEGER_TYPE
6526 || TREE_CODE (shorter_type
) == BOOLEAN_TYPE
)
6527 && int_fits_type_p (arg1_unw
, shorter_type
))))
6528 return fold_build2 (code
, type
, arg0_unw
,
6529 fold_convert (shorter_type
, arg1_unw
));
6531 if (TREE_CODE (arg1_unw
) != INTEGER_CST
6532 || TREE_CODE (shorter_type
) != INTEGER_TYPE
6533 || !int_fits_type_p (arg1_unw
, shorter_type
))
6536 /* If we are comparing with the integer that does not fit into the range
6537 of the shorter type, the result is known. */
6538 outer_type
= TREE_TYPE (arg1_unw
);
6539 min
= lower_bound_in_type (outer_type
, shorter_type
);
6540 max
= upper_bound_in_type (outer_type
, shorter_type
);
6542 above
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6544 below
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6551 return omit_one_operand (type
, integer_zero_node
, arg0
);
6556 return omit_one_operand (type
, integer_one_node
, arg0
);
6562 return omit_one_operand (type
, integer_one_node
, arg0
);
6564 return omit_one_operand (type
, integer_zero_node
, arg0
);
6569 return omit_one_operand (type
, integer_zero_node
, arg0
);
6571 return omit_one_operand (type
, integer_one_node
, arg0
);
6580 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6581 ARG0 just the signedness is changed. */
6584 fold_sign_changed_comparison (enum tree_code code
, tree type
,
6585 tree arg0
, tree arg1
)
6588 tree inner_type
, outer_type
;
6590 if (TREE_CODE (arg0
) != NOP_EXPR
6591 && TREE_CODE (arg0
) != CONVERT_EXPR
)
6594 outer_type
= TREE_TYPE (arg0
);
6595 arg0_inner
= TREE_OPERAND (arg0
, 0);
6596 inner_type
= TREE_TYPE (arg0_inner
);
6598 #ifdef HAVE_canonicalize_funcptr_for_compare
6599 /* Disable this optimization if we're casting a function pointer
6600 type on targets that require function pointer canonicalization. */
6601 if (HAVE_canonicalize_funcptr_for_compare
6602 && TREE_CODE (inner_type
) == POINTER_TYPE
6603 && TREE_CODE (TREE_TYPE (inner_type
)) == FUNCTION_TYPE
)
6607 if (TYPE_PRECISION (inner_type
) != TYPE_PRECISION (outer_type
))
6610 if (TREE_CODE (arg1
) != INTEGER_CST
6611 && !((TREE_CODE (arg1
) == NOP_EXPR
6612 || TREE_CODE (arg1
) == CONVERT_EXPR
)
6613 && TREE_TYPE (TREE_OPERAND (arg1
, 0)) == inner_type
))
6616 if (TYPE_UNSIGNED (inner_type
) != TYPE_UNSIGNED (outer_type
)
6621 if (TREE_CODE (arg1
) == INTEGER_CST
)
6622 arg1
= force_fit_type_double (inner_type
, TREE_INT_CST_LOW (arg1
),
6623 TREE_INT_CST_HIGH (arg1
), 0,
6624 TREE_OVERFLOW (arg1
));
6626 arg1
= fold_convert (inner_type
, arg1
);
6628 return fold_build2 (code
, type
, arg0_inner
, arg1
);
6631 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6632 step of the array. Reconstructs s and delta in the case of s * delta
6633 being an integer constant (and thus already folded).
6634 ADDR is the address. MULT is the multiplicative expression.
6635 If the function succeeds, the new address expression is returned. Otherwise
6636 NULL_TREE is returned. */
6639 try_move_mult_to_index (enum tree_code code
, tree addr
, tree op1
)
6641 tree s
, delta
, step
;
6642 tree ref
= TREE_OPERAND (addr
, 0), pref
;
6647 /* Canonicalize op1 into a possibly non-constant delta
6648 and an INTEGER_CST s. */
6649 if (TREE_CODE (op1
) == MULT_EXPR
)
6651 tree arg0
= TREE_OPERAND (op1
, 0), arg1
= TREE_OPERAND (op1
, 1);
6656 if (TREE_CODE (arg0
) == INTEGER_CST
)
6661 else if (TREE_CODE (arg1
) == INTEGER_CST
)
6669 else if (TREE_CODE (op1
) == INTEGER_CST
)
6676 /* Simulate we are delta * 1. */
6678 s
= integer_one_node
;
6681 for (;; ref
= TREE_OPERAND (ref
, 0))
6683 if (TREE_CODE (ref
) == ARRAY_REF
)
6685 /* Remember if this was a multi-dimensional array. */
6686 if (TREE_CODE (TREE_OPERAND (ref
, 0)) == ARRAY_REF
)
6689 itype
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref
, 0)));
6693 step
= array_ref_element_size (ref
);
6694 if (TREE_CODE (step
) != INTEGER_CST
)
6699 if (! tree_int_cst_equal (step
, s
))
6704 /* Try if delta is a multiple of step. */
6705 tree tmp
= div_if_zero_remainder (EXACT_DIV_EXPR
, delta
, step
);
6711 /* Only fold here if we can verify we do not overflow one
6712 dimension of a multi-dimensional array. */
6717 if (TREE_CODE (TREE_OPERAND (ref
, 1)) != INTEGER_CST
6718 || !INTEGRAL_TYPE_P (itype
)
6719 || !TYPE_MAX_VALUE (itype
)
6720 || TREE_CODE (TYPE_MAX_VALUE (itype
)) != INTEGER_CST
)
6723 tmp
= fold_binary (code
, itype
,
6724 fold_convert (itype
,
6725 TREE_OPERAND (ref
, 1)),
6726 fold_convert (itype
, delta
));
6728 || TREE_CODE (tmp
) != INTEGER_CST
6729 || tree_int_cst_lt (TYPE_MAX_VALUE (itype
), tmp
))
6738 if (!handled_component_p (ref
))
6742 /* We found the suitable array reference. So copy everything up to it,
6743 and replace the index. */
6745 pref
= TREE_OPERAND (addr
, 0);
6746 ret
= copy_node (pref
);
6751 pref
= TREE_OPERAND (pref
, 0);
6752 TREE_OPERAND (pos
, 0) = copy_node (pref
);
6753 pos
= TREE_OPERAND (pos
, 0);
6756 TREE_OPERAND (pos
, 1) = fold_build2 (code
, itype
,
6757 fold_convert (itype
,
6758 TREE_OPERAND (pos
, 1)),
6759 fold_convert (itype
, delta
));
6761 return fold_build1 (ADDR_EXPR
, TREE_TYPE (addr
), ret
);
6765 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6766 means A >= Y && A != MAX, but in this case we know that
6767 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6770 fold_to_nonsharp_ineq_using_bound (tree ineq
, tree bound
)
6772 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
6774 if (TREE_CODE (bound
) == LT_EXPR
)
6775 a
= TREE_OPERAND (bound
, 0);
6776 else if (TREE_CODE (bound
) == GT_EXPR
)
6777 a
= TREE_OPERAND (bound
, 1);
6781 typea
= TREE_TYPE (a
);
6782 if (!INTEGRAL_TYPE_P (typea
)
6783 && !POINTER_TYPE_P (typea
))
6786 if (TREE_CODE (ineq
) == LT_EXPR
)
6788 a1
= TREE_OPERAND (ineq
, 1);
6789 y
= TREE_OPERAND (ineq
, 0);
6791 else if (TREE_CODE (ineq
) == GT_EXPR
)
6793 a1
= TREE_OPERAND (ineq
, 0);
6794 y
= TREE_OPERAND (ineq
, 1);
6799 if (TREE_TYPE (a1
) != typea
)
6802 diff
= fold_build2 (MINUS_EXPR
, typea
, a1
, a
);
6803 if (!integer_onep (diff
))
6806 return fold_build2 (GE_EXPR
, type
, a
, y
);
6809 /* Fold a sum or difference of at least one multiplication.
6810 Returns the folded tree or NULL if no simplification could be made. */
6813 fold_plusminus_mult_expr (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6815 tree arg00
, arg01
, arg10
, arg11
;
6816 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
6818 /* (A * C) +- (B * C) -> (A+-B) * C.
6819 (A * C) +- A -> A * (C+-1).
6820 We are most concerned about the case where C is a constant,
6821 but other combinations show up during loop reduction. Since
6822 it is not difficult, try all four possibilities. */
6824 if (TREE_CODE (arg0
) == MULT_EXPR
)
6826 arg00
= TREE_OPERAND (arg0
, 0);
6827 arg01
= TREE_OPERAND (arg0
, 1);
6832 arg01
= build_one_cst (type
);
6834 if (TREE_CODE (arg1
) == MULT_EXPR
)
6836 arg10
= TREE_OPERAND (arg1
, 0);
6837 arg11
= TREE_OPERAND (arg1
, 1);
6842 arg11
= build_one_cst (type
);
6846 if (operand_equal_p (arg01
, arg11
, 0))
6847 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
6848 else if (operand_equal_p (arg00
, arg10
, 0))
6849 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
6850 else if (operand_equal_p (arg00
, arg11
, 0))
6851 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
6852 else if (operand_equal_p (arg01
, arg10
, 0))
6853 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
6855 /* No identical multiplicands; see if we can find a common
6856 power-of-two factor in non-power-of-two multiplies. This
6857 can help in multi-dimensional array access. */
6858 else if (host_integerp (arg01
, 0)
6859 && host_integerp (arg11
, 0))
6861 HOST_WIDE_INT int01
, int11
, tmp
;
6864 int01
= TREE_INT_CST_LOW (arg01
);
6865 int11
= TREE_INT_CST_LOW (arg11
);
6867 /* Move min of absolute values to int11. */
6868 if ((int01
>= 0 ? int01
: -int01
)
6869 < (int11
>= 0 ? int11
: -int11
))
6871 tmp
= int01
, int01
= int11
, int11
= tmp
;
6872 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
6879 if (exact_log2 (abs (int11
)) > 0 && int01
% int11
== 0)
6881 alt0
= fold_build2 (MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
6882 build_int_cst (TREE_TYPE (arg00
),
6887 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
6892 return fold_build2 (MULT_EXPR
, type
,
6893 fold_build2 (code
, type
,
6894 fold_convert (type
, alt0
),
6895 fold_convert (type
, alt1
)),
6896 fold_convert (type
, same
));
6901 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6902 specified by EXPR into the buffer PTR of length LEN bytes.
6903 Return the number of bytes placed in the buffer, or zero
6907 native_encode_int (tree expr
, unsigned char *ptr
, int len
)
6909 tree type
= TREE_TYPE (expr
);
6910 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
6911 int byte
, offset
, word
, words
;
6912 unsigned char value
;
6914 if (total_bytes
> len
)
6916 words
= total_bytes
/ UNITS_PER_WORD
;
6918 for (byte
= 0; byte
< total_bytes
; byte
++)
6920 int bitpos
= byte
* BITS_PER_UNIT
;
6921 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
6922 value
= (unsigned char) (TREE_INT_CST_LOW (expr
) >> bitpos
);
6924 value
= (unsigned char) (TREE_INT_CST_HIGH (expr
)
6925 >> (bitpos
- HOST_BITS_PER_WIDE_INT
));
6927 if (total_bytes
> UNITS_PER_WORD
)
6929 word
= byte
/ UNITS_PER_WORD
;
6930 if (WORDS_BIG_ENDIAN
)
6931 word
= (words
- 1) - word
;
6932 offset
= word
* UNITS_PER_WORD
;
6933 if (BYTES_BIG_ENDIAN
)
6934 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
6936 offset
+= byte
% UNITS_PER_WORD
;
6939 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
6940 ptr
[offset
] = value
;
6946 /* Subroutine of native_encode_expr. Encode the REAL_CST
6947 specified by EXPR into the buffer PTR of length LEN bytes.
6948 Return the number of bytes placed in the buffer, or zero
6952 native_encode_real (tree expr
, unsigned char *ptr
, int len
)
6954 tree type
= TREE_TYPE (expr
);
6955 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
6956 int byte
, offset
, word
, words
;
6957 unsigned char value
;
6959 /* There are always 32 bits in each long, no matter the size of
6960 the hosts long. We handle floating point representations with
6964 if (total_bytes
> len
)
6966 words
= total_bytes
/ UNITS_PER_WORD
;
6968 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
6970 for (byte
= 0; byte
< total_bytes
; byte
++)
6972 int bitpos
= byte
* BITS_PER_UNIT
;
6973 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
6975 if (total_bytes
> UNITS_PER_WORD
)
6977 word
= byte
/ UNITS_PER_WORD
;
6978 if (FLOAT_WORDS_BIG_ENDIAN
)
6979 word
= (words
- 1) - word
;
6980 offset
= word
* UNITS_PER_WORD
;
6981 if (BYTES_BIG_ENDIAN
)
6982 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
6984 offset
+= byte
% UNITS_PER_WORD
;
6987 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
6988 ptr
[offset
] = value
;
6993 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6994 specified by EXPR into the buffer PTR of length LEN bytes.
6995 Return the number of bytes placed in the buffer, or zero
6999 native_encode_complex (tree expr
, unsigned char *ptr
, int len
)
7004 part
= TREE_REALPART (expr
);
7005 rsize
= native_encode_expr (part
, ptr
, len
);
7008 part
= TREE_IMAGPART (expr
);
7009 isize
= native_encode_expr (part
, ptr
+rsize
, len
-rsize
);
7012 return rsize
+ isize
;
7016 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7017 specified by EXPR into the buffer PTR of length LEN bytes.
7018 Return the number of bytes placed in the buffer, or zero
7022 native_encode_vector (tree expr
, unsigned char *ptr
, int len
)
7024 int i
, size
, offset
, count
;
7025 tree itype
, elem
, elements
;
7028 elements
= TREE_VECTOR_CST_ELTS (expr
);
7029 count
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr
));
7030 itype
= TREE_TYPE (TREE_TYPE (expr
));
7031 size
= GET_MODE_SIZE (TYPE_MODE (itype
));
7032 for (i
= 0; i
< count
; i
++)
7036 elem
= TREE_VALUE (elements
);
7037 elements
= TREE_CHAIN (elements
);
7044 if (native_encode_expr (elem
, ptr
+offset
, len
-offset
) != size
)
7049 if (offset
+ size
> len
)
7051 memset (ptr
+offset
, 0, size
);
7059 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7060 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7061 buffer PTR of length LEN bytes. Return the number of bytes
7062 placed in the buffer, or zero upon failure. */
7065 native_encode_expr (tree expr
, unsigned char *ptr
, int len
)
7067 switch (TREE_CODE (expr
))
7070 return native_encode_int (expr
, ptr
, len
);
7073 return native_encode_real (expr
, ptr
, len
);
7076 return native_encode_complex (expr
, ptr
, len
);
7079 return native_encode_vector (expr
, ptr
, len
);
7087 /* Subroutine of native_interpret_expr. Interpret the contents of
7088 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7089 If the buffer cannot be interpreted, return NULL_TREE. */
7092 native_interpret_int (tree type
, unsigned char *ptr
, int len
)
7094 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7095 int byte
, offset
, word
, words
;
7096 unsigned char value
;
7097 unsigned int HOST_WIDE_INT lo
= 0;
7098 HOST_WIDE_INT hi
= 0;
7100 if (total_bytes
> len
)
7102 if (total_bytes
* BITS_PER_UNIT
> 2 * HOST_BITS_PER_WIDE_INT
)
7104 words
= total_bytes
/ UNITS_PER_WORD
;
7106 for (byte
= 0; byte
< total_bytes
; byte
++)
7108 int bitpos
= byte
* BITS_PER_UNIT
;
7109 if (total_bytes
> UNITS_PER_WORD
)
7111 word
= byte
/ UNITS_PER_WORD
;
7112 if (WORDS_BIG_ENDIAN
)
7113 word
= (words
- 1) - word
;
7114 offset
= word
* UNITS_PER_WORD
;
7115 if (BYTES_BIG_ENDIAN
)
7116 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7118 offset
+= byte
% UNITS_PER_WORD
;
7121 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7122 value
= ptr
[offset
];
7124 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
7125 lo
|= (unsigned HOST_WIDE_INT
) value
<< bitpos
;
7127 hi
|= (unsigned HOST_WIDE_INT
) value
7128 << (bitpos
- HOST_BITS_PER_WIDE_INT
);
7131 return build_int_cst_wide_type (type
, lo
, hi
);
7135 /* Subroutine of native_interpret_expr. Interpret the contents of
7136 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7137 If the buffer cannot be interpreted, return NULL_TREE. */
7140 native_interpret_real (tree type
, unsigned char *ptr
, int len
)
7142 enum machine_mode mode
= TYPE_MODE (type
);
7143 int total_bytes
= GET_MODE_SIZE (mode
);
7144 int byte
, offset
, word
, words
;
7145 unsigned char value
;
7146 /* There are always 32 bits in each long, no matter the size of
7147 the hosts long. We handle floating point representations with
7152 total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7153 if (total_bytes
> len
|| total_bytes
> 24)
7155 words
= total_bytes
/ UNITS_PER_WORD
;
7157 memset (tmp
, 0, sizeof (tmp
));
7158 for (byte
= 0; byte
< total_bytes
; byte
++)
7160 int bitpos
= byte
* BITS_PER_UNIT
;
7161 if (total_bytes
> UNITS_PER_WORD
)
7163 word
= byte
/ UNITS_PER_WORD
;
7164 if (FLOAT_WORDS_BIG_ENDIAN
)
7165 word
= (words
- 1) - word
;
7166 offset
= word
* UNITS_PER_WORD
;
7167 if (BYTES_BIG_ENDIAN
)
7168 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7170 offset
+= byte
% UNITS_PER_WORD
;
7173 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7174 value
= ptr
[offset
];
7176 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7179 real_from_target (&r
, tmp
, mode
);
7180 return build_real (type
, r
);
7184 /* Subroutine of native_interpret_expr. Interpret the contents of
7185 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7186 If the buffer cannot be interpreted, return NULL_TREE. */
7189 native_interpret_complex (tree type
, unsigned char *ptr
, int len
)
7191 tree etype
, rpart
, ipart
;
7194 etype
= TREE_TYPE (type
);
7195 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7198 rpart
= native_interpret_expr (etype
, ptr
, size
);
7201 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7204 return build_complex (type
, rpart
, ipart
);
7208 /* Subroutine of native_interpret_expr. Interpret the contents of
7209 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7210 If the buffer cannot be interpreted, return NULL_TREE. */
7213 native_interpret_vector (tree type
, unsigned char *ptr
, int len
)
7215 tree etype
, elem
, elements
;
7218 etype
= TREE_TYPE (type
);
7219 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7220 count
= TYPE_VECTOR_SUBPARTS (type
);
7221 if (size
* count
> len
)
7224 elements
= NULL_TREE
;
7225 for (i
= count
- 1; i
>= 0; i
--)
7227 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7230 elements
= tree_cons (NULL_TREE
, elem
, elements
);
7232 return build_vector (type
, elements
);
7236 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
7237 the buffer PTR of length LEN as a constant of type TYPE.  For
7238 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7239 we return a REAL_CST, etc...  If the buffer cannot be interpreted,
7240 return NULL_TREE. */
7243 native_interpret_expr (tree type
, unsigned char *ptr
, int len
)
/* Dispatch on the tree code of TYPE to the matching interpreter.
   NOTE(review): the case labels for this switch were dropped by the
   extraction (original lines 7246-7249, 7251-7252, 7254-7255, 7257-7258,
   7260-7263); presumably integral-type labels precede the first return
   and a default returns NULL_TREE -- confirm against upstream.  */
7245 switch (TREE_CODE (type
))
7250 return native_interpret_int (type
, ptr
, len
);
7253 return native_interpret_real (type
, ptr
, len
);
7256 return native_interpret_complex (type
, ptr
, len
);
7259 return native_interpret_vector (type
, ptr
, len
);
7267 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7268 TYPE at compile-time. If we're unable to perform the conversion
7269 return NULL_TREE. */
7272 fold_view_convert_expr (tree type
, tree expr
)
7274 /* We support up to 512-bit values (for V8DFmode). */
7275 unsigned char buffer
[64];
7278 /* Check that the host and target are sane. */
7279 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
7282 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
7286 return native_interpret_expr (type
, buffer
, len
);
7290 /* Fold a unary expression of code CODE and type TYPE with operand
7291 OP0.  Return the folded expression if folding is successful.
7292 Otherwise, return NULL_TREE. */
7295 fold_unary (enum tree_code code
, tree type
, tree op0
)
/* NOTE(review): the extraction dropped many original lines of this
   function (the opening brace, local declarations such as `tem' and
   `arg0', several case labels of the big switch, and various
   sub-expressions).  The surviving fragments below are kept verbatim;
   dropped lines should be restored from the upstream source.  */
7299 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7301 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7302 && TREE_CODE_LENGTH (code
) == 1);
/* For conversions and ABS_EXPR, the signedness of the argument type
   matters, so only sign-preserving no-op conversions are stripped.  */
7307 if (code
== NOP_EXPR
|| code
== CONVERT_EXPR
7308 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
)
7310 /* Don't use STRIP_NOPS, because signedness of argument type
7312 STRIP_SIGN_NOPS (arg0
);
7316 /* Strip any conversions that don't change the mode.  This
7317 is safe for every expression, except for a comparison
7318 expression because its signedness is derived from its
7321 Note that this is done as an internal manipulation within
7322 the constant folder, in order to find the simplest
7323 representation of the arguments so that their form can be
7324 studied.  In any cases, the appropriate type conversions
7325 should be put back in the tree that will get out of the
/* Distribute the unary operation into COMPOUND_EXPR and COND_EXPR
   operands so constants inside them can fold.  */
7331 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7333 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7334 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7335 fold_build1 (code
, type
, TREE_OPERAND (arg0
, 1)));
7336 else if (TREE_CODE (arg0
) == COND_EXPR
)
7338 tree arg01
= TREE_OPERAND (arg0
, 1);
7339 tree arg02
= TREE_OPERAND (arg0
, 2);
7340 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7341 arg01
= fold_build1 (code
, type
, arg01
);
7342 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7343 arg02
= fold_build1 (code
, type
, arg02
);
7344 tem
= fold_build3 (COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7347 /* If this was a conversion, and all we did was to move into
7348 inside the COND_EXPR, bring it back out.  But leave it if
7349 it is a conversion from integer to integer and the
7350 result precision is no wider than a word since such a
7351 conversion is cheap and may be optimized away by combine,
7352 while it couldn't if it were outside the COND_EXPR.  Then return
7353 so we don't get into an infinite recursion loop taking the
7354 conversion out and then back in. */
7356 if ((code
== NOP_EXPR
|| code
== CONVERT_EXPR
7357 || code
== NON_LVALUE_EXPR
)
7358 && TREE_CODE (tem
) == COND_EXPR
7359 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7360 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7361 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7362 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7363 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7364 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7365 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7367 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7368 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7369 || flag_syntax_only
))
7370 tem
= build1 (code
, type
,
7372 TREE_TYPE (TREE_OPERAND
7373 (TREE_OPERAND (tem
, 1), 0)),
7374 TREE_OPERAND (tem
, 0),
7375 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7376 TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)));
7379 else if (COMPARISON_CLASS_P (arg0
))
7381 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7383 arg0
= copy_node (arg0
);
7384 TREE_TYPE (arg0
) = type
;
7387 else if (TREE_CODE (type
) != INTEGER_TYPE
)
7388 return fold_build3 (COND_EXPR
, type
, arg0
,
7389 fold_build1 (code
, type
,
7391 fold_build1 (code
, type
,
7392 integer_zero_node
));
/* Conversion cases.  FIX_TRUNC_EXPR shares this code with the
   (presumably dropped) NOP_EXPR/CONVERT_EXPR/FLOAT_EXPR labels --
   TODO confirm against upstream.  */
7401 case FIX_TRUNC_EXPR
:
7402 if (TREE_TYPE (op0
) == type
)
7405 /* If we have (type) (a CMP b) and type is an integral type, return
7406 new expression involving the new type. */
7407 if (COMPARISON_CLASS_P (op0
) && INTEGRAL_TYPE_P (type
))
7408 return fold_build2 (TREE_CODE (op0
), type
, TREE_OPERAND (op0
, 0),
7409 TREE_OPERAND (op0
, 1));
7411 /* Handle cases of two conversions in a row. */
7412 if (TREE_CODE (op0
) == NOP_EXPR
7413 || TREE_CODE (op0
) == CONVERT_EXPR
)
7415 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7416 tree inter_type
= TREE_TYPE (op0
);
7417 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7418 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7419 int inside_float
= FLOAT_TYPE_P (inside_type
);
7420 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7421 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7422 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7423 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7424 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7425 int inter_float
= FLOAT_TYPE_P (inter_type
);
7426 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7427 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7428 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7429 int final_int
= INTEGRAL_TYPE_P (type
);
7430 int final_ptr
= POINTER_TYPE_P (type
);
7431 int final_float
= FLOAT_TYPE_P (type
);
7432 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7433 unsigned int final_prec
= TYPE_PRECISION (type
);
7434 int final_unsignedp
= TYPE_UNSIGNED (type
);
7436 /* In addition to the cases of two conversions in a row
7437 handled below, if we are converting something to its own
7438 type via an object of identical or wider precision, neither
7439 conversion is needed. */
7440 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7441 && (((inter_int
|| inter_ptr
) && final_int
)
7442 || (inter_float
&& final_float
))
7443 && inter_prec
>= final_prec
)
7444 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
7446 /* Likewise, if the intermediate and final types are either both
7447 float or both integer, we don't need the middle conversion if
7448 it is wider than the final type and doesn't change the signedness
7449 (for integers).  Avoid this if the final type is a pointer
7450 since then we sometimes need the inner conversion.  Likewise if
7451 the outer has a precision not equal to the size of its mode. */
7452 if ((((inter_int
|| inter_ptr
) && (inside_int
|| inside_ptr
))
7453 || (inter_float
&& inside_float
)
7454 || (inter_vec
&& inside_vec
))
7455 && inter_prec
>= inside_prec
7456 && (inter_float
|| inter_vec
7457 || inter_unsignedp
== inside_unsignedp
)
7458 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
7459 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7461 && (! final_vec
|| inter_prec
== inside_prec
))
7462 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
7464 /* If we have a sign-extension of a zero-extended value, we can
7465 replace that by a single zero-extension. */
7466 if (inside_int
&& inter_int
&& final_int
7467 && inside_prec
< inter_prec
&& inter_prec
< final_prec
7468 && inside_unsignedp
&& !inter_unsignedp
)
7469 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
7471 /* Two conversions in a row are not needed unless:
7472 - some conversion is floating-point (overstrict for now), or
7473 - some conversion is a vector (overstrict for now), or
7474 - the intermediate type is narrower than both initial and
7476 - the intermediate type and innermost type differ in signedness,
7477 and the outermost type is wider than the intermediate, or
7478 - the initial type is a pointer type and the precisions of the
7479 intermediate and final types differ, or
7480 - the final type is a pointer type and the precisions of the
7481 initial and intermediate types differ.
7482 - the final type is a pointer type and the initial type not
7483 - the initial type is a pointer to an array and the final type
7485 if (! inside_float
&& ! inter_float
&& ! final_float
7486 && ! inside_vec
&& ! inter_vec
&& ! final_vec
7487 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
7488 && ! (inside_int
&& inter_int
7489 && inter_unsignedp
!= inside_unsignedp
7490 && inter_prec
< final_prec
)
7491 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
7492 == (final_unsignedp
&& final_prec
> inter_prec
))
7493 && ! (inside_ptr
&& inter_prec
!= final_prec
)
7494 && ! (final_ptr
&& inside_prec
!= inter_prec
)
7495 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
7496 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7497 && final_ptr
== inside_ptr
7499 && TREE_CODE (TREE_TYPE (inside_type
)) == ARRAY_TYPE
7500 && TREE_CODE (TREE_TYPE (type
)) != ARRAY_TYPE
))
7501 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
7504 /* Handle (T *)&A.B.C for A being of type T and B and C
7505 living at offset zero.  This occurs frequently in
7506 C++ upcasting and then accessing the base. */
7507 if (TREE_CODE (op0
) == ADDR_EXPR
7508 && POINTER_TYPE_P (type
)
7509 && handled_component_p (TREE_OPERAND (op0
, 0)))
7511 HOST_WIDE_INT bitsize
, bitpos
;
7513 enum machine_mode mode
;
7514 int unsignedp
, volatilep
;
7515 tree base
= TREE_OPERAND (op0
, 0);
7516 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7517 &mode
, &unsignedp
, &volatilep
, false);
7518 /* If the reference was to a (constant) zero offset, we can use
7519 the address of the base if it has the same base type
7520 as the result type. */
7521 if (! offset
&& bitpos
== 0
7522 && TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7523 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7524 return fold_convert (type
, build_fold_addr_expr (base
));
7527 if ((TREE_CODE (op0
) == MODIFY_EXPR
7528 || TREE_CODE (op0
) == GIMPLE_MODIFY_STMT
)
7529 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0
, 1))
7530 /* Detect assigning a bitfield. */
7531 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7533 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0
, 0), 1))))
7535 /* Don't leave an assignment inside a conversion
7536 unless assigning a bitfield. */
7537 tem
= fold_build1 (code
, type
, GENERIC_TREE_OPERAND (op0
, 1));
7538 /* First do the assignment, then return converted constant. */
7539 tem
= build2 (COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7540 TREE_NO_WARNING (tem
) = 1;
7541 TREE_USED (tem
) = 1;
7545 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7546 constants (if x has signed type, the sign bit cannot be set
7547 in c).  This folds extension into the BIT_AND_EXPR. */
7548 if (INTEGRAL_TYPE_P (type
)
7549 && TREE_CODE (type
) != BOOLEAN_TYPE
7550 && TREE_CODE (op0
) == BIT_AND_EXPR
7551 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7554 tree and0
= TREE_OPERAND (and, 0), and1
= TREE_OPERAND (and, 1);
7557 if (TYPE_UNSIGNED (TREE_TYPE (and))
7558 || (TYPE_PRECISION (type
)
7559 <= TYPE_PRECISION (TREE_TYPE (and))))
7561 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7562 <= HOST_BITS_PER_WIDE_INT
7563 && host_integerp (and1
, 1))
7565 unsigned HOST_WIDE_INT cst
;
7567 cst
= tree_low_cst (and1
, 1);
7568 cst
&= (HOST_WIDE_INT
) -1
7569 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7570 change
= (cst
== 0);
7571 #ifdef LOAD_EXTEND_OP
7573 && !flag_syntax_only
7574 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7577 tree uns
= lang_hooks
.types
.unsigned_type (TREE_TYPE (and0
));
7578 and0
= fold_convert (uns
, and0
);
7579 and1
= fold_convert (uns
, and1
);
7585 tem
= force_fit_type_double (type
, TREE_INT_CST_LOW (and1
),
7586 TREE_INT_CST_HIGH (and1
), 0,
7587 TREE_OVERFLOW (and1
));
7588 return fold_build2 (BIT_AND_EXPR
, type
,
7589 fold_convert (type
, and0
), tem
);
7593 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7594 T2 being pointers to types of the same size. */
7595 if (POINTER_TYPE_P (type
)
7596 && BINARY_CLASS_P (arg0
)
7597 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7598 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
7600 tree arg00
= TREE_OPERAND (arg0
, 0);
7602 tree t1
= TREE_TYPE (arg00
);
7603 tree tt0
= TREE_TYPE (t0
);
7604 tree tt1
= TREE_TYPE (t1
);
7605 tree s0
= TYPE_SIZE (tt0
);
7606 tree s1
= TYPE_SIZE (tt1
);
7608 if (s0
&& s1
&& operand_equal_p (s0
, s1
, OEP_ONLY_CONST
))
7609 return build2 (TREE_CODE (arg0
), t0
, fold_convert (t0
, arg00
),
7610 TREE_OPERAND (arg0
, 1));
7613 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7614 of the same precision, and X is a integer type not narrower than
7615 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7616 if (INTEGRAL_TYPE_P (type
)
7617 && TREE_CODE (op0
) == BIT_NOT_EXPR
7618 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7619 && (TREE_CODE (TREE_OPERAND (op0
, 0)) == NOP_EXPR
7620 || TREE_CODE (TREE_OPERAND (op0
, 0)) == CONVERT_EXPR
)
7621 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7623 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7624 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7625 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7626 return fold_build1 (BIT_NOT_EXPR
, type
, fold_convert (type
, tem
));
/* Last resort for conversions: constant-fold directly.  */
7629 tem
= fold_convert_const (code
, type
, arg0
);
7630 return tem
? tem
: NULL_TREE
;
7632 case VIEW_CONVERT_EXPR
:
7633 if (TREE_TYPE (op0
) == type
)
7635 if (TREE_CODE (op0
) == VIEW_CONVERT_EXPR
)
7636 return fold_build1 (VIEW_CONVERT_EXPR
, type
, TREE_OPERAND (op0
, 0));
7637 return fold_view_convert_expr (type
, op0
);
/* Negation case (label presumably NEGATE_EXPR, dropped by
   extraction).  */
7640 tem
= fold_negate_expr (arg0
);
7642 return fold_convert (type
, tem
);
7646 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
7647 return fold_abs_const (arg0
, type
);
7648 else if (TREE_CODE (arg0
) == NEGATE_EXPR
)
7649 return fold_build1 (ABS_EXPR
, type
, TREE_OPERAND (arg0
, 0));
7650 /* Convert fabs((double)float) into (double)fabsf(float). */
7651 else if (TREE_CODE (arg0
) == NOP_EXPR
7652 && TREE_CODE (type
) == REAL_TYPE
)
7654 tree targ0
= strip_float_extensions (arg0
);
7656 return fold_convert (type
, fold_build1 (ABS_EXPR
,
7660 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7661 else if (tree_expr_nonnegative_p (arg0
) || TREE_CODE (arg0
) == ABS_EXPR
)
7664 /* Strip sign ops from argument. */
7665 if (TREE_CODE (type
) == REAL_TYPE
)
7667 tem
= fold_strip_sign_ops (arg0
);
7669 return fold_build1 (ABS_EXPR
, type
, fold_convert (type
, tem
));
/* Complex conjugate case (label presumably CONJ_EXPR, dropped).  */
7674 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
7675 return fold_convert (type
, arg0
);
7676 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
7678 tree itype
= TREE_TYPE (type
);
7679 tree rpart
= fold_convert (itype
, TREE_OPERAND (arg0
, 0));
7680 tree ipart
= fold_convert (itype
, TREE_OPERAND (arg0
, 1));
7681 return fold_build2 (COMPLEX_EXPR
, type
, rpart
, negate_expr (ipart
));
7683 if (TREE_CODE (arg0
) == COMPLEX_CST
)
7685 tree itype
= TREE_TYPE (type
);
7686 tree rpart
= fold_convert (itype
, TREE_REALPART (arg0
));
7687 tree ipart
= fold_convert (itype
, TREE_IMAGPART (arg0
));
7688 return build_complex (type
, rpart
, negate_expr (ipart
));
7690 if (TREE_CODE (arg0
) == CONJ_EXPR
)
7691 return fold_convert (type
, TREE_OPERAND (arg0
, 0));
/* Bitwise complement case (label presumably BIT_NOT_EXPR, dropped).  */
7695 if (TREE_CODE (arg0
) == INTEGER_CST
)
7696 return fold_not_const (arg0
, type
);
7697 else if (TREE_CODE (arg0
) == BIT_NOT_EXPR
)
7698 return TREE_OPERAND (arg0
, 0);
7699 /* Convert ~ (-A) to A - 1. */
7700 else if (INTEGRAL_TYPE_P (type
) && TREE_CODE (arg0
) == NEGATE_EXPR
)
7701 return fold_build2 (MINUS_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7702 build_int_cst (type
, 1));
7703 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7704 else if (INTEGRAL_TYPE_P (type
)
7705 && ((TREE_CODE (arg0
) == MINUS_EXPR
7706 && integer_onep (TREE_OPERAND (arg0
, 1)))
7707 || (TREE_CODE (arg0
) == PLUS_EXPR
7708 && integer_all_onesp (TREE_OPERAND (arg0
, 1)))))
7709 return fold_build1 (NEGATE_EXPR
, type
, TREE_OPERAND (arg0
, 0));
7710 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7711 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7712 && (tem
= fold_unary (BIT_NOT_EXPR
, type
,
7714 TREE_OPERAND (arg0
, 0)))))
7715 return fold_build2 (BIT_XOR_EXPR
, type
, tem
,
7716 fold_convert (type
, TREE_OPERAND (arg0
, 1)));
7717 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7718 && (tem
= fold_unary (BIT_NOT_EXPR
, type
,
7720 TREE_OPERAND (arg0
, 1)))))
7721 return fold_build2 (BIT_XOR_EXPR
, type
,
7722 fold_convert (type
, TREE_OPERAND (arg0
, 0)), tem
);
7726 case TRUTH_NOT_EXPR
:
7727 /* The argument to invert_truthvalue must have Boolean type. */
7728 if (TREE_CODE (TREE_TYPE (arg0
)) != BOOLEAN_TYPE
)
7729 arg0
= fold_convert (boolean_type_node
, arg0
);
7731 /* Note that the operand of this must be an int
7732 and its values must be 0 or 1.
7733 ("true" is a fixed value perhaps depending on the language,
7734 but we don't handle values other than 1 correctly yet.) */
7735 tem
= fold_truth_not_expr (arg0
);
7738 return fold_convert (type
, tem
);
/* Real-part extraction (label presumably REALPART_EXPR, dropped).  */
7741 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
7742 return fold_convert (type
, arg0
);
7743 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
7744 return omit_one_operand (type
, TREE_OPERAND (arg0
, 0),
7745 TREE_OPERAND (arg0
, 1));
7746 if (TREE_CODE (arg0
) == COMPLEX_CST
)
7747 return fold_convert (type
, TREE_REALPART (arg0
));
7748 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
7750 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
7751 tem
= fold_build2 (TREE_CODE (arg0
), itype
,
7752 fold_build1 (REALPART_EXPR
, itype
,
7753 TREE_OPERAND (arg0
, 0)),
7754 fold_build1 (REALPART_EXPR
, itype
,
7755 TREE_OPERAND (arg0
, 1)));
7756 return fold_convert (type
, tem
);
7758 if (TREE_CODE (arg0
) == CONJ_EXPR
)
7760 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
7761 tem
= fold_build1 (REALPART_EXPR
, itype
, TREE_OPERAND (arg0
, 0));
7762 return fold_convert (type
, tem
);
7764 if (TREE_CODE (arg0
) == CALL_EXPR
)
7766 tree fn
= get_callee_fndecl (arg0
);
7767 if (DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
7768 switch (DECL_FUNCTION_CODE (fn
))
7770 CASE_FLT_FN (BUILT_IN_CEXPI
):
7771 fn
= mathfn_built_in (type
, BUILT_IN_COS
);
7773 return build_function_call_expr (fn
,
7774 TREE_OPERAND (arg0
, 1));
/* Imaginary-part extraction (label presumably IMAGPART_EXPR,
   dropped).  */
7784 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
7785 return fold_convert (type
, integer_zero_node
);
7786 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
7787 return omit_one_operand (type
, TREE_OPERAND (arg0
, 1),
7788 TREE_OPERAND (arg0
, 0));
7789 if (TREE_CODE (arg0
) == COMPLEX_CST
)
7790 return fold_convert (type
, TREE_IMAGPART (arg0
));
7791 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
7793 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
7794 tem
= fold_build2 (TREE_CODE (arg0
), itype
,
7795 fold_build1 (IMAGPART_EXPR
, itype
,
7796 TREE_OPERAND (arg0
, 0)),
7797 fold_build1 (IMAGPART_EXPR
, itype
,
7798 TREE_OPERAND (arg0
, 1)));
7799 return fold_convert (type
, tem
);
7801 if (TREE_CODE (arg0
) == CONJ_EXPR
)
7803 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
7804 tem
= fold_build1 (IMAGPART_EXPR
, itype
, TREE_OPERAND (arg0
, 0));
7805 return fold_convert (type
, negate_expr (tem
));
7807 if (TREE_CODE (arg0
) == CALL_EXPR
)
7809 tree fn
= get_callee_fndecl (arg0
);
7810 if (DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
7811 switch (DECL_FUNCTION_CODE (fn
))
7813 CASE_FLT_FN (BUILT_IN_CEXPI
):
7814 fn
= mathfn_built_in (type
, BUILT_IN_SIN
);
7816 return build_function_call_expr (fn
,
7817 TREE_OPERAND (arg0
, 1));
7828 } /* switch (code) */
7831 /* Fold a binary expression of code CODE and type TYPE with operands
7832 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7833 Return the folded expression if folding is successful. Otherwise,
7834 return NULL_TREE. */
7837 fold_minmax (enum tree_code code
, tree type
, tree op0
, tree op1
)
7839 enum tree_code compl_code
;
7841 if (code
== MIN_EXPR
)
7842 compl_code
= MAX_EXPR
;
7843 else if (code
== MAX_EXPR
)
7844 compl_code
= MIN_EXPR
;
7848 /* MIN (MAX (a, b), b) == b. */
7849 if (TREE_CODE (op0
) == compl_code
7850 && operand_equal_p (TREE_OPERAND (op0
, 1), op1
, 0))
7851 return omit_one_operand (type
, op1
, TREE_OPERAND (op0
, 0));
7853 /* MIN (MAX (b, a), b) == b. */
7854 if (TREE_CODE (op0
) == compl_code
7855 && operand_equal_p (TREE_OPERAND (op0
, 0), op1
, 0)
7856 && reorder_operands_p (TREE_OPERAND (op0
, 1), op1
))
7857 return omit_one_operand (type
, op1
, TREE_OPERAND (op0
, 1));
7859 /* MIN (a, MAX (a, b)) == a. */
7860 if (TREE_CODE (op1
) == compl_code
7861 && operand_equal_p (op0
, TREE_OPERAND (op1
, 0), 0)
7862 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 1)))
7863 return omit_one_operand (type
, op0
, TREE_OPERAND (op1
, 1));
7865 /* MIN (a, MAX (b, a)) == a. */
7866 if (TREE_CODE (op1
) == compl_code
7867 && operand_equal_p (op0
, TREE_OPERAND (op1
, 1), 0)
7868 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 0)))
7869 return omit_one_operand (type
, op0
, TREE_OPERAND (op1
, 0));
7874 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7875 by changing CODE to reduce the magnitude of constants involved in
7876 ARG0 of the comparison.
7877 Returns a canonicalized comparison tree if a simplification was
7878 possible, otherwise returns NULL_TREE. */
7881 maybe_canonicalize_comparison_1 (enum tree_code code
, tree type
,
7882 tree arg0
, tree arg1
)
/* NOTE(review): extraction dropped the opening brace plus the
   declarations of `sgn0' and the swap flag used below (original
   lines 7886-7888), and the `code = ...' assignment lines inside the
   if/else chains -- restore from upstream.  */
7884 enum tree_code code0
= TREE_CODE (arg0
);
7885 tree t
, cst0
= NULL_TREE
;
7889 /* Match A +- CST code arg1 and CST code arg1. */
7890 if (!(((code0
== MINUS_EXPR
7891 || code0
== PLUS_EXPR
)
7892 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
7893 || code0
== INTEGER_CST
))
7896 /* Identify the constant in arg0 and its sign. */
7897 if (code0
== INTEGER_CST
)
7900 cst0
= TREE_OPERAND (arg0
, 1);
7901 sgn0
= tree_int_cst_sgn (cst0
);
7903 /* Overflowed constants and zero will cause problems. */
7904 if (integer_zerop (cst0
)
7905 || TREE_OVERFLOW (cst0
))
7908 /* See if we can reduce the magnitude of the constant in
7909 arg0 by changing the comparison code. */
7910 if (code0
== INTEGER_CST
)
7912 /* CST <= arg1 -> CST-1 < arg1. */
7913 if (code
== LE_EXPR
&& sgn0
== 1)
7915 /* -CST < arg1 -> -CST-1 <= arg1. */
7916 else if (code
== LT_EXPR
&& sgn0
== -1)
7918 /* CST > arg1 -> CST-1 >= arg1. */
7919 else if (code
== GT_EXPR
&& sgn0
== 1)
7921 /* -CST >= arg1 -> -CST-1 > arg1. */
7922 else if (code
== GE_EXPR
&& sgn0
== -1)
7926 /* arg1 code' CST' might be more canonical. */
7931 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7933 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
7935 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7936 else if (code
== GT_EXPR
7937 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
7939 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7940 else if (code
== LE_EXPR
7941 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
7943 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7944 else if (code
== GE_EXPR
7945 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
7951 /* Now build the constant reduced in magnitude. */
7952 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
7953 cst0
, build_int_cst (TREE_TYPE (cst0
), 1), 0);
7954 if (code0
!= INTEGER_CST
)
7955 t
= fold_build2 (code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
7957 /* If swapping might yield to a more canonical form, do so. */
7959 return fold_build2 (swap_tree_comparison (code
), type
, arg1
, t
);
7961 return fold_build2 (code
, type
, t
, arg1
);
7964 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
7965 overflow further. Try to decrease the magnitude of constants involved
7966 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7967 and put sole constants at the second argument position.
7968 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7971 maybe_canonicalize_comparison (enum tree_code code
, tree type
,
7972 tree arg0
, tree arg1
)
7976 /* In principle pointers also have undefined overflow behavior,
7977 but that causes problems elsewhere. */
7978 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
7979 || POINTER_TYPE_P (TREE_TYPE (arg0
)))
7982 /* Try canonicalization by simplifying arg0. */
7983 t
= maybe_canonicalize_comparison_1 (code
, type
, arg0
, arg1
);
7987 /* Try canonicalization by simplifying arg1 using the swapped
7989 code
= swap_tree_comparison (code
);
7990 return maybe_canonicalize_comparison_1 (code
, type
, arg1
, arg0
);
7993 /* Subroutine of fold_binary. This routine performs all of the
7994 transformations that are common to the equality/inequality
7995 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7996 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7997 fold_binary should call fold_binary. Fold a comparison with
7998 tree code CODE and type TYPE with operands OP0 and OP1. Return
7999 the folded comparison or NULL_TREE. */
/* NOTE(review): lossy extraction.  Many original lines of this function are
   missing from the visible text (opening/closing braces, the assignments
   `arg0 = op0; arg1 = op1;`, the declaration of `const2`/`lhs`/`lhs_add`
   and `cst`, the bodies of several `else` branches, the switch case labels
   for the constant pointer-comparison and the 3-bit-mask dispatch, and the
   final `return NULL_TREE;`).  Only comments were added here; all visible
   code tokens are byte-identical.  Restore the missing lines from the
   upstream fold-const.c before compiling. */
8002 fold_comparison (enum tree_code code
, tree type
, tree op0
, tree op1
)
8004 tree arg0
, arg1
, tem
;
/* Strip sign-preserving no-op conversions so the structural tests below
   see through casts. */
8009 STRIP_SIGN_NOPS (arg0
);
8010 STRIP_SIGN_NOPS (arg1
);
/* Constant-fold first: if both operands are constants the comparison is
   decided right here. */
8012 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
8013 if (tem
!= NULL_TREE
)
8016 /* If one arg is a real or integer constant, put it last. */
8017 if (tree_swap_operands_p (arg0
, arg1
, true))
8018 return fold_build2 (swap_tree_comparison (code
), type
, op1
, op0
);
8020 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8021 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8022 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8023 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8024 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
8025 && (TREE_CODE (arg1
) == INTEGER_CST
8026 && !TREE_OVERFLOW (arg1
)))
8028 tree const1
= TREE_OPERAND (arg0
, 1);
/* NOTE(review): the declaration of `const2` (originally `tree const2 =
   arg1;` or similar) is missing from this extraction -- confirm upstream. */
8030 tree variable
= TREE_OPERAND (arg0
, 0);
/* lhs_add is true when arg0 is a MINUS, i.e. moving C1 across the
   comparison flips the sign of the adjustment. */
8033 lhs_add
= TREE_CODE (arg0
) != PLUS_EXPR
;
8035 lhs
= fold_build2 (lhs_add
? PLUS_EXPR
: MINUS_EXPR
,
8036 TREE_TYPE (arg1
), const2
, const1
);
/* Only use the transformed constant if folding produced a clean constant
   of the same kind with no overflow flag set. */
8037 if (TREE_CODE (lhs
) == TREE_CODE (arg1
)
8038 && (TREE_CODE (lhs
) != INTEGER_CST
8039 || !TREE_OVERFLOW (lhs
)))
8040 return fold_build2 (code
, type
, variable
, lhs
);
8043 /* For comparisons of pointers we can decompose it to a compile time
8044 comparison of the base objects and the offsets into the object.
8045 This requires at least one operand being an ADDR_EXPR to do more
8046 than the operand_equal_p test below. */
8047 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8048 && (TREE_CODE (arg0
) == ADDR_EXPR
8049 || TREE_CODE (arg1
) == ADDR_EXPR
))
8051 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
8052 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
8053 enum machine_mode mode
;
8054 int volatilep
, unsignedp
;
8055 bool indirect_base0
= false;
8057 /* Get base and offset for the access. Strip ADDR_EXPR for
8058 get_inner_reference, but put it back by stripping INDIRECT_REF
8059 off the base object if possible. */
8061 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8063 base0
= get_inner_reference (TREE_OPERAND (arg0
, 0),
8064 &bitsize
, &bitpos0
, &offset0
, &mode
,
8065 &unsignedp
, &volatilep
, false);
8066 if (TREE_CODE (base0
) == INDIRECT_REF
)
8067 base0
= TREE_OPERAND (base0
, 0);
/* NOTE(review): the `else` line setting indirect_base0 in the non-
   INDIRECT_REF case appears to be missing from this extraction. */
8069 indirect_base0
= true;
8073 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8075 base1
= get_inner_reference (TREE_OPERAND (arg1
, 0),
8076 &bitsize
, &bitpos1
, &offset1
, &mode
,
8077 &unsignedp
, &volatilep
, false);
8078 /* We have to make sure to have an indirect/non-indirect base1
8079 just the same as we did for base0. */
8080 if (TREE_CODE (base1
) == INDIRECT_REF
8082 base1
= TREE_OPERAND (base1
, 0);
/* NOTE(review): the bodies of the next two else-if arms (bail out /
   adjust for indirectness mismatch) are missing from this extraction. */
8083 else if (!indirect_base0
)
8086 else if (indirect_base0
)
8089 /* If we have equivalent bases we might be able to simplify. */
8091 && operand_equal_p (base0
, base1
, 0))
8093 /* We can fold this expression to a constant if the non-constant
8094 offset parts are equal. */
8095 if (offset0
== offset1
8096 || (offset0
&& offset1
8097 && operand_equal_p (offset0
, offset1
, 0)))
/* Constant result per comparison code, decided by the bit positions.
   NOTE(review): the `switch (code)` and `case` labels preceding each
   return below are missing from this extraction. */
8102 return build_int_cst (boolean_type_node
, bitpos0
== bitpos1
);
8104 return build_int_cst (boolean_type_node
, bitpos0
!= bitpos1
);
8106 return build_int_cst (boolean_type_node
, bitpos0
< bitpos1
);
8108 return build_int_cst (boolean_type_node
, bitpos0
<= bitpos1
);
8110 return build_int_cst (boolean_type_node
, bitpos0
>= bitpos1
);
8112 return build_int_cst (boolean_type_node
, bitpos0
> bitpos1
);
8116 /* We can simplify the comparison to a comparison of the variable
8117 offset parts if the constant offset parts are equal.
8118 Be careful to use signed size type here because otherwise we
8119 mess with array offsets in the wrong way. This is possible
8120 because pointer arithmetic is restricted to retain within an
8121 object and overflow on pointer differences is undefined as of
8122 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8123 else if (bitpos0
== bitpos1
)
8125 tree signed_size_type_node
;
8126 signed_size_type_node
= signed_type_for (size_type_node
);
8128 /* By converting to signed size type we cover middle-end pointer
8129 arithmetic which operates on unsigned pointer types of size
8130 type size and ARRAY_REF offsets which are properly sign or
8131 zero extended from their type in case it is narrower than
8133 if (offset0
== NULL_TREE
)
8134 offset0
= build_int_cst (signed_size_type_node
, 0);
8136 offset0
= fold_convert (signed_size_type_node
, offset0
);
8137 if (offset1
== NULL_TREE
)
8138 offset1
= build_int_cst (signed_size_type_node
, 0);
8140 offset1
= fold_convert (signed_size_type_node
, offset1
);
8142 return fold_build2 (code
, type
, offset0
, offset1
);
8147 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8148 same object, then we can fold this to a comparison of the two offsets in
8149 signed size type. This is possible because pointer arithmetic is
8150 restricted to retain within an object and overflow on pointer differences
8151 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8153 We check flag_wrapv directly because pointers types are unsigned,
8154 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8155 normally what we want to avoid certain odd overflow cases, but
8157 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8159 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0
)))
8161 tree base0
, offset0
, base1
, offset1
;
8163 if (extract_array_ref (arg0
, &base0
, &offset0
)
8164 && extract_array_ref (arg1
, &base1
, &offset1
)
8165 && operand_equal_p (base0
, base1
, 0))
8167 tree signed_size_type_node
;
8168 signed_size_type_node
= signed_type_for (size_type_node
);
8170 /* By converting to signed size type we cover middle-end pointer
8171 arithmetic which operates on unsigned pointer types of size
8172 type size and ARRAY_REF offsets which are properly sign or
8173 zero extended from their type in case it is narrower than
8175 if (offset0
== NULL_TREE
)
8176 offset0
= build_int_cst (signed_size_type_node
, 0);
8178 offset0
= fold_convert (signed_size_type_node
, offset0
);
8179 if (offset1
== NULL_TREE
)
8180 offset1
= build_int_cst (signed_size_type_node
, 0);
8182 offset1
= fold_convert (signed_size_type_node
, offset1
);
8184 return fold_build2 (code
, type
, offset0
, offset1
);
8188 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8189 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8190 the resulting offset is smaller in absolute value than the
8192 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8193 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8194 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8195 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
8196 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
8197 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
8198 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
8200 tree const1
= TREE_OPERAND (arg0
, 1);
8201 tree const2
= TREE_OPERAND (arg1
, 1);
8202 tree variable1
= TREE_OPERAND (arg0
, 0);
8203 tree variable2
= TREE_OPERAND (arg1
, 0);
/* NOTE(review): declaration of `cst` and the trailing operand lines of
   the two int_const_binop / fold_build2 calls below are missing from
   this extraction. */
8206 /* Put the constant on the side where it doesn't overflow and is
8207 of lower absolute value than before. */
8208 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8209 ? MINUS_EXPR
: PLUS_EXPR
,
8211 if (!TREE_OVERFLOW (cst
)
8212 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
))
8213 return fold_build2 (code
, type
,
8215 fold_build2 (TREE_CODE (arg1
), TREE_TYPE (arg1
),
8218 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8219 ? MINUS_EXPR
: PLUS_EXPR
,
8221 if (!TREE_OVERFLOW (cst
)
8222 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
))
8223 return fold_build2 (code
, type
,
8224 fold_build2 (TREE_CODE (arg0
), TREE_TYPE (arg0
),
8229 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8230 signed arithmetic case. That form is created by the compiler
8231 often enough for folding it to be of value. One example is in
8232 computing loop trip counts after Operator Strength Reduction. */
8233 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8234 && TREE_CODE (arg0
) == MULT_EXPR
8235 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8236 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
8237 && integer_zerop (arg1
))
8239 tree const1
= TREE_OPERAND (arg0
, 1);
8240 tree const2
= arg1
; /* zero */
8241 tree variable1
= TREE_OPERAND (arg0
, 0);
8242 enum tree_code cmp_code
= code
;
/* A zero multiplier would have been folded away already. */
8244 gcc_assert (!integer_zerop (const1
));
8246 /* If const1 is negative we swap the sense of the comparison. */
8247 if (tree_int_cst_sgn (const1
) < 0)
8248 cmp_code
= swap_tree_comparison (cmp_code
);
8250 return fold_build2 (cmp_code
, type
, variable1
, const2
);
/* Last generic integer attempt before the FP-specific folds. */
8253 tem
= maybe_canonicalize_comparison (code
, type
, arg0
, arg1
);
8257 if (FLOAT_TYPE_P (TREE_TYPE (arg0
)))
8259 tree targ0
= strip_float_extensions (arg0
);
8260 tree targ1
= strip_float_extensions (arg1
);
8261 tree newtype
= TREE_TYPE (targ0
);
8263 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
8264 newtype
= TREE_TYPE (targ1
);
8266 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8267 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
8268 return fold_build2 (code
, type
, fold_convert (newtype
, targ0
),
8269 fold_convert (newtype
, targ1
));
8271 /* (-a) CMP (-b) -> b CMP a */
8272 if (TREE_CODE (arg0
) == NEGATE_EXPR
8273 && TREE_CODE (arg1
) == NEGATE_EXPR
)
8274 return fold_build2 (code
, type
, TREE_OPERAND (arg1
, 0),
8275 TREE_OPERAND (arg0
, 0));
8277 if (TREE_CODE (arg1
) == REAL_CST
)
8279 REAL_VALUE_TYPE cst
;
8280 cst
= TREE_REAL_CST (arg1
);
8282 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8283 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
8284 return fold_build2 (swap_tree_comparison (code
), type
,
8285 TREE_OPERAND (arg0
, 0),
8286 build_real (TREE_TYPE (arg1
),
8287 REAL_VALUE_NEGATE (cst
)));
8289 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8290 /* a CMP (-0) -> a CMP 0 */
8291 if (REAL_VALUE_MINUS_ZERO (cst
))
8292 return fold_build2 (code
, type
, arg0
,
8293 build_real (TREE_TYPE (arg1
), dconst0
));
8295 /* x != NaN is always true, other ops are always false. */
8296 if (REAL_VALUE_ISNAN (cst
)
8297 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
))))
8299 tem
= (code
== NE_EXPR
) ? integer_one_node
: integer_zero_node
;
8300 return omit_one_operand (type
, tem
, arg0
);
8303 /* Fold comparisons against infinity. */
8304 if (REAL_VALUE_ISINF (cst
))
8306 tem
= fold_inf_compare (code
, type
, arg0
, arg1
);
8307 if (tem
!= NULL_TREE
)
8312 /* If this is a comparison of a real constant with a PLUS_EXPR
8313 or a MINUS_EXPR of a real constant, we can convert it into a
8314 comparison with a revised real constant as long as no overflow
8315 occurs when unsafe_math_optimizations are enabled. */
8316 if (flag_unsafe_math_optimizations
8317 && TREE_CODE (arg1
) == REAL_CST
8318 && (TREE_CODE (arg0
) == PLUS_EXPR
8319 || TREE_CODE (arg0
) == MINUS_EXPR
)
8320 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
8321 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
8322 ? MINUS_EXPR
: PLUS_EXPR
,
8323 arg1
, TREE_OPERAND (arg0
, 1), 0))
8324 && !TREE_OVERFLOW (tem
))
8325 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
);
8327 /* Likewise, we can simplify a comparison of a real constant with
8328 a MINUS_EXPR whose first operand is also a real constant, i.e.
8329 (c1 - x) < c2 becomes x > c1-c2. */
8330 if (flag_unsafe_math_optimizations
8331 && TREE_CODE (arg1
) == REAL_CST
8332 && TREE_CODE (arg0
) == MINUS_EXPR
8333 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
8334 && 0 != (tem
= const_binop (MINUS_EXPR
, TREE_OPERAND (arg0
, 0),
8336 && !TREE_OVERFLOW (tem
))
8337 return fold_build2 (swap_tree_comparison (code
), type
,
8338 TREE_OPERAND (arg0
, 1), tem
);
8340 /* Fold comparisons against built-in math functions. */
8341 if (TREE_CODE (arg1
) == REAL_CST
8342 && flag_unsafe_math_optimizations
8343 && ! flag_errno_math
)
8345 enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8347 if (fcode
!= END_BUILTINS
)
8349 tem
= fold_mathfn_compare (fcode
, code
, type
, arg0
, arg1
);
8350 if (tem
!= NULL_TREE
)
8356 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8357 if (TREE_CONSTANT (arg1
)
8358 && (TREE_CODE (arg0
) == POSTINCREMENT_EXPR
8359 || TREE_CODE (arg0
) == POSTDECREMENT_EXPR
)
8360 /* This optimization is invalid for ordered comparisons
8361 if CONST+INCR overflows or if foo+incr might overflow.
8362 This optimization is invalid for floating point due to rounding.
8363 For pointer types we assume overflow doesn't happen. */
8364 && (POINTER_TYPE_P (TREE_TYPE (arg0
))
8365 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8366 && (code
== EQ_EXPR
|| code
== NE_EXPR
))))
8368 tree varop
, newconst
;
8370 if (TREE_CODE (arg0
) == POSTINCREMENT_EXPR
)
8372 newconst
= fold_build2 (PLUS_EXPR
, TREE_TYPE (arg0
),
8373 arg1
, TREE_OPERAND (arg0
, 1));
8374 varop
= build2 (PREINCREMENT_EXPR
, TREE_TYPE (arg0
),
8375 TREE_OPERAND (arg0
, 0),
8376 TREE_OPERAND (arg0
, 1));
/* Post-decrement case: subtract the increment from the constant.
   NOTE(review): the intervening `else {` lines are missing from this
   extraction. */
8380 newconst
= fold_build2 (MINUS_EXPR
, TREE_TYPE (arg0
),
8381 arg1
, TREE_OPERAND (arg0
, 1));
8382 varop
= build2 (PREDECREMENT_EXPR
, TREE_TYPE (arg0
),
8383 TREE_OPERAND (arg0
, 0),
8384 TREE_OPERAND (arg0
, 1));
8388 /* If VAROP is a reference to a bitfield, we must mask
8389 the constant by the width of the field. */
8390 if (TREE_CODE (TREE_OPERAND (varop
, 0)) == COMPONENT_REF
8391 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop
, 0), 1))
8392 && host_integerp (DECL_SIZE (TREE_OPERAND
8393 (TREE_OPERAND (varop
, 0), 1)), 1))
8395 tree fielddecl
= TREE_OPERAND (TREE_OPERAND (varop
, 0), 1);
8396 HOST_WIDE_INT size
= tree_low_cst (DECL_SIZE (fielddecl
), 1);
8397 tree folded_compare
, shift
;
8399 /* First check whether the comparison would come out
8400 always the same. If we don't do that we would
8401 change the meaning with the masking. */
8402 folded_compare
= fold_build2 (code
, type
,
8403 TREE_OPERAND (varop
, 0), arg1
);
8404 if (TREE_CODE (folded_compare
) == INTEGER_CST
)
8405 return omit_one_operand (type
, folded_compare
, varop
);
/* Mask NEWCONST to the bitfield width via a left/right shift pair. */
8407 shift
= build_int_cst (NULL_TREE
,
8408 TYPE_PRECISION (TREE_TYPE (varop
)) - size
);
8409 shift
= fold_convert (TREE_TYPE (varop
), shift
);
8410 newconst
= fold_build2 (LSHIFT_EXPR
, TREE_TYPE (varop
),
8412 newconst
= fold_build2 (RSHIFT_EXPR
, TREE_TYPE (varop
),
8416 return fold_build2 (code
, type
, varop
, newconst
);
8419 if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
8420 && (TREE_CODE (arg0
) == NOP_EXPR
8421 || TREE_CODE (arg0
) == CONVERT_EXPR
))
8423 /* If we are widening one operand of an integer comparison,
8424 see if the other operand is similarly being widened. Perhaps we
8425 can do the comparison in the narrower type. */
8426 tem
= fold_widened_comparison (code
, type
, arg0
, arg1
);
8430 /* Or if we are changing signedness. */
8431 tem
= fold_sign_changed_comparison (code
, type
, arg0
, arg1
);
8436 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8437 constant, we can simplify it. */
8438 if (TREE_CODE (arg1
) == INTEGER_CST
8439 && (TREE_CODE (arg0
) == MIN_EXPR
8440 || TREE_CODE (arg0
) == MAX_EXPR
)
8441 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8443 tem
= optimize_minmax_comparison (code
, type
, op0
, op1
);
8448 /* Simplify comparison of something with itself. (For IEEE
8449 floating-point, we can only do some of these simplifications.) */
8450 if (operand_equal_p (arg0
, arg1
, 0))
/* NOTE(review): the `switch (code)` and its `case` labels around the
   returns below are missing from this extraction. */
8455 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
8456 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
8457 return constant_boolean_node (1, type
);
8462 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
8463 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
8464 return constant_boolean_node (1, type
);
8465 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
8468 /* For NE, we can only do this simplification if integer
8469 or we don't honor IEEE floating point NaNs. */
8470 if (FLOAT_TYPE_P (TREE_TYPE (arg0
))
8471 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
8473 /* ... fall through ... */
8476 return constant_boolean_node (0, type
);
8482 /* If we are comparing an expression that just has comparisons
8483 of two integer values, arithmetic expressions of those comparisons,
8484 and constants, we can simplify it. There are only three cases
8485 to check: the two values can either be equal, the first can be
8486 greater, or the second can be greater. Fold the expression for
8487 those three values. Since each value must be 0 or 1, we have
8488 eight possibilities, each of which corresponds to the constant 0
8489 or 1 or one of the six possible comparisons.
8491 This handles common cases like (a > b) == 0 but also handles
8492 expressions like ((x > y) - (y > x)) > 0, which supposedly
8493 occur in macroized code. */
8495 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
8497 tree cval1
= 0, cval2
= 0;
8500 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
8501 /* Don't handle degenerate cases here; they should already
8502 have been handled anyway. */
8503 && cval1
!= 0 && cval2
!= 0
8504 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
8505 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
8506 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
8507 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
8508 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
8509 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
8510 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
8512 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
8513 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
8515 /* We can't just pass T to eval_subst in case cval1 or cval2
8516 was the same as ARG1. */
/* NOTE(review): the declarations/assignment targets (high_result,
   equal_result, low_result) and the second eval_subst argument lines
   are missing from this extraction. */
8519 = fold_build2 (code
, type
,
8520 eval_subst (arg0
, cval1
, maxval
,
8524 = fold_build2 (code
, type
,
8525 eval_subst (arg0
, cval1
, maxval
,
8529 = fold_build2 (code
, type
,
8530 eval_subst (arg0
, cval1
, minval
,
8534 /* All three of these results should be 0 or 1. Confirm they are.
8535 Then use those values to select the proper code to use. */
8537 if (TREE_CODE (high_result
) == INTEGER_CST
8538 && TREE_CODE (equal_result
) == INTEGER_CST
8539 && TREE_CODE (low_result
) == INTEGER_CST
)
8541 /* Make a 3-bit mask with the high-order bit being the
8542 value for `>', the next for '=', and the low for '<'. */
8543 switch ((integer_onep (high_result
) * 4)
8544 + (integer_onep (equal_result
) * 2)
8545 + integer_onep (low_result
))
/* NOTE(review): the case labels 0..7 mapping the mask to a comparison
   code (original lines 8546-8573) are missing from this extraction. */
8549 return omit_one_operand (type
, integer_zero_node
, arg0
);
8570 return omit_one_operand (type
, integer_one_node
, arg0
);
8574 return save_expr (build2 (code
, type
, cval1
, cval2
));
8575 return fold_build2 (code
, type
, cval1
, cval2
);
8580 /* Fold a comparison of the address of COMPONENT_REFs with the same
8581 type and component to a comparison of the address of the base
8582 object. In short, &x->a OP &y->a to x OP y and
8583 &x->a OP &y.a to x OP &y */
8584 if (TREE_CODE (arg0
) == ADDR_EXPR
8585 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == COMPONENT_REF
8586 && TREE_CODE (arg1
) == ADDR_EXPR
8587 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == COMPONENT_REF
)
8589 tree cref0
= TREE_OPERAND (arg0
, 0);
8590 tree cref1
= TREE_OPERAND (arg1
, 0);
8591 if (TREE_OPERAND (cref0
, 1) == TREE_OPERAND (cref1
, 1))
8593 tree op0
= TREE_OPERAND (cref0
, 0);
8594 tree op1
= TREE_OPERAND (cref1
, 0);
8595 return fold_build2 (code
, type
,
8596 build_fold_addr_expr (op0
),
8597 build_fold_addr_expr (op1
));
8601 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8602 into a single range test. */
8603 if ((TREE_CODE (arg0
) == TRUNC_DIV_EXPR
8604 || TREE_CODE (arg0
) == EXACT_DIV_EXPR
)
8605 && TREE_CODE (arg1
) == INTEGER_CST
8606 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8607 && !integer_zerop (TREE_OPERAND (arg0
, 1))
8608 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8609 && !TREE_OVERFLOW (arg1
))
8611 tem
= fold_div_compare (code
, type
, arg0
, arg1
);
8612 if (tem
!= NULL_TREE
)
8616 /* Fold ~X op ~Y as Y op X. */
8617 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
8618 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
8619 return fold_build2 (code
, type
,
8620 TREE_OPERAND (arg1
, 0),
8621 TREE_OPERAND (arg0
, 0));
8623 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8624 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
8625 && TREE_CODE (arg1
) == INTEGER_CST
)
8626 return fold_build2 (swap_tree_comparison (code
), type
,
8627 TREE_OPERAND (arg0
, 0),
8628 fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
));
8634 /* Subroutine of fold_binary. Optimize complex multiplications of the
8635 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8636 argument EXPR represents the expression "z" of type TYPE. */
/* NOTE(review): lossy extraction -- the return type and the braces of the
   if/else-if/else ladder below are missing from the visible text.  Only
   comments were added here; all code tokens are unchanged. */
8639 fold_mult_zconjz (tree type
, tree expr
)
/* ITYPE is the component (element) type of the complex type TYPE. */
8641 tree itype
= TREE_TYPE (type
);
8642 tree rpart
, ipart
, tem
;
/* Extract real/imaginary parts without re-evaluating EXPR: directly from
   a COMPLEX_EXPR or COMPLEX_CST, otherwise via save_expr + REALPART/
   IMAGPART so EXPR is evaluated once. */
8644 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
8646 rpart
= TREE_OPERAND (expr
, 0);
8647 ipart
= TREE_OPERAND (expr
, 1);
8649 else if (TREE_CODE (expr
) == COMPLEX_CST
)
8651 rpart
= TREE_REALPART (expr
);
8652 ipart
= TREE_IMAGPART (expr
);
8656 expr
= save_expr (expr
);
8657 rpart
= fold_build1 (REALPART_EXPR
, itype
, expr
);
8658 ipart
= fold_build1 (IMAGPART_EXPR
, itype
, expr
);
/* Each part is used twice in the product below, so protect both with
   save_expr to avoid double evaluation. */
8661 rpart
= save_expr (rpart
);
8662 ipart
= save_expr (ipart
);
/* z * conj(z) == rpart*rpart + ipart*ipart, with a zero imaginary part. */
8663 tem
= fold_build2 (PLUS_EXPR
, itype
,
8664 fold_build2 (MULT_EXPR
, itype
, rpart
, rpart
),
8665 fold_build2 (MULT_EXPR
, itype
, ipart
, ipart
));
8666 return fold_build2 (COMPLEX_EXPR
, type
, tem
,
8667 fold_convert (itype
, integer_zero_node
));
8671 /* Fold a binary expression of code CODE and type TYPE with operands
8672 OP0 and OP1. Return the folded expression if folding is
8673 successful. Otherwise, return NULL_TREE. */
8676 fold_binary (enum tree_code code
, tree type
, tree op0
, tree op1
)
8678 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
8679 tree arg0
, arg1
, tem
;
8680 tree t1
= NULL_TREE
;
8682 gcc_assert ((IS_EXPR_CODE_CLASS (kind
)
8683 || IS_GIMPLE_STMT_CODE_CLASS (kind
))
8684 && TREE_CODE_LENGTH (code
) == 2
8686 && op1
!= NULL_TREE
);
8691 /* Strip any conversions that don't change the mode. This is
8692 safe for every expression, except for a comparison expression
8693 because its signedness is derived from its operands. So, in
8694 the latter case, only strip conversions that don't change the
8697 Note that this is done as an internal manipulation within the
8698 constant folder, in order to find the simplest representation
8699 of the arguments so that their form can be studied. In any
8700 cases, the appropriate type conversions should be put back in
8701 the tree that will get out of the constant folder. */
8703 if (kind
== tcc_comparison
)
8705 STRIP_SIGN_NOPS (arg0
);
8706 STRIP_SIGN_NOPS (arg1
);
8714 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8715 constant but we can't do arithmetic on them. */
8716 if ((TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
8717 || (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
8718 || (TREE_CODE (arg0
) == COMPLEX_CST
&& TREE_CODE (arg1
) == COMPLEX_CST
)
8719 || (TREE_CODE (arg0
) == VECTOR_CST
&& TREE_CODE (arg1
) == VECTOR_CST
))
8721 if (kind
== tcc_binary
)
8722 tem
= const_binop (code
, arg0
, arg1
, 0);
8723 else if (kind
== tcc_comparison
)
8724 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
8728 if (tem
!= NULL_TREE
)
8730 if (TREE_TYPE (tem
) != type
)
8731 tem
= fold_convert (type
, tem
);
8736 /* If this is a commutative operation, and ARG0 is a constant, move it
8737 to ARG1 to reduce the number of tests below. */
8738 if (commutative_tree_code (code
)
8739 && tree_swap_operands_p (arg0
, arg1
, true))
8740 return fold_build2 (code
, type
, op1
, op0
);
8742 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8744 First check for cases where an arithmetic operation is applied to a
8745 compound, conditional, or comparison operation. Push the arithmetic
8746 operation inside the compound or conditional to see if any folding
8747 can then be done. Convert comparison to conditional for this purpose.
8748 The also optimizes non-constant cases that used to be done in
8751 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8752 one of the operands is a comparison and the other is a comparison, a
8753 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8754 code below would make the expression more complex. Change it to a
8755 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8756 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8758 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
8759 || code
== EQ_EXPR
|| code
== NE_EXPR
)
8760 && ((truth_value_p (TREE_CODE (arg0
))
8761 && (truth_value_p (TREE_CODE (arg1
))
8762 || (TREE_CODE (arg1
) == BIT_AND_EXPR
8763 && integer_onep (TREE_OPERAND (arg1
, 1)))))
8764 || (truth_value_p (TREE_CODE (arg1
))
8765 && (truth_value_p (TREE_CODE (arg0
))
8766 || (TREE_CODE (arg0
) == BIT_AND_EXPR
8767 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
8769 tem
= fold_build2 (code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
8770 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
8773 fold_convert (boolean_type_node
, arg0
),
8774 fold_convert (boolean_type_node
, arg1
));
8776 if (code
== EQ_EXPR
)
8777 tem
= invert_truthvalue (tem
);
8779 return fold_convert (type
, tem
);
8782 if (TREE_CODE_CLASS (code
) == tcc_binary
8783 || TREE_CODE_CLASS (code
) == tcc_comparison
)
8785 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
8786 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
8787 fold_build2 (code
, type
,
8788 TREE_OPERAND (arg0
, 1), op1
));
8789 if (TREE_CODE (arg1
) == COMPOUND_EXPR
8790 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
8791 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
8792 fold_build2 (code
, type
,
8793 op0
, TREE_OPERAND (arg1
, 1)));
8795 if (TREE_CODE (arg0
) == COND_EXPR
|| COMPARISON_CLASS_P (arg0
))
8797 tem
= fold_binary_op_with_conditional_arg (code
, type
, op0
, op1
,
8799 /*cond_first_p=*/1);
8800 if (tem
!= NULL_TREE
)
8804 if (TREE_CODE (arg1
) == COND_EXPR
|| COMPARISON_CLASS_P (arg1
))
8806 tem
= fold_binary_op_with_conditional_arg (code
, type
, op0
, op1
,
8808 /*cond_first_p=*/0);
8809 if (tem
!= NULL_TREE
)
8817 /* A + (-B) -> A - B */
8818 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
8819 return fold_build2 (MINUS_EXPR
, type
,
8820 fold_convert (type
, arg0
),
8821 fold_convert (type
, TREE_OPERAND (arg1
, 0)));
8822 /* (-A) + B -> B - A */
8823 if (TREE_CODE (arg0
) == NEGATE_EXPR
8824 && reorder_operands_p (TREE_OPERAND (arg0
, 0), arg1
))
8825 return fold_build2 (MINUS_EXPR
, type
,
8826 fold_convert (type
, arg1
),
8827 fold_convert (type
, TREE_OPERAND (arg0
, 0)));
8828 /* Convert ~A + 1 to -A. */
8829 if (INTEGRAL_TYPE_P (type
)
8830 && TREE_CODE (arg0
) == BIT_NOT_EXPR
8831 && integer_onep (arg1
))
8832 return fold_build1 (NEGATE_EXPR
, type
, TREE_OPERAND (arg0
, 0));
8834 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8836 if ((TREE_CODE (arg0
) == MULT_EXPR
8837 || TREE_CODE (arg1
) == MULT_EXPR
)
8838 && (!FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
))
8840 tree tem
= fold_plusminus_mult_expr (code
, type
, arg0
, arg1
);
8845 if (! FLOAT_TYPE_P (type
))
8847 if (integer_zerop (arg1
))
8848 return non_lvalue (fold_convert (type
, arg0
));
8851 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
8852 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
8853 && !TYPE_OVERFLOW_TRAPS (type
))
8855 t1
= build_int_cst_type (type
, -1);
8856 return omit_one_operand (type
, t1
, arg1
);
8860 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
8861 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
8862 && !TYPE_OVERFLOW_TRAPS (type
))
8864 t1
= build_int_cst_type (type
, -1);
8865 return omit_one_operand (type
, t1
, arg0
);
8868 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8869 with a constant, and the two constants have no bits in common,
8870 we should treat this as a BIT_IOR_EXPR since this may produce more
8872 if (TREE_CODE (arg0
) == BIT_AND_EXPR
8873 && TREE_CODE (arg1
) == BIT_AND_EXPR
8874 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8875 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
8876 && integer_zerop (const_binop (BIT_AND_EXPR
,
8877 TREE_OPERAND (arg0
, 1),
8878 TREE_OPERAND (arg1
, 1), 0)))
8880 code
= BIT_IOR_EXPR
;
8884 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8885 (plus (plus (mult) (mult)) (foo)) so that we can
8886 take advantage of the factoring cases below. */
8887 if (((TREE_CODE (arg0
) == PLUS_EXPR
8888 || TREE_CODE (arg0
) == MINUS_EXPR
)
8889 && TREE_CODE (arg1
) == MULT_EXPR
)
8890 || ((TREE_CODE (arg1
) == PLUS_EXPR
8891 || TREE_CODE (arg1
) == MINUS_EXPR
)
8892 && TREE_CODE (arg0
) == MULT_EXPR
))
8894 tree parg0
, parg1
, parg
, marg
;
8895 enum tree_code pcode
;
8897 if (TREE_CODE (arg1
) == MULT_EXPR
)
8898 parg
= arg0
, marg
= arg1
;
8900 parg
= arg1
, marg
= arg0
;
8901 pcode
= TREE_CODE (parg
);
8902 parg0
= TREE_OPERAND (parg
, 0);
8903 parg1
= TREE_OPERAND (parg
, 1);
8907 if (TREE_CODE (parg0
) == MULT_EXPR
8908 && TREE_CODE (parg1
) != MULT_EXPR
)
8909 return fold_build2 (pcode
, type
,
8910 fold_build2 (PLUS_EXPR
, type
,
8911 fold_convert (type
, parg0
),
8912 fold_convert (type
, marg
)),
8913 fold_convert (type
, parg1
));
8914 if (TREE_CODE (parg0
) != MULT_EXPR
8915 && TREE_CODE (parg1
) == MULT_EXPR
)
8916 return fold_build2 (PLUS_EXPR
, type
,
8917 fold_convert (type
, parg0
),
8918 fold_build2 (pcode
, type
,
8919 fold_convert (type
, marg
),
8924 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8925 of the array. Loop optimizer sometimes produce this type of
8927 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8929 tem
= try_move_mult_to_index (PLUS_EXPR
, arg0
, arg1
);
8931 return fold_convert (type
, tem
);
8933 else if (TREE_CODE (arg1
) == ADDR_EXPR
)
8935 tem
= try_move_mult_to_index (PLUS_EXPR
, arg1
, arg0
);
8937 return fold_convert (type
, tem
);
8942 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8943 if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 0))
8944 return non_lvalue (fold_convert (type
, arg0
));
8946 /* Likewise if the operands are reversed. */
8947 if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
8948 return non_lvalue (fold_convert (type
, arg1
));
8950 /* Convert X + -C into X - C. */
8951 if (TREE_CODE (arg1
) == REAL_CST
8952 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
)))
8954 tem
= fold_negate_const (arg1
, type
);
8955 if (!TREE_OVERFLOW (arg1
) || !flag_trapping_math
)
8956 return fold_build2 (MINUS_EXPR
, type
,
8957 fold_convert (type
, arg0
),
8958 fold_convert (type
, tem
));
8961 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
8962 to __complex__ ( x, y ). This is not the same for SNaNs or
8963 if signed zeros are involved. */
8964 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
8965 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
8966 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
8968 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
8969 tree arg0r
= fold_unary (REALPART_EXPR
, rtype
, arg0
);
8970 tree arg0i
= fold_unary (IMAGPART_EXPR
, rtype
, arg0
);
8971 bool arg0rz
= false, arg0iz
= false;
8972 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
8973 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
8975 tree arg1r
= fold_unary (REALPART_EXPR
, rtype
, arg1
);
8976 tree arg1i
= fold_unary (IMAGPART_EXPR
, rtype
, arg1
);
8977 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
8979 tree rp
= arg1r
? arg1r
8980 : build1 (REALPART_EXPR
, rtype
, arg1
);
8981 tree ip
= arg0i
? arg0i
8982 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
8983 return fold_build2 (COMPLEX_EXPR
, type
, rp
, ip
);
8985 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
8987 tree rp
= arg0r
? arg0r
8988 : build1 (REALPART_EXPR
, rtype
, arg0
);
8989 tree ip
= arg1i
? arg1i
8990 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
8991 return fold_build2 (COMPLEX_EXPR
, type
, rp
, ip
);
8996 if (flag_unsafe_math_optimizations
8997 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
8998 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
8999 && (tem
= distribute_real_division (code
, type
, arg0
, arg1
)))
9002 /* Convert x+x into x*2.0. */
9003 if (operand_equal_p (arg0
, arg1
, 0)
9004 && SCALAR_FLOAT_TYPE_P (type
))
9005 return fold_build2 (MULT_EXPR
, type
, arg0
,
9006 build_real (type
, dconst2
));
9008 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
9009 if (flag_unsafe_math_optimizations
9010 && TREE_CODE (arg1
) == PLUS_EXPR
9011 && TREE_CODE (arg0
) != MULT_EXPR
)
9013 tree tree10
= TREE_OPERAND (arg1
, 0);
9014 tree tree11
= TREE_OPERAND (arg1
, 1);
9015 if (TREE_CODE (tree11
) == MULT_EXPR
9016 && TREE_CODE (tree10
) == MULT_EXPR
)
9019 tree0
= fold_build2 (PLUS_EXPR
, type
, arg0
, tree10
);
9020 return fold_build2 (PLUS_EXPR
, type
, tree0
, tree11
);
9023 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
9024 if (flag_unsafe_math_optimizations
9025 && TREE_CODE (arg0
) == PLUS_EXPR
9026 && TREE_CODE (arg1
) != MULT_EXPR
)
9028 tree tree00
= TREE_OPERAND (arg0
, 0);
9029 tree tree01
= TREE_OPERAND (arg0
, 1);
9030 if (TREE_CODE (tree01
) == MULT_EXPR
9031 && TREE_CODE (tree00
) == MULT_EXPR
)
9034 tree0
= fold_build2 (PLUS_EXPR
, type
, tree01
, arg1
);
9035 return fold_build2 (PLUS_EXPR
, type
, tree00
, tree0
);
9041 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9042 is a rotate of A by C1 bits. */
9043 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9044 is a rotate of A by B bits. */
9046 enum tree_code code0
, code1
;
9047 code0
= TREE_CODE (arg0
);
9048 code1
= TREE_CODE (arg1
);
9049 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
9050 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
9051 && operand_equal_p (TREE_OPERAND (arg0
, 0),
9052 TREE_OPERAND (arg1
, 0), 0)
9053 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
9055 tree tree01
, tree11
;
9056 enum tree_code code01
, code11
;
9058 tree01
= TREE_OPERAND (arg0
, 1);
9059 tree11
= TREE_OPERAND (arg1
, 1);
9060 STRIP_NOPS (tree01
);
9061 STRIP_NOPS (tree11
);
9062 code01
= TREE_CODE (tree01
);
9063 code11
= TREE_CODE (tree11
);
9064 if (code01
== INTEGER_CST
9065 && code11
== INTEGER_CST
9066 && TREE_INT_CST_HIGH (tree01
) == 0
9067 && TREE_INT_CST_HIGH (tree11
) == 0
9068 && ((TREE_INT_CST_LOW (tree01
) + TREE_INT_CST_LOW (tree11
))
9069 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
9070 return build2 (LROTATE_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9071 code0
== LSHIFT_EXPR
? tree01
: tree11
);
9072 else if (code11
== MINUS_EXPR
)
9074 tree tree110
, tree111
;
9075 tree110
= TREE_OPERAND (tree11
, 0);
9076 tree111
= TREE_OPERAND (tree11
, 1);
9077 STRIP_NOPS (tree110
);
9078 STRIP_NOPS (tree111
);
9079 if (TREE_CODE (tree110
) == INTEGER_CST
9080 && 0 == compare_tree_int (tree110
,
9082 (TREE_TYPE (TREE_OPERAND
9084 && operand_equal_p (tree01
, tree111
, 0))
9085 return build2 ((code0
== LSHIFT_EXPR
9088 type
, TREE_OPERAND (arg0
, 0), tree01
);
9090 else if (code01
== MINUS_EXPR
)
9092 tree tree010
, tree011
;
9093 tree010
= TREE_OPERAND (tree01
, 0);
9094 tree011
= TREE_OPERAND (tree01
, 1);
9095 STRIP_NOPS (tree010
);
9096 STRIP_NOPS (tree011
);
9097 if (TREE_CODE (tree010
) == INTEGER_CST
9098 && 0 == compare_tree_int (tree010
,
9100 (TREE_TYPE (TREE_OPERAND
9102 && operand_equal_p (tree11
, tree011
, 0))
9103 return build2 ((code0
!= LSHIFT_EXPR
9106 type
, TREE_OPERAND (arg0
, 0), tree11
);
9112 /* In most languages, can't associate operations on floats through
9113 parentheses. Rather than remember where the parentheses were, we
9114 don't associate floats at all, unless the user has specified
9115 -funsafe-math-optimizations. */
9117 if (! FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
)
9119 tree var0
, con0
, lit0
, minus_lit0
;
9120 tree var1
, con1
, lit1
, minus_lit1
;
9122 /* Split both trees into variables, constants, and literals. Then
9123 associate each group together, the constants with literals,
9124 then the result with variables. This increases the chances of
9125 literals being recombined later and of generating relocatable
9126 expressions for the sum of a constant and literal. */
9127 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
9128 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
9129 code
== MINUS_EXPR
);
9131 /* Only do something if we found more than two objects. Otherwise,
9132 nothing has changed and we risk infinite recursion. */
9133 if (2 < ((var0
!= 0) + (var1
!= 0)
9134 + (con0
!= 0) + (con1
!= 0)
9135 + (lit0
!= 0) + (lit1
!= 0)
9136 + (minus_lit0
!= 0) + (minus_lit1
!= 0)))
9138 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9139 if (code
== MINUS_EXPR
)
9142 var0
= associate_trees (var0
, var1
, code
, type
);
9143 con0
= associate_trees (con0
, con1
, code
, type
);
9144 lit0
= associate_trees (lit0
, lit1
, code
, type
);
9145 minus_lit0
= associate_trees (minus_lit0
, minus_lit1
, code
, type
);
9147 /* Preserve the MINUS_EXPR if the negative part of the literal is
9148 greater than the positive part. Otherwise, the multiplicative
9149 folding code (i.e extract_muldiv) may be fooled in case
9150 unsigned constants are subtracted, like in the following
9151 example: ((X*2 + 4) - 8U)/2. */
9152 if (minus_lit0
&& lit0
)
9154 if (TREE_CODE (lit0
) == INTEGER_CST
9155 && TREE_CODE (minus_lit0
) == INTEGER_CST
9156 && tree_int_cst_lt (lit0
, minus_lit0
))
9158 minus_lit0
= associate_trees (minus_lit0
, lit0
,
9164 lit0
= associate_trees (lit0
, minus_lit0
,
9172 return fold_convert (type
,
9173 associate_trees (var0
, minus_lit0
,
9177 con0
= associate_trees (con0
, minus_lit0
,
9179 return fold_convert (type
,
9180 associate_trees (var0
, con0
,
9185 con0
= associate_trees (con0
, lit0
, code
, type
);
9186 return fold_convert (type
, associate_trees (var0
, con0
,
9194 /* A - (-B) -> A + B */
9195 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
9196 return fold_build2 (PLUS_EXPR
, type
, arg0
, TREE_OPERAND (arg1
, 0));
9197 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9198 if (TREE_CODE (arg0
) == NEGATE_EXPR
9199 && (FLOAT_TYPE_P (type
)
9200 || INTEGRAL_TYPE_P (type
))
9201 && negate_expr_p (arg1
)
9202 && reorder_operands_p (arg0
, arg1
))
9203 return fold_build2 (MINUS_EXPR
, type
, negate_expr (arg1
),
9204 TREE_OPERAND (arg0
, 0));
9205 /* Convert -A - 1 to ~A. */
9206 if (INTEGRAL_TYPE_P (type
)
9207 && TREE_CODE (arg0
) == NEGATE_EXPR
9208 && integer_onep (arg1
)
9209 && !TYPE_OVERFLOW_TRAPS (type
))
9210 return fold_build1 (BIT_NOT_EXPR
, type
,
9211 fold_convert (type
, TREE_OPERAND (arg0
, 0)));
9213 /* Convert -1 - A to ~A. */
9214 if (INTEGRAL_TYPE_P (type
)
9215 && integer_all_onesp (arg0
))
9216 return fold_build1 (BIT_NOT_EXPR
, type
, op1
);
9218 if (! FLOAT_TYPE_P (type
))
9220 if (integer_zerop (arg0
))
9221 return negate_expr (fold_convert (type
, arg1
));
9222 if (integer_zerop (arg1
))
9223 return non_lvalue (fold_convert (type
, arg0
));
9225 /* Fold A - (A & B) into ~B & A. */
9226 if (!TREE_SIDE_EFFECTS (arg0
)
9227 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
9229 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0))
9230 return fold_build2 (BIT_AND_EXPR
, type
,
9231 fold_build1 (BIT_NOT_EXPR
, type
,
9232 TREE_OPERAND (arg1
, 0)),
9234 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9235 return fold_build2 (BIT_AND_EXPR
, type
,
9236 fold_build1 (BIT_NOT_EXPR
, type
,
9237 TREE_OPERAND (arg1
, 1)),
9241 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9242 any power of 2 minus 1. */
9243 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9244 && TREE_CODE (arg1
) == BIT_AND_EXPR
9245 && operand_equal_p (TREE_OPERAND (arg0
, 0),
9246 TREE_OPERAND (arg1
, 0), 0))
9248 tree mask0
= TREE_OPERAND (arg0
, 1);
9249 tree mask1
= TREE_OPERAND (arg1
, 1);
9250 tree tem
= fold_build1 (BIT_NOT_EXPR
, type
, mask0
);
9252 if (operand_equal_p (tem
, mask1
, 0))
9254 tem
= fold_build2 (BIT_XOR_EXPR
, type
,
9255 TREE_OPERAND (arg0
, 0), mask1
);
9256 return fold_build2 (MINUS_EXPR
, type
, tem
, mask1
);
9261 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9262 else if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 1))
9263 return non_lvalue (fold_convert (type
, arg0
));
9265 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9266 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9267 (-ARG1 + ARG0) reduces to -ARG1. */
9268 else if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
9269 return negate_expr (fold_convert (type
, arg1
));
9271 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9272 __complex__ ( x, -y ). This is not the same for SNaNs or if
9273 signed zeros are involved. */
9274 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9275 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
9276 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9278 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9279 tree arg0r
= fold_unary (REALPART_EXPR
, rtype
, arg0
);
9280 tree arg0i
= fold_unary (IMAGPART_EXPR
, rtype
, arg0
);
9281 bool arg0rz
= false, arg0iz
= false;
9282 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
9283 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
9285 tree arg1r
= fold_unary (REALPART_EXPR
, rtype
, arg1
);
9286 tree arg1i
= fold_unary (IMAGPART_EXPR
, rtype
, arg1
);
9287 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
9289 tree rp
= fold_build1 (NEGATE_EXPR
, rtype
,
9291 : build1 (REALPART_EXPR
, rtype
, arg1
));
9292 tree ip
= arg0i
? arg0i
9293 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
9294 return fold_build2 (COMPLEX_EXPR
, type
, rp
, ip
);
9296 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
9298 tree rp
= arg0r
? arg0r
9299 : build1 (REALPART_EXPR
, rtype
, arg0
);
9300 tree ip
= fold_build1 (NEGATE_EXPR
, rtype
,
9302 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
9303 return fold_build2 (COMPLEX_EXPR
, type
, rp
, ip
);
9308 /* Fold &x - &x. This can happen from &x.foo - &x.
9309 This is unsafe for certain floats even in non-IEEE formats.
9310 In IEEE, it is unsafe because it does wrong for NaNs.
9311 Also note that operand_equal_p is always false if an operand
9314 if ((! FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
)
9315 && operand_equal_p (arg0
, arg1
, 0))
9316 return fold_convert (type
, integer_zero_node
);
9318 /* A - B -> A + (-B) if B is easily negatable. */
9319 if (negate_expr_p (arg1
)
9320 && ((FLOAT_TYPE_P (type
)
9321 /* Avoid this transformation if B is a positive REAL_CST. */
9322 && (TREE_CODE (arg1
) != REAL_CST
9323 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
9324 || INTEGRAL_TYPE_P (type
)))
9325 return fold_build2 (PLUS_EXPR
, type
,
9326 fold_convert (type
, arg0
),
9327 fold_convert (type
, negate_expr (arg1
)));
9329 /* Try folding difference of addresses. */
9333 if ((TREE_CODE (arg0
) == ADDR_EXPR
9334 || TREE_CODE (arg1
) == ADDR_EXPR
)
9335 && ptr_difference_const (arg0
, arg1
, &diff
))
9336 return build_int_cst_type (type
, diff
);
9339 /* Fold &a[i] - &a[j] to i-j. */
9340 if (TREE_CODE (arg0
) == ADDR_EXPR
9341 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
9342 && TREE_CODE (arg1
) == ADDR_EXPR
9343 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
9345 tree aref0
= TREE_OPERAND (arg0
, 0);
9346 tree aref1
= TREE_OPERAND (arg1
, 0);
9347 if (operand_equal_p (TREE_OPERAND (aref0
, 0),
9348 TREE_OPERAND (aref1
, 0), 0))
9350 tree op0
= fold_convert (type
, TREE_OPERAND (aref0
, 1));
9351 tree op1
= fold_convert (type
, TREE_OPERAND (aref1
, 1));
9352 tree esz
= array_ref_element_size (aref0
);
9353 tree diff
= build2 (MINUS_EXPR
, type
, op0
, op1
);
9354 return fold_build2 (MULT_EXPR
, type
, diff
,
9355 fold_convert (type
, esz
));
9360 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
9361 of the array. Loop optimizer sometimes produce this type of
9363 if (TREE_CODE (arg0
) == ADDR_EXPR
)
9365 tem
= try_move_mult_to_index (MINUS_EXPR
, arg0
, arg1
);
9367 return fold_convert (type
, tem
);
9370 if (flag_unsafe_math_optimizations
9371 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
9372 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
9373 && (tem
= distribute_real_division (code
, type
, arg0
, arg1
)))
9376 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9378 if ((TREE_CODE (arg0
) == MULT_EXPR
9379 || TREE_CODE (arg1
) == MULT_EXPR
)
9380 && (!FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
))
9382 tree tem
= fold_plusminus_mult_expr (code
, type
, arg0
, arg1
);
9390 /* (-A) * (-B) -> A * B */
9391 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
9392 return fold_build2 (MULT_EXPR
, type
,
9393 fold_convert (type
, TREE_OPERAND (arg0
, 0)),
9394 fold_convert (type
, negate_expr (arg1
)));
9395 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
9396 return fold_build2 (MULT_EXPR
, type
,
9397 fold_convert (type
, negate_expr (arg0
)),
9398 fold_convert (type
, TREE_OPERAND (arg1
, 0)));
9400 if (! FLOAT_TYPE_P (type
))
9402 if (integer_zerop (arg1
))
9403 return omit_one_operand (type
, arg1
, arg0
);
9404 if (integer_onep (arg1
))
9405 return non_lvalue (fold_convert (type
, arg0
));
9406 /* Transform x * -1 into -x. */
9407 if (integer_all_onesp (arg1
))
9408 return fold_convert (type
, negate_expr (arg0
));
9409 /* Transform x * -C into -x * C if x is easily negatable. */
9410 if (TREE_CODE (arg1
) == INTEGER_CST
9411 && tree_int_cst_sgn (arg1
) == -1
9412 && negate_expr_p (arg0
)
9413 && (tem
= negate_expr (arg1
)) != arg1
9414 && !TREE_OVERFLOW (tem
))
9415 return fold_build2 (MULT_EXPR
, type
,
9416 negate_expr (arg0
), tem
);
9418 /* (a * (1 << b)) is (a << b) */
9419 if (TREE_CODE (arg1
) == LSHIFT_EXPR
9420 && integer_onep (TREE_OPERAND (arg1
, 0)))
9421 return fold_build2 (LSHIFT_EXPR
, type
, arg0
,
9422 TREE_OPERAND (arg1
, 1));
9423 if (TREE_CODE (arg0
) == LSHIFT_EXPR
9424 && integer_onep (TREE_OPERAND (arg0
, 0)))
9425 return fold_build2 (LSHIFT_EXPR
, type
, arg1
,
9426 TREE_OPERAND (arg0
, 1));
9428 if (TREE_CODE (arg1
) == INTEGER_CST
9429 && 0 != (tem
= extract_muldiv (op0
,
9430 fold_convert (type
, arg1
),
9432 return fold_convert (type
, tem
);
9434 /* Optimize z * conj(z) for integer complex numbers. */
9435 if (TREE_CODE (arg0
) == CONJ_EXPR
9436 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9437 return fold_mult_zconjz (type
, arg1
);
9438 if (TREE_CODE (arg1
) == CONJ_EXPR
9439 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9440 return fold_mult_zconjz (type
, arg0
);
9444 /* Maybe fold x * 0 to 0. The expressions aren't the same
9445 when x is NaN, since x * 0 is also NaN. Nor are they the
9446 same in modes with signed zeros, since multiplying a
9447 negative value by 0 gives -0, not +0. */
9448 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
9449 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
9450 && real_zerop (arg1
))
9451 return omit_one_operand (type
, arg1
, arg0
);
9452 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9453 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9454 && real_onep (arg1
))
9455 return non_lvalue (fold_convert (type
, arg0
));
9457 /* Transform x * -1.0 into -x. */
9458 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9459 && real_minus_onep (arg1
))
9460 return fold_convert (type
, negate_expr (arg0
));
9462 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9463 if (flag_unsafe_math_optimizations
9464 && TREE_CODE (arg0
) == RDIV_EXPR
9465 && TREE_CODE (arg1
) == REAL_CST
9466 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
9468 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
9471 return fold_build2 (RDIV_EXPR
, type
, tem
,
9472 TREE_OPERAND (arg0
, 1));
9475 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9476 if (operand_equal_p (arg0
, arg1
, 0))
9478 tree tem
= fold_strip_sign_ops (arg0
);
9479 if (tem
!= NULL_TREE
)
9481 tem
= fold_convert (type
, tem
);
9482 return fold_build2 (MULT_EXPR
, type
, tem
, tem
);
9486 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9487 This is not the same for NaNs or if signed zeros are
9489 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
9490 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
9491 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
9492 && TREE_CODE (arg1
) == COMPLEX_CST
9493 && real_zerop (TREE_REALPART (arg1
)))
9495 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9496 if (real_onep (TREE_IMAGPART (arg1
)))
9497 return fold_build2 (COMPLEX_EXPR
, type
,
9498 negate_expr (fold_build1 (IMAGPART_EXPR
,
9500 fold_build1 (REALPART_EXPR
, rtype
, arg0
));
9501 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
9502 return fold_build2 (COMPLEX_EXPR
, type
,
9503 fold_build1 (IMAGPART_EXPR
, rtype
, arg0
),
9504 negate_expr (fold_build1 (REALPART_EXPR
,
9508 /* Optimize z * conj(z) for floating point complex numbers.
9509 Guarded by flag_unsafe_math_optimizations as non-finite
9510 imaginary components don't produce scalar results. */
9511 if (flag_unsafe_math_optimizations
9512 && TREE_CODE (arg0
) == CONJ_EXPR
9513 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9514 return fold_mult_zconjz (type
, arg1
);
9515 if (flag_unsafe_math_optimizations
9516 && TREE_CODE (arg1
) == CONJ_EXPR
9517 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9518 return fold_mult_zconjz (type
, arg0
);
9520 if (flag_unsafe_math_optimizations
)
9522 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
9523 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
9525 /* Optimizations of root(...)*root(...). */
9526 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
9528 tree rootfn
, arg
, arglist
;
9529 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
9530 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
9532 /* Optimize sqrt(x)*sqrt(x) as x. */
9533 if (BUILTIN_SQRT_P (fcode0
)
9534 && operand_equal_p (arg00
, arg10
, 0)
9535 && ! HONOR_SNANS (TYPE_MODE (type
)))
9538 /* Optimize root(x)*root(y) as root(x*y). */
9539 rootfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
9540 arg
= fold_build2 (MULT_EXPR
, type
, arg00
, arg10
);
9541 arglist
= build_tree_list (NULL_TREE
, arg
);
9542 return build_function_call_expr (rootfn
, arglist
);
9545 /* Optimize expN(x)*expN(y) as expN(x+y). */
9546 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
9548 tree expfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
9549 tree arg
= fold_build2 (PLUS_EXPR
, type
,
9550 TREE_VALUE (TREE_OPERAND (arg0
, 1)),
9551 TREE_VALUE (TREE_OPERAND (arg1
, 1)));
9552 tree arglist
= build_tree_list (NULL_TREE
, arg
);
9553 return build_function_call_expr (expfn
, arglist
);
9556 /* Optimizations of pow(...)*pow(...). */
9557 if ((fcode0
== BUILT_IN_POW
&& fcode1
== BUILT_IN_POW
)
9558 || (fcode0
== BUILT_IN_POWF
&& fcode1
== BUILT_IN_POWF
)
9559 || (fcode0
== BUILT_IN_POWL
&& fcode1
== BUILT_IN_POWL
))
9561 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
9562 tree arg01
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0
,
9564 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
9565 tree arg11
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1
,
9568 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9569 if (operand_equal_p (arg01
, arg11
, 0))
9571 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
9572 tree arg
= fold_build2 (MULT_EXPR
, type
, arg00
, arg10
);
9573 tree arglist
= tree_cons (NULL_TREE
, arg
,
9574 build_tree_list (NULL_TREE
,
9576 return build_function_call_expr (powfn
, arglist
);
9579 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9580 if (operand_equal_p (arg00
, arg10
, 0))
9582 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
9583 tree arg
= fold_build2 (PLUS_EXPR
, type
, arg01
, arg11
);
9584 tree arglist
= tree_cons (NULL_TREE
, arg00
,
9585 build_tree_list (NULL_TREE
,
9587 return build_function_call_expr (powfn
, arglist
);
9591 /* Optimize tan(x)*cos(x) as sin(x). */
9592 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
9593 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
9594 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
9595 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
9596 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
9597 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
9598 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0
, 1)),
9599 TREE_VALUE (TREE_OPERAND (arg1
, 1)), 0))
9601 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
9603 if (sinfn
!= NULL_TREE
)
9604 return build_function_call_expr (sinfn
,
9605 TREE_OPERAND (arg0
, 1));
9608 /* Optimize x*pow(x,c) as pow(x,c+1). */
9609 if (fcode1
== BUILT_IN_POW
9610 || fcode1
== BUILT_IN_POWF
9611 || fcode1
== BUILT_IN_POWL
)
9613 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
9614 tree arg11
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1
,
9616 if (TREE_CODE (arg11
) == REAL_CST
9617 && !TREE_OVERFLOW (arg11
)
9618 && operand_equal_p (arg0
, arg10
, 0))
9620 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0);
9624 c
= TREE_REAL_CST (arg11
);
9625 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
9626 arg
= build_real (type
, c
);
9627 arglist
= build_tree_list (NULL_TREE
, arg
);
9628 arglist
= tree_cons (NULL_TREE
, arg0
, arglist
);
9629 return build_function_call_expr (powfn
, arglist
);
9633 /* Optimize pow(x,c)*x as pow(x,c+1). */
9634 if (fcode0
== BUILT_IN_POW
9635 || fcode0
== BUILT_IN_POWF
9636 || fcode0
== BUILT_IN_POWL
)
9638 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
9639 tree arg01
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0
,
9641 if (TREE_CODE (arg01
) == REAL_CST
9642 && !TREE_OVERFLOW (arg01
)
9643 && operand_equal_p (arg1
, arg00
, 0))
9645 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
9649 c
= TREE_REAL_CST (arg01
);
9650 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
9651 arg
= build_real (type
, c
);
9652 arglist
= build_tree_list (NULL_TREE
, arg
);
9653 arglist
= tree_cons (NULL_TREE
, arg1
, arglist
);
9654 return build_function_call_expr (powfn
, arglist
);
9658 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9660 && operand_equal_p (arg0
, arg1
, 0))
9662 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
9666 tree arg
= build_real (type
, dconst2
);
9667 tree arglist
= build_tree_list (NULL_TREE
, arg
);
9668 arglist
= tree_cons (NULL_TREE
, arg0
, arglist
);
9669 return build_function_call_expr (powfn
, arglist
);
9678 if (integer_all_onesp (arg1
))
9679 return omit_one_operand (type
, arg1
, arg0
);
9680 if (integer_zerop (arg1
))
9681 return non_lvalue (fold_convert (type
, arg0
));
9682 if (operand_equal_p (arg0
, arg1
, 0))
9683 return non_lvalue (fold_convert (type
, arg0
));
9686 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9687 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9689 t1
= build_int_cst_type (type
, -1);
9690 return omit_one_operand (type
, t1
, arg1
);
9694 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
9695 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9697 t1
= build_int_cst_type (type
, -1);
9698 return omit_one_operand (type
, t1
, arg0
);
9701 /* Canonicalize (X & C1) | C2. */
9702 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9703 && TREE_CODE (arg1
) == INTEGER_CST
9704 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9706 unsigned HOST_WIDE_INT hi1
, lo1
, hi2
, lo2
, mlo
, mhi
;
9707 int width
= TYPE_PRECISION (type
);
9708 hi1
= TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1));
9709 lo1
= TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1));
9710 hi2
= TREE_INT_CST_HIGH (arg1
);
9711 lo2
= TREE_INT_CST_LOW (arg1
);
9713 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9714 if ((hi1
& hi2
) == hi1
&& (lo1
& lo2
) == lo1
)
9715 return omit_one_operand (type
, arg1
, TREE_OPERAND (arg0
, 0));
9717 if (width
> HOST_BITS_PER_WIDE_INT
)
9719 mhi
= (unsigned HOST_WIDE_INT
) -1
9720 >> (2 * HOST_BITS_PER_WIDE_INT
- width
);
9726 mlo
= (unsigned HOST_WIDE_INT
) -1
9727 >> (HOST_BITS_PER_WIDE_INT
- width
);
9730 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9731 if ((~(hi1
| hi2
) & mhi
) == 0 && (~(lo1
| lo2
) & mlo
) == 0)
9732 return fold_build2 (BIT_IOR_EXPR
, type
,
9733 TREE_OPERAND (arg0
, 0), arg1
);
9735 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9738 if ((hi1
& ~hi2
) != hi1
|| (lo1
& ~lo2
) != lo1
)
9739 return fold_build2 (BIT_IOR_EXPR
, type
,
9740 fold_build2 (BIT_AND_EXPR
, type
,
9741 TREE_OPERAND (arg0
, 0),
9742 build_int_cst_wide (type
,
9748 /* (X & Y) | Y is (X, Y). */
9749 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9750 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
9751 return omit_one_operand (type
, arg1
, TREE_OPERAND (arg0
, 0));
9752 /* (X & Y) | X is (Y, X). */
9753 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9754 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
9755 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
9756 return omit_one_operand (type
, arg1
, TREE_OPERAND (arg0
, 1));
9757 /* X | (X & Y) is (Y, X). */
9758 if (TREE_CODE (arg1
) == BIT_AND_EXPR
9759 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
9760 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
9761 return omit_one_operand (type
, arg0
, TREE_OPERAND (arg1
, 1));
9762 /* X | (Y & X) is (Y, X). */
9763 if (TREE_CODE (arg1
) == BIT_AND_EXPR
9764 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
9765 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9766 return omit_one_operand (type
, arg0
, TREE_OPERAND (arg1
, 0));
9768 t1
= distribute_bit_expr (code
, type
, arg0
, arg1
);
9769 if (t1
!= NULL_TREE
)
9772 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9774 This results in more efficient code for machines without a NAND
9775 instruction. Combine will canonicalize to the first form
9776 which will allow use of NAND instructions provided by the
9777 backend if they exist. */
9778 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9779 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
9781 return fold_build1 (BIT_NOT_EXPR
, type
,
9782 build2 (BIT_AND_EXPR
, type
,
9783 TREE_OPERAND (arg0
, 0),
9784 TREE_OPERAND (arg1
, 0)));
9787 /* See if this can be simplified into a rotate first. If that
9788 is unsuccessful continue in the association code. */
9792 if (integer_zerop (arg1
))
9793 return non_lvalue (fold_convert (type
, arg0
));
9794 if (integer_all_onesp (arg1
))
9795 return fold_build1 (BIT_NOT_EXPR
, type
, arg0
);
9796 if (operand_equal_p (arg0
, arg1
, 0))
9797 return omit_one_operand (type
, integer_zero_node
, arg0
);
9800 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9801 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9803 t1
= build_int_cst_type (type
, -1);
9804 return omit_one_operand (type
, t1
, arg1
);
9808 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
9809 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9811 t1
= build_int_cst_type (type
, -1);
9812 return omit_one_operand (type
, t1
, arg0
);
9815 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9816 with a constant, and the two constants have no bits in common,
9817 we should treat this as a BIT_IOR_EXPR since this may produce more
9819 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9820 && TREE_CODE (arg1
) == BIT_AND_EXPR
9821 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9822 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9823 && integer_zerop (const_binop (BIT_AND_EXPR
,
9824 TREE_OPERAND (arg0
, 1),
9825 TREE_OPERAND (arg1
, 1), 0)))
9827 code
= BIT_IOR_EXPR
;
9831 /* (X | Y) ^ X -> Y & ~ X*/
9832 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
9833 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9835 tree t2
= TREE_OPERAND (arg0
, 1);
9836 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg1
),
9838 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
9839 fold_convert (type
, t1
));
9843 /* (Y | X) ^ X -> Y & ~ X*/
9844 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
9845 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
9847 tree t2
= TREE_OPERAND (arg0
, 0);
9848 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg1
),
9850 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
9851 fold_convert (type
, t1
));
9855 /* X ^ (X | Y) -> Y & ~ X*/
9856 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
9857 && operand_equal_p (TREE_OPERAND (arg1
, 0), arg0
, 0))
9859 tree t2
= TREE_OPERAND (arg1
, 1);
9860 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg0
),
9862 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
9863 fold_convert (type
, t1
));
9867 /* X ^ (Y | X) -> Y & ~ X*/
9868 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
9869 && operand_equal_p (TREE_OPERAND (arg1
, 1), arg0
, 0))
9871 tree t2
= TREE_OPERAND (arg1
, 0);
9872 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg0
),
9874 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
9875 fold_convert (type
, t1
));
9879 /* Convert ~X ^ ~Y to X ^ Y. */
9880 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9881 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
9882 return fold_build2 (code
, type
,
9883 fold_convert (type
, TREE_OPERAND (arg0
, 0)),
9884 fold_convert (type
, TREE_OPERAND (arg1
, 0)));
9886 /* Convert ~X ^ C to X ^ ~C. */
9887 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9888 && TREE_CODE (arg1
) == INTEGER_CST
)
9889 return fold_build2 (code
, type
,
9890 fold_convert (type
, TREE_OPERAND (arg0
, 0)),
9891 fold_build1 (BIT_NOT_EXPR
, type
, arg1
));
9893 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9894 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9895 && integer_onep (TREE_OPERAND (arg0
, 1))
9896 && integer_onep (arg1
))
9897 return fold_build2 (EQ_EXPR
, type
, arg0
,
9898 build_int_cst (TREE_TYPE (arg0
), 0));
9900 /* Fold (X & Y) ^ Y as ~X & Y. */
9901 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9902 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
9904 tem
= fold_convert (type
, TREE_OPERAND (arg0
, 0));
9905 return fold_build2 (BIT_AND_EXPR
, type
,
9906 fold_build1 (BIT_NOT_EXPR
, type
, tem
),
9907 fold_convert (type
, arg1
));
9909 /* Fold (X & Y) ^ X as ~Y & X. */
9910 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9911 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
9912 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
9914 tem
= fold_convert (type
, TREE_OPERAND (arg0
, 1));
9915 return fold_build2 (BIT_AND_EXPR
, type
,
9916 fold_build1 (BIT_NOT_EXPR
, type
, tem
),
9917 fold_convert (type
, arg1
));
9919 /* Fold X ^ (X & Y) as X & ~Y. */
9920 if (TREE_CODE (arg1
) == BIT_AND_EXPR
9921 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9923 tem
= fold_convert (type
, TREE_OPERAND (arg1
, 1));
9924 return fold_build2 (BIT_AND_EXPR
, type
,
9925 fold_convert (type
, arg0
),
9926 fold_build1 (BIT_NOT_EXPR
, type
, tem
));
9928 /* Fold X ^ (Y & X) as ~Y & X. */
9929 if (TREE_CODE (arg1
) == BIT_AND_EXPR
9930 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
9931 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9933 tem
= fold_convert (type
, TREE_OPERAND (arg1
, 0));
9934 return fold_build2 (BIT_AND_EXPR
, type
,
9935 fold_build1 (BIT_NOT_EXPR
, type
, tem
),
9936 fold_convert (type
, arg0
));
9939 /* See if this can be simplified into a rotate first. If that
9940 is unsuccessful continue in the association code. */
9944 if (integer_all_onesp (arg1
))
9945 return non_lvalue (fold_convert (type
, arg0
));
9946 if (integer_zerop (arg1
))
9947 return omit_one_operand (type
, arg1
, arg0
);
9948 if (operand_equal_p (arg0
, arg1
, 0))
9949 return non_lvalue (fold_convert (type
, arg0
));
9951 /* ~X & X is always zero. */
9952 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9953 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9954 return omit_one_operand (type
, integer_zero_node
, arg1
);
9956 /* X & ~X is always zero. */
9957 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
9958 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9959 return omit_one_operand (type
, integer_zero_node
, arg0
);
9961 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9962 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
9963 && TREE_CODE (arg1
) == INTEGER_CST
9964 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9965 return fold_build2 (BIT_IOR_EXPR
, type
,
9966 fold_build2 (BIT_AND_EXPR
, type
,
9967 TREE_OPERAND (arg0
, 0), arg1
),
9968 fold_build2 (BIT_AND_EXPR
, type
,
9969 TREE_OPERAND (arg0
, 1), arg1
));
9971 /* (X | Y) & Y is (X, Y). */
9972 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
9973 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
9974 return omit_one_operand (type
, arg1
, TREE_OPERAND (arg0
, 0));
9975 /* (X | Y) & X is (Y, X). */
9976 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
9977 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
9978 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
9979 return omit_one_operand (type
, arg1
, TREE_OPERAND (arg0
, 1));
9980 /* X & (X | Y) is (Y, X). */
9981 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
9982 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
9983 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
9984 return omit_one_operand (type
, arg0
, TREE_OPERAND (arg1
, 1));
9985 /* X & (Y | X) is (Y, X). */
9986 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
9987 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
9988 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9989 return omit_one_operand (type
, arg0
, TREE_OPERAND (arg1
, 0));
9991 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9992 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9993 && integer_onep (TREE_OPERAND (arg0
, 1))
9994 && integer_onep (arg1
))
9996 tem
= TREE_OPERAND (arg0
, 0);
9997 return fold_build2 (EQ_EXPR
, type
,
9998 fold_build2 (BIT_AND_EXPR
, TREE_TYPE (tem
), tem
,
9999 build_int_cst (TREE_TYPE (tem
), 1)),
10000 build_int_cst (TREE_TYPE (tem
), 0));
10002 /* Fold ~X & 1 as (X & 1) == 0. */
10003 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10004 && integer_onep (arg1
))
10006 tem
= TREE_OPERAND (arg0
, 0);
10007 return fold_build2 (EQ_EXPR
, type
,
10008 fold_build2 (BIT_AND_EXPR
, TREE_TYPE (tem
), tem
,
10009 build_int_cst (TREE_TYPE (tem
), 1)),
10010 build_int_cst (TREE_TYPE (tem
), 0));
10013 /* Fold (X ^ Y) & Y as ~X & Y. */
10014 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10015 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
10017 tem
= fold_convert (type
, TREE_OPERAND (arg0
, 0));
10018 return fold_build2 (BIT_AND_EXPR
, type
,
10019 fold_build1 (BIT_NOT_EXPR
, type
, tem
),
10020 fold_convert (type
, arg1
));
10022 /* Fold (X ^ Y) & X as ~Y & X. */
10023 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10024 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
10025 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
10027 tem
= fold_convert (type
, TREE_OPERAND (arg0
, 1));
10028 return fold_build2 (BIT_AND_EXPR
, type
,
10029 fold_build1 (BIT_NOT_EXPR
, type
, tem
),
10030 fold_convert (type
, arg1
));
10032 /* Fold X & (X ^ Y) as X & ~Y. */
10033 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
10034 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10036 tem
= fold_convert (type
, TREE_OPERAND (arg1
, 1));
10037 return fold_build2 (BIT_AND_EXPR
, type
,
10038 fold_convert (type
, arg0
),
10039 fold_build1 (BIT_NOT_EXPR
, type
, tem
));
10041 /* Fold X & (Y ^ X) as ~Y & X. */
10042 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
10043 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
10044 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
10046 tem
= fold_convert (type
, TREE_OPERAND (arg1
, 0));
10047 return fold_build2 (BIT_AND_EXPR
, type
,
10048 fold_build1 (BIT_NOT_EXPR
, type
, tem
),
10049 fold_convert (type
, arg0
));
10052 t1
= distribute_bit_expr (code
, type
, arg0
, arg1
);
10053 if (t1
!= NULL_TREE
)
10055 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10056 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
10057 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
10060 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
10062 if (prec
< BITS_PER_WORD
&& prec
< HOST_BITS_PER_WIDE_INT
10063 && (~TREE_INT_CST_LOW (arg1
)
10064 & (((HOST_WIDE_INT
) 1 << prec
) - 1)) == 0)
10065 return fold_convert (type
, TREE_OPERAND (arg0
, 0));
10068 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10070 This results in more efficient code for machines without a NOR
10071 instruction. Combine will canonicalize to the first form
10072 which will allow use of NOR instructions provided by the
10073 backend if they exist. */
10074 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10075 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
10077 return fold_build1 (BIT_NOT_EXPR
, type
,
10078 build2 (BIT_IOR_EXPR
, type
,
10079 TREE_OPERAND (arg0
, 0),
10080 TREE_OPERAND (arg1
, 0)));
10086 /* Don't touch a floating-point divide by zero unless the mode
10087 of the constant can represent infinity. */
10088 if (TREE_CODE (arg1
) == REAL_CST
10089 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
10090 && real_zerop (arg1
))
10093 /* Optimize A / A to 1.0 if we don't care about
10094 NaNs or Infinities. Skip the transformation
10095 for non-real operands. */
10096 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0
))
10097 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
10098 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0
)))
10099 && operand_equal_p (arg0
, arg1
, 0))
10101 tree r
= build_real (TREE_TYPE (arg0
), dconst1
);
10103 return omit_two_operands (type
, r
, arg0
, arg1
);
10106 /* The complex version of the above A / A optimization. */
10107 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
10108 && operand_equal_p (arg0
, arg1
, 0))
10110 tree elem_type
= TREE_TYPE (TREE_TYPE (arg0
));
10111 if (! HONOR_NANS (TYPE_MODE (elem_type
))
10112 && ! HONOR_INFINITIES (TYPE_MODE (elem_type
)))
10114 tree r
= build_real (elem_type
, dconst1
);
10115 /* omit_two_operands will call fold_convert for us. */
10116 return omit_two_operands (type
, r
, arg0
, arg1
);
10120 /* (-A) / (-B) -> A / B */
10121 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10122 return fold_build2 (RDIV_EXPR
, type
,
10123 TREE_OPERAND (arg0
, 0),
10124 negate_expr (arg1
));
10125 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10126 return fold_build2 (RDIV_EXPR
, type
,
10127 negate_expr (arg0
),
10128 TREE_OPERAND (arg1
, 0));
10130 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10131 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10132 && real_onep (arg1
))
10133 return non_lvalue (fold_convert (type
, arg0
));
10135 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10136 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10137 && real_minus_onep (arg1
))
10138 return non_lvalue (fold_convert (type
, negate_expr (arg0
)));
10140 /* If ARG1 is a constant, we can convert this to a multiply by the
10141 reciprocal. This does not have the same rounding properties,
10142 so only do this if -funsafe-math-optimizations. We can actually
10143 always safely do it if ARG1 is a power of two, but it's hard to
10144 tell if it is or not in a portable manner. */
10145 if (TREE_CODE (arg1
) == REAL_CST
)
10147 if (flag_unsafe_math_optimizations
10148 && 0 != (tem
= const_binop (code
, build_real (type
, dconst1
),
10150 return fold_build2 (MULT_EXPR
, type
, arg0
, tem
);
10151 /* Find the reciprocal if optimizing and the result is exact. */
10155 r
= TREE_REAL_CST (arg1
);
10156 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0
)), &r
))
10158 tem
= build_real (type
, r
);
10159 return fold_build2 (MULT_EXPR
, type
,
10160 fold_convert (type
, arg0
), tem
);
10164 /* Convert A/B/C to A/(B*C). */
10165 if (flag_unsafe_math_optimizations
10166 && TREE_CODE (arg0
) == RDIV_EXPR
)
10167 return fold_build2 (RDIV_EXPR
, type
, TREE_OPERAND (arg0
, 0),
10168 fold_build2 (MULT_EXPR
, type
,
10169 TREE_OPERAND (arg0
, 1), arg1
));
10171 /* Convert A/(B/C) to (A/B)*C. */
10172 if (flag_unsafe_math_optimizations
10173 && TREE_CODE (arg1
) == RDIV_EXPR
)
10174 return fold_build2 (MULT_EXPR
, type
,
10175 fold_build2 (RDIV_EXPR
, type
, arg0
,
10176 TREE_OPERAND (arg1
, 0)),
10177 TREE_OPERAND (arg1
, 1));
10179 /* Convert C1/(X*C2) into (C1/C2)/X. */
10180 if (flag_unsafe_math_optimizations
10181 && TREE_CODE (arg1
) == MULT_EXPR
10182 && TREE_CODE (arg0
) == REAL_CST
10183 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
10185 tree tem
= const_binop (RDIV_EXPR
, arg0
,
10186 TREE_OPERAND (arg1
, 1), 0);
10188 return fold_build2 (RDIV_EXPR
, type
, tem
,
10189 TREE_OPERAND (arg1
, 0));
10192 if (flag_unsafe_math_optimizations
)
10194 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
10195 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
10197 /* Optimize sin(x)/cos(x) as tan(x). */
10198 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_COS
)
10199 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_COSF
)
10200 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_COSL
))
10201 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0
, 1)),
10202 TREE_VALUE (TREE_OPERAND (arg1
, 1)), 0))
10204 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
10206 if (tanfn
!= NULL_TREE
)
10207 return build_function_call_expr (tanfn
,
10208 TREE_OPERAND (arg0
, 1));
10211 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10212 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
10213 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
10214 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
10215 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0
, 1)),
10216 TREE_VALUE (TREE_OPERAND (arg1
, 1)), 0))
10218 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
10220 if (tanfn
!= NULL_TREE
)
10222 tree tmp
= TREE_OPERAND (arg0
, 1);
10223 tmp
= build_function_call_expr (tanfn
, tmp
);
10224 return fold_build2 (RDIV_EXPR
, type
,
10225 build_real (type
, dconst1
), tmp
);
10229 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10230 NaNs or Infinities. */
10231 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_TAN
)
10232 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_TANF
)
10233 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_TANL
)))
10235 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
10236 tree arg01
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
10238 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
10239 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
10240 && operand_equal_p (arg00
, arg01
, 0))
10242 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
10244 if (cosfn
!= NULL_TREE
)
10245 return build_function_call_expr (cosfn
,
10246 TREE_OPERAND (arg0
, 1));
10250 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10251 NaNs or Infinities. */
10252 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_SIN
)
10253 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_SINF
)
10254 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_SINL
)))
10256 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
10257 tree arg01
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
10259 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
10260 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
10261 && operand_equal_p (arg00
, arg01
, 0))
10263 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
10265 if (cosfn
!= NULL_TREE
)
10267 tree tmp
= TREE_OPERAND (arg0
, 1);
10268 tmp
= build_function_call_expr (cosfn
, tmp
);
10269 return fold_build2 (RDIV_EXPR
, type
,
10270 build_real (type
, dconst1
),
10276 /* Optimize pow(x,c)/x as pow(x,c-1). */
10277 if (fcode0
== BUILT_IN_POW
10278 || fcode0
== BUILT_IN_POWF
10279 || fcode0
== BUILT_IN_POWL
)
10281 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
10282 tree arg01
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0
, 1)));
10283 if (TREE_CODE (arg01
) == REAL_CST
10284 && !TREE_OVERFLOW (arg01
)
10285 && operand_equal_p (arg1
, arg00
, 0))
10287 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
10291 c
= TREE_REAL_CST (arg01
);
10292 real_arithmetic (&c
, MINUS_EXPR
, &c
, &dconst1
);
10293 arg
= build_real (type
, c
);
10294 arglist
= build_tree_list (NULL_TREE
, arg
);
10295 arglist
= tree_cons (NULL_TREE
, arg1
, arglist
);
10296 return build_function_call_expr (powfn
, arglist
);
10300 /* Optimize x/expN(y) into x*expN(-y). */
10301 if (BUILTIN_EXPONENT_P (fcode1
))
10303 tree expfn
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0);
10304 tree arg
= negate_expr (TREE_VALUE (TREE_OPERAND (arg1
, 1)));
10305 tree arglist
= build_tree_list (NULL_TREE
,
10306 fold_convert (type
, arg
));
10307 arg1
= build_function_call_expr (expfn
, arglist
);
10308 return fold_build2 (MULT_EXPR
, type
, arg0
, arg1
);
10311 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10312 if (fcode1
== BUILT_IN_POW
10313 || fcode1
== BUILT_IN_POWF
10314 || fcode1
== BUILT_IN_POWL
)
10316 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0);
10317 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
10318 tree arg11
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1
, 1)));
10319 tree neg11
= fold_convert (type
, negate_expr (arg11
));
10320 tree arglist
= tree_cons (NULL_TREE
, arg10
,
10321 build_tree_list (NULL_TREE
, neg11
));
10322 arg1
= build_function_call_expr (powfn
, arglist
);
10323 return fold_build2 (MULT_EXPR
, type
, arg0
, arg1
);
10328 case TRUNC_DIV_EXPR
:
10329 case FLOOR_DIV_EXPR
:
10330 /* Simplify A / (B << N) where A and B are positive and B is
10331 a power of 2, to A >> (N + log2(B)). */
10332 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10333 && (TYPE_UNSIGNED (type
) || tree_expr_nonnegative_p (arg0
)))
10335 tree sval
= TREE_OPERAND (arg1
, 0);
10336 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
10338 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
10339 unsigned long pow2
= exact_log2 (TREE_INT_CST_LOW (sval
));
10341 sh_cnt
= fold_build2 (PLUS_EXPR
, TREE_TYPE (sh_cnt
),
10342 sh_cnt
, build_int_cst (NULL_TREE
, pow2
));
10343 return fold_build2 (RSHIFT_EXPR
, type
,
10344 fold_convert (type
, arg0
), sh_cnt
);
10349 case ROUND_DIV_EXPR
:
10350 case CEIL_DIV_EXPR
:
10351 case EXACT_DIV_EXPR
:
10352 if (integer_onep (arg1
))
10353 return non_lvalue (fold_convert (type
, arg0
));
10354 if (integer_zerop (arg1
))
10356 /* X / -1 is -X. */
10357 if (!TYPE_UNSIGNED (type
)
10358 && TREE_CODE (arg1
) == INTEGER_CST
10359 && TREE_INT_CST_LOW (arg1
) == (unsigned HOST_WIDE_INT
) -1
10360 && TREE_INT_CST_HIGH (arg1
) == -1)
10361 return fold_convert (type
, negate_expr (arg0
));
10363 /* Convert -A / -B to A / B when the type is signed and overflow is
10365 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
10366 && TREE_CODE (arg0
) == NEGATE_EXPR
10367 && negate_expr_p (arg1
))
10368 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0),
10369 negate_expr (arg1
));
10370 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
10371 && TREE_CODE (arg1
) == NEGATE_EXPR
10372 && negate_expr_p (arg0
))
10373 return fold_build2 (code
, type
, negate_expr (arg0
),
10374 TREE_OPERAND (arg1
, 0));
10376 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10377 operation, EXACT_DIV_EXPR.
10379 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10380 At one time others generated faster code, it's not clear if they do
10381 after the last round to changes to the DIV code in expmed.c. */
10382 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
10383 && multiple_of_p (type
, arg0
, arg1
))
10384 return fold_build2 (EXACT_DIV_EXPR
, type
, arg0
, arg1
);
10386 if (TREE_CODE (arg1
) == INTEGER_CST
10387 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
)))
10388 return fold_convert (type
, tem
);
10392 case CEIL_MOD_EXPR
:
10393 case FLOOR_MOD_EXPR
:
10394 case ROUND_MOD_EXPR
:
10395 case TRUNC_MOD_EXPR
:
10396 /* X % 1 is always zero, but be sure to preserve any side
10398 if (integer_onep (arg1
))
10399 return omit_one_operand (type
, integer_zero_node
, arg0
);
10401 /* X % 0, return X % 0 unchanged so that we can get the
10402 proper warnings and errors. */
10403 if (integer_zerop (arg1
))
10406 /* 0 % X is always zero, but be sure to preserve any side
10407 effects in X. Place this after checking for X == 0. */
10408 if (integer_zerop (arg0
))
10409 return omit_one_operand (type
, integer_zero_node
, arg1
);
10411 /* X % -1 is zero. */
10412 if (!TYPE_UNSIGNED (type
)
10413 && TREE_CODE (arg1
) == INTEGER_CST
10414 && TREE_INT_CST_LOW (arg1
) == (unsigned HOST_WIDE_INT
) -1
10415 && TREE_INT_CST_HIGH (arg1
) == -1)
10416 return omit_one_operand (type
, integer_zero_node
, arg0
);
10418 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10419 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10420 if ((code
== TRUNC_MOD_EXPR
|| code
== FLOOR_MOD_EXPR
)
10421 && (TYPE_UNSIGNED (type
) || tree_expr_nonnegative_p (arg0
)))
10424 /* Also optimize A % (C << N) where C is a power of 2,
10425 to A & ((C << N) - 1). */
10426 if (TREE_CODE (arg1
) == LSHIFT_EXPR
)
10427 c
= TREE_OPERAND (arg1
, 0);
10429 if (integer_pow2p (c
) && tree_int_cst_sgn (c
) > 0)
10431 tree mask
= fold_build2 (MINUS_EXPR
, TREE_TYPE (arg1
), arg1
,
10432 build_int_cst (TREE_TYPE (arg1
), 1));
10433 return fold_build2 (BIT_AND_EXPR
, type
,
10434 fold_convert (type
, arg0
),
10435 fold_convert (type
, mask
));
10439 /* X % -C is the same as X % C. */
10440 if (code
== TRUNC_MOD_EXPR
10441 && !TYPE_UNSIGNED (type
)
10442 && TREE_CODE (arg1
) == INTEGER_CST
10443 && !TREE_OVERFLOW (arg1
)
10444 && TREE_INT_CST_HIGH (arg1
) < 0
10445 && !TYPE_OVERFLOW_TRAPS (type
)
10446 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10447 && !sign_bit_p (arg1
, arg1
))
10448 return fold_build2 (code
, type
, fold_convert (type
, arg0
),
10449 fold_convert (type
, negate_expr (arg1
)));
10451 /* X % -Y is the same as X % Y. */
10452 if (code
== TRUNC_MOD_EXPR
10453 && !TYPE_UNSIGNED (type
)
10454 && TREE_CODE (arg1
) == NEGATE_EXPR
10455 && !TYPE_OVERFLOW_TRAPS (type
))
10456 return fold_build2 (code
, type
, fold_convert (type
, arg0
),
10457 fold_convert (type
, TREE_OPERAND (arg1
, 0)));
10459 if (TREE_CODE (arg1
) == INTEGER_CST
10460 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
)))
10461 return fold_convert (type
, tem
);
10467 if (integer_all_onesp (arg0
))
10468 return omit_one_operand (type
, arg0
, arg1
);
10472 /* Optimize -1 >> x for arithmetic right shifts. */
10473 if (integer_all_onesp (arg0
) && !TYPE_UNSIGNED (type
))
10474 return omit_one_operand (type
, arg0
, arg1
);
10475 /* ... fall through ... */
10479 if (integer_zerop (arg1
))
10480 return non_lvalue (fold_convert (type
, arg0
));
10481 if (integer_zerop (arg0
))
10482 return omit_one_operand (type
, arg0
, arg1
);
10484 /* Since negative shift count is not well-defined,
10485 don't try to compute it in the compiler. */
10486 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
10489 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10490 if (TREE_CODE (op0
) == code
&& host_integerp (arg1
, false)
10491 && TREE_INT_CST_LOW (arg1
) < TYPE_PRECISION (type
)
10492 && host_integerp (TREE_OPERAND (arg0
, 1), false)
10493 && TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) < TYPE_PRECISION (type
))
10495 HOST_WIDE_INT low
= (TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1))
10496 + TREE_INT_CST_LOW (arg1
));
10498 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10499 being well defined. */
10500 if (low
>= TYPE_PRECISION (type
))
10502 if (code
== LROTATE_EXPR
|| code
== RROTATE_EXPR
)
10503 low
= low
% TYPE_PRECISION (type
);
10504 else if (TYPE_UNSIGNED (type
) || code
== LSHIFT_EXPR
)
10505 return build_int_cst (type
, 0);
10507 low
= TYPE_PRECISION (type
) - 1;
10510 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0),
10511 build_int_cst (type
, low
));
10514 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10515 into x & ((unsigned)-1 >> c) for unsigned types. */
10516 if (((code
== LSHIFT_EXPR
&& TREE_CODE (arg0
) == RSHIFT_EXPR
)
10517 || (TYPE_UNSIGNED (type
)
10518 && code
== RSHIFT_EXPR
&& TREE_CODE (arg0
) == LSHIFT_EXPR
))
10519 && host_integerp (arg1
, false)
10520 && TREE_INT_CST_LOW (arg1
) < TYPE_PRECISION (type
)
10521 && host_integerp (TREE_OPERAND (arg0
, 1), false)
10522 && TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) < TYPE_PRECISION (type
))
10524 HOST_WIDE_INT low0
= TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1));
10525 HOST_WIDE_INT low1
= TREE_INT_CST_LOW (arg1
);
10531 arg00
= fold_convert (type
, TREE_OPERAND (arg0
, 0));
10533 lshift
= build_int_cst (type
, -1);
10534 lshift
= int_const_binop (code
, lshift
, arg1
, 0);
10536 return fold_build2 (BIT_AND_EXPR
, type
, arg00
, lshift
);
10540 /* Rewrite an LROTATE_EXPR by a constant into an
10541 RROTATE_EXPR by a new constant. */
10542 if (code
== LROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
)
10544 tree tem
= build_int_cst (TREE_TYPE (arg1
),
10545 GET_MODE_BITSIZE (TYPE_MODE (type
)));
10546 tem
= const_binop (MINUS_EXPR
, tem
, arg1
, 0);
10547 return fold_build2 (RROTATE_EXPR
, type
, arg0
, tem
);
10550 /* If we have a rotate of a bit operation with the rotate count and
10551 the second operand of the bit operation both constant,
10552 permute the two operations. */
10553 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
10554 && (TREE_CODE (arg0
) == BIT_AND_EXPR
10555 || TREE_CODE (arg0
) == BIT_IOR_EXPR
10556 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
10557 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10558 return fold_build2 (TREE_CODE (arg0
), type
,
10559 fold_build2 (code
, type
,
10560 TREE_OPERAND (arg0
, 0), arg1
),
10561 fold_build2 (code
, type
,
10562 TREE_OPERAND (arg0
, 1), arg1
));
10564 /* Two consecutive rotates adding up to the width of the mode can
10566 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
10567 && TREE_CODE (arg0
) == RROTATE_EXPR
10568 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10569 && TREE_INT_CST_HIGH (arg1
) == 0
10570 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == 0
10571 && ((TREE_INT_CST_LOW (arg1
)
10572 + TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)))
10573 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type
))))
10574 return TREE_OPERAND (arg0
, 0);
10579 if (operand_equal_p (arg0
, arg1
, 0))
10580 return omit_one_operand (type
, arg0
, arg1
);
10581 if (INTEGRAL_TYPE_P (type
)
10582 && operand_equal_p (arg1
, TYPE_MIN_VALUE (type
), OEP_ONLY_CONST
))
10583 return omit_one_operand (type
, arg1
, arg0
);
10584 tem
= fold_minmax (MIN_EXPR
, type
, arg0
, arg1
);
10590 if (operand_equal_p (arg0
, arg1
, 0))
10591 return omit_one_operand (type
, arg0
, arg1
);
10592 if (INTEGRAL_TYPE_P (type
)
10593 && TYPE_MAX_VALUE (type
)
10594 && operand_equal_p (arg1
, TYPE_MAX_VALUE (type
), OEP_ONLY_CONST
))
10595 return omit_one_operand (type
, arg1
, arg0
);
10596 tem
= fold_minmax (MAX_EXPR
, type
, arg0
, arg1
);
10601 case TRUTH_ANDIF_EXPR
:
10602 /* Note that the operands of this must be ints
10603 and their values must be 0 or 1.
10604 ("true" is a fixed value perhaps depending on the language.) */
10605 /* If first arg is constant zero, return it. */
10606 if (integer_zerop (arg0
))
10607 return fold_convert (type
, arg0
);
10608 case TRUTH_AND_EXPR
:
10609 /* If either arg is constant true, drop it. */
10610 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
10611 return non_lvalue (fold_convert (type
, arg1
));
10612 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
10613 /* Preserve sequence points. */
10614 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
10615 return non_lvalue (fold_convert (type
, arg0
));
10616 /* If second arg is constant zero, result is zero, but first arg
10617 must be evaluated. */
10618 if (integer_zerop (arg1
))
10619 return omit_one_operand (type
, arg1
, arg0
);
10620 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10621 case will be handled here. */
10622 if (integer_zerop (arg0
))
10623 return omit_one_operand (type
, arg0
, arg1
);
10625 /* !X && X is always false. */
10626 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10627 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10628 return omit_one_operand (type
, integer_zero_node
, arg1
);
10629 /* X && !X is always false. */
10630 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
10631 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10632 return omit_one_operand (type
, integer_zero_node
, arg0
);
10634 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10635 means A >= Y && A != MAX, but in this case we know that
10638 if (!TREE_SIDE_EFFECTS (arg0
)
10639 && !TREE_SIDE_EFFECTS (arg1
))
10641 tem
= fold_to_nonsharp_ineq_using_bound (arg0
, arg1
);
10642 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
10643 return fold_build2 (code
, type
, tem
, arg1
);
10645 tem
= fold_to_nonsharp_ineq_using_bound (arg1
, arg0
);
10646 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
10647 return fold_build2 (code
, type
, arg0
, tem
);
10651 /* We only do these simplifications if we are optimizing. */
10655 /* Check for things like (A || B) && (A || C). We can convert this
10656 to A || (B && C). Note that either operator can be any of the four
10657 truth and/or operations and the transformation will still be
10658 valid. Also note that we only care about order for the
10659 ANDIF and ORIF operators. If B contains side effects, this
10660 might change the truth-value of A. */
10661 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
10662 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
10663 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
10664 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
10665 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
10666 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
10668 tree a00
= TREE_OPERAND (arg0
, 0);
10669 tree a01
= TREE_OPERAND (arg0
, 1);
10670 tree a10
= TREE_OPERAND (arg1
, 0);
10671 tree a11
= TREE_OPERAND (arg1
, 1);
10672 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
10673 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
10674 && (code
== TRUTH_AND_EXPR
10675 || code
== TRUTH_OR_EXPR
));
10677 if (operand_equal_p (a00
, a10
, 0))
10678 return fold_build2 (TREE_CODE (arg0
), type
, a00
,
10679 fold_build2 (code
, type
, a01
, a11
));
10680 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
10681 return fold_build2 (TREE_CODE (arg0
), type
, a00
,
10682 fold_build2 (code
, type
, a01
, a10
));
10683 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
10684 return fold_build2 (TREE_CODE (arg0
), type
, a01
,
10685 fold_build2 (code
, type
, a00
, a11
));
10687 /* This case is tricky because we must either have commutative
10688 operators or else A10 must not have side-effects. */
10690 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
10691 && operand_equal_p (a01
, a11
, 0))
10692 return fold_build2 (TREE_CODE (arg0
), type
,
10693 fold_build2 (code
, type
, a00
, a10
),
10697 /* See if we can build a range comparison. */
10698 if (0 != (tem
= fold_range_test (code
, type
, op0
, op1
)))
10701 /* Check for the possibility of merging component references. If our
10702 lhs is another similar operation, try to merge its rhs with our
10703 rhs. Then try to merge our lhs and rhs. */
10704 if (TREE_CODE (arg0
) == code
10705 && 0 != (tem
= fold_truthop (code
, type
,
10706 TREE_OPERAND (arg0
, 1), arg1
)))
10707 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
);
10709 if ((tem
= fold_truthop (code
, type
, arg0
, arg1
)) != 0)
10714 case TRUTH_ORIF_EXPR
:
10715 /* Note that the operands of this must be ints
10716 and their values must be 0 or true.
10717 ("true" is a fixed value perhaps depending on the language.) */
10718 /* If first arg is constant true, return it. */
10719 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
10720 return fold_convert (type
, arg0
);
10721 case TRUTH_OR_EXPR
:
10722 /* If either arg is constant zero, drop it. */
10723 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
10724 return non_lvalue (fold_convert (type
, arg1
));
10725 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
10726 /* Preserve sequence points. */
10727 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
10728 return non_lvalue (fold_convert (type
, arg0
));
10729 /* If second arg is constant true, result is true, but we must
10730 evaluate first arg. */
10731 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
10732 return omit_one_operand (type
, arg1
, arg0
);
10733 /* Likewise for first arg, but note this only occurs here for
10735 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
10736 return omit_one_operand (type
, arg0
, arg1
);
10738 /* !X || X is always true. */
10739 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10740 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10741 return omit_one_operand (type
, integer_one_node
, arg1
);
10742 /* X || !X is always true. */
10743 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
10744 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10745 return omit_one_operand (type
, integer_one_node
, arg0
);
10749 case TRUTH_XOR_EXPR
:
10750 /* If the second arg is constant zero, drop it. */
10751 if (integer_zerop (arg1
))
10752 return non_lvalue (fold_convert (type
, arg0
));
10753 /* If the second arg is constant true, this is a logical inversion. */
10754 if (integer_onep (arg1
))
10756 /* Only call invert_truthvalue if operand is a truth value. */
10757 if (TREE_CODE (TREE_TYPE (arg0
)) != BOOLEAN_TYPE
)
10758 tem
= fold_build1 (TRUTH_NOT_EXPR
, TREE_TYPE (arg0
), arg0
);
10760 tem
= invert_truthvalue (arg0
);
10761 return non_lvalue (fold_convert (type
, tem
));
10763 /* Identical arguments cancel to zero. */
10764 if (operand_equal_p (arg0
, arg1
, 0))
10765 return omit_one_operand (type
, integer_zero_node
, arg0
);
10767 /* !X ^ X is always true. */
10768 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10769 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10770 return omit_one_operand (type
, integer_one_node
, arg1
);
10772 /* X ^ !X is always true. */
10773 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
10774 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10775 return omit_one_operand (type
, integer_one_node
, arg0
);
10781 tem
= fold_comparison (code
, type
, op0
, op1
);
10782 if (tem
!= NULL_TREE
)
10785 /* bool_var != 0 becomes bool_var. */
10786 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
10787 && code
== NE_EXPR
)
10788 return non_lvalue (fold_convert (type
, arg0
));
10790 /* bool_var == 1 becomes bool_var. */
10791 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
10792 && code
== EQ_EXPR
)
10793 return non_lvalue (fold_convert (type
, arg0
));
10795 /* bool_var != 1 becomes !bool_var. */
10796 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
10797 && code
== NE_EXPR
)
10798 return fold_build1 (TRUTH_NOT_EXPR
, type
, arg0
);
10800 /* bool_var == 0 becomes !bool_var. */
10801 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
10802 && code
== EQ_EXPR
)
10803 return fold_build1 (TRUTH_NOT_EXPR
, type
, arg0
);
10805 /* If this is an equality comparison of the address of a non-weak
10806 object against zero, then we know the result. */
10807 if (TREE_CODE (arg0
) == ADDR_EXPR
10808 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0
, 0))
10809 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
10810 && integer_zerop (arg1
))
10811 return constant_boolean_node (code
!= EQ_EXPR
, type
);
10813 /* If this is an equality comparison of the address of two non-weak,
10814 unaliased symbols neither of which are extern (since we do not
10815 have access to attributes for externs), then we know the result. */
10816 if (TREE_CODE (arg0
) == ADDR_EXPR
10817 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0
, 0))
10818 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
10819 && ! lookup_attribute ("alias",
10820 DECL_ATTRIBUTES (TREE_OPERAND (arg0
, 0)))
10821 && ! DECL_EXTERNAL (TREE_OPERAND (arg0
, 0))
10822 && TREE_CODE (arg1
) == ADDR_EXPR
10823 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1
, 0))
10824 && ! DECL_WEAK (TREE_OPERAND (arg1
, 0))
10825 && ! lookup_attribute ("alias",
10826 DECL_ATTRIBUTES (TREE_OPERAND (arg1
, 0)))
10827 && ! DECL_EXTERNAL (TREE_OPERAND (arg1
, 0)))
10829 /* We know that we're looking at the address of two
10830 non-weak, unaliased, static _DECL nodes.
10832 It is both wasteful and incorrect to call operand_equal_p
10833 to compare the two ADDR_EXPR nodes. It is wasteful in that
10834 all we need to do is test pointer equality for the arguments
10835 to the two ADDR_EXPR nodes. It is incorrect to use
10836 operand_equal_p as that function is NOT equivalent to a
10837 C equality test. It can in fact return false for two
10838 objects which would test as equal using the C equality
10840 bool equal
= TREE_OPERAND (arg0
, 0) == TREE_OPERAND (arg1
, 0);
10841 return constant_boolean_node (equal
10842 ? code
== EQ_EXPR
: code
!= EQ_EXPR
,
10846 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10847 a MINUS_EXPR of a constant, we can convert it into a comparison with
10848 a revised constant as long as no overflow occurs. */
10849 if (TREE_CODE (arg1
) == INTEGER_CST
10850 && (TREE_CODE (arg0
) == PLUS_EXPR
10851 || TREE_CODE (arg0
) == MINUS_EXPR
)
10852 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10853 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
10854 ? MINUS_EXPR
: PLUS_EXPR
,
10855 fold_convert (TREE_TYPE (arg0
), arg1
),
10856 TREE_OPERAND (arg0
, 1), 0))
10857 && !TREE_OVERFLOW (tem
))
10858 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
);
10860 /* Similarly for a NEGATE_EXPR. */
10861 if (TREE_CODE (arg0
) == NEGATE_EXPR
10862 && TREE_CODE (arg1
) == INTEGER_CST
10863 && 0 != (tem
= negate_expr (arg1
))
10864 && TREE_CODE (tem
) == INTEGER_CST
10865 && !TREE_OVERFLOW (tem
))
10866 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
);
10868 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
10869 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10870 && TREE_CODE (arg1
) == INTEGER_CST
10871 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10872 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0),
10873 fold_build2 (BIT_XOR_EXPR
, TREE_TYPE (arg0
),
10874 fold_convert (TREE_TYPE (arg0
), arg1
),
10875 TREE_OPERAND (arg0
, 1)));
10877 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10878 for !=. Don't do this for ordered comparisons due to overflow. */
10879 if (TREE_CODE (arg0
) == MINUS_EXPR
10880 && integer_zerop (arg1
))
10881 return fold_build2 (code
, type
,
10882 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
10884 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10885 if (TREE_CODE (arg0
) == ABS_EXPR
10886 && (integer_zerop (arg1
) || real_zerop (arg1
)))
10887 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), arg1
);
10889 /* If this is an EQ or NE comparison with zero and ARG0 is
10890 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10891 two operations, but the latter can be done in one less insn
10892 on machines that have only two-operand insns or on which a
10893 constant cannot be the first operand. */
10894 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10895 && integer_zerop (arg1
))
10897 tree arg00
= TREE_OPERAND (arg0
, 0);
10898 tree arg01
= TREE_OPERAND (arg0
, 1);
10899 if (TREE_CODE (arg00
) == LSHIFT_EXPR
10900 && integer_onep (TREE_OPERAND (arg00
, 0)))
10902 fold_build2 (code
, type
,
10903 build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
10904 build2 (RSHIFT_EXPR
, TREE_TYPE (arg00
),
10905 arg01
, TREE_OPERAND (arg00
, 1)),
10906 fold_convert (TREE_TYPE (arg0
),
10907 integer_one_node
)),
10909 else if (TREE_CODE (TREE_OPERAND (arg0
, 1)) == LSHIFT_EXPR
10910 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0
, 1), 0)))
10912 fold_build2 (code
, type
,
10913 build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
10914 build2 (RSHIFT_EXPR
, TREE_TYPE (arg01
),
10915 arg00
, TREE_OPERAND (arg01
, 1)),
10916 fold_convert (TREE_TYPE (arg0
),
10917 integer_one_node
)),
10921 /* If this is an NE or EQ comparison of zero against the result of a
10922 signed MOD operation whose second operand is a power of 2, make
10923 the MOD operation unsigned since it is simpler and equivalent. */
10924 if (integer_zerop (arg1
)
10925 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
10926 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
10927 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
10928 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
10929 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
10930 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
10932 tree newtype
= lang_hooks
.types
.unsigned_type (TREE_TYPE (arg0
));
10933 tree newmod
= fold_build2 (TREE_CODE (arg0
), newtype
,
10934 fold_convert (newtype
,
10935 TREE_OPERAND (arg0
, 0)),
10936 fold_convert (newtype
,
10937 TREE_OPERAND (arg0
, 1)));
10939 return fold_build2 (code
, type
, newmod
,
10940 fold_convert (newtype
, arg1
));
10943 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10944 C1 is a valid shift constant, and C2 is a power of two, i.e.
10946 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10947 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
10948 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
10950 && integer_pow2p (TREE_OPERAND (arg0
, 1))
10951 && integer_zerop (arg1
))
10953 tree itype
= TREE_TYPE (arg0
);
10954 unsigned HOST_WIDE_INT prec
= TYPE_PRECISION (itype
);
10955 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
10957 /* Check for a valid shift count. */
10958 if (TREE_INT_CST_HIGH (arg001
) == 0
10959 && TREE_INT_CST_LOW (arg001
) < prec
)
10961 tree arg01
= TREE_OPERAND (arg0
, 1);
10962 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
10963 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
10964 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10965 can be rewritten as (X & (C2 << C1)) != 0. */
10966 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
10968 tem
= fold_build2 (LSHIFT_EXPR
, itype
, arg01
, arg001
);
10969 tem
= fold_build2 (BIT_AND_EXPR
, itype
, arg000
, tem
);
10970 return fold_build2 (code
, type
, tem
, arg1
);
10972 /* Otherwise, for signed (arithmetic) shifts,
10973 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10974 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10975 else if (!TYPE_UNSIGNED (itype
))
10976 return fold_build2 (code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
10977 arg000
, build_int_cst (itype
, 0));
10978 /* Otherwise, of unsigned (logical) shifts,
10979 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10980 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10982 return omit_one_operand (type
,
10983 code
== EQ_EXPR
? integer_one_node
10984 : integer_zero_node
,
10989 /* If this is an NE comparison of zero with an AND of one, remove the
10990 comparison since the AND will give the correct value. */
10991 if (code
== NE_EXPR
10992 && integer_zerop (arg1
)
10993 && TREE_CODE (arg0
) == BIT_AND_EXPR
10994 && integer_onep (TREE_OPERAND (arg0
, 1)))
10995 return fold_convert (type
, arg0
);
10997 /* If we have (A & C) == C where C is a power of 2, convert this into
10998 (A & C) != 0. Similarly for NE_EXPR. */
10999 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11000 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11001 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11002 return fold_build2 (code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
11003 arg0
, fold_convert (TREE_TYPE (arg0
),
11004 integer_zero_node
));
11006 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11007 bit, then fold the expression into A < 0 or A >= 0. */
11008 tem
= fold_single_bit_test_into_sign_test (code
, arg0
, arg1
, type
);
11012 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11013 Similarly for NE_EXPR. */
11014 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11015 && TREE_CODE (arg1
) == INTEGER_CST
11016 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11018 tree notc
= fold_build1 (BIT_NOT_EXPR
,
11019 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
11020 TREE_OPERAND (arg0
, 1));
11021 tree dandnotc
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
11023 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
11024 if (integer_nonzerop (dandnotc
))
11025 return omit_one_operand (type
, rslt
, arg0
);
11028 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11029 Similarly for NE_EXPR. */
11030 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11031 && TREE_CODE (arg1
) == INTEGER_CST
11032 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11034 tree notd
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
);
11035 tree candnotd
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
11036 TREE_OPERAND (arg0
, 1), notd
);
11037 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
11038 if (integer_nonzerop (candnotd
))
11039 return omit_one_operand (type
, rslt
, arg0
);
11042 /* If this is a comparison of a field, we may be able to simplify it. */
11043 if ((TREE_CODE (arg0
) == COMPONENT_REF
11044 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
11045 /* Handle the constant case even without -O
11046 to make sure the warnings are given. */
11047 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
11049 t1
= optimize_bit_field_compare (code
, type
, arg0
, arg1
);
11054 /* Optimize comparisons of strlen vs zero to a compare of the
11055 first character of the string vs zero. To wit,
11056 strlen(ptr) == 0 => *ptr == 0
11057 strlen(ptr) != 0 => *ptr != 0
11058 Other cases should reduce to one of these two (or a constant)
11059 due to the return value of strlen being unsigned. */
11060 if (TREE_CODE (arg0
) == CALL_EXPR
11061 && integer_zerop (arg1
))
11063 tree fndecl
= get_callee_fndecl (arg0
);
11067 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
11068 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
11069 && (arglist
= TREE_OPERAND (arg0
, 1))
11070 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) == POINTER_TYPE
11071 && ! TREE_CHAIN (arglist
))
11073 tree iref
= build_fold_indirect_ref (TREE_VALUE (arglist
));
11074 return fold_build2 (code
, type
, iref
,
11075 build_int_cst (TREE_TYPE (iref
), 0));
11079 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11080 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11081 if (TREE_CODE (arg0
) == RSHIFT_EXPR
11082 && integer_zerop (arg1
)
11083 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11085 tree arg00
= TREE_OPERAND (arg0
, 0);
11086 tree arg01
= TREE_OPERAND (arg0
, 1);
11087 tree itype
= TREE_TYPE (arg00
);
11088 if (TREE_INT_CST_HIGH (arg01
) == 0
11089 && TREE_INT_CST_LOW (arg01
)
11090 == (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (itype
) - 1))
11092 if (TYPE_UNSIGNED (itype
))
11094 itype
= lang_hooks
.types
.signed_type (itype
);
11095 arg00
= fold_convert (itype
, arg00
);
11097 return fold_build2 (code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
11098 type
, arg00
, build_int_cst (itype
, 0));
11102 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11103 if (integer_zerop (arg1
)
11104 && TREE_CODE (arg0
) == BIT_XOR_EXPR
)
11105 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0),
11106 TREE_OPERAND (arg0
, 1));
11108 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11109 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11110 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11111 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0),
11112 build_int_cst (TREE_TYPE (arg1
), 0));
11113 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11114 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11115 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11116 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11117 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 1),
11118 build_int_cst (TREE_TYPE (arg1
), 0));
11120 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11121 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11122 && TREE_CODE (arg1
) == INTEGER_CST
11123 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11124 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0),
11125 fold_build2 (BIT_XOR_EXPR
, TREE_TYPE (arg1
),
11126 TREE_OPERAND (arg0
, 1), arg1
));
11128 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11129 (X & C) == 0 when C is a single bit. */
11130 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11131 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
11132 && integer_zerop (arg1
)
11133 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
11135 tem
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
11136 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
11137 TREE_OPERAND (arg0
, 1));
11138 return fold_build2 (code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
11142 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11143 constant C is a power of two, i.e. a single bit. */
11144 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11145 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
11146 && integer_zerop (arg1
)
11147 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11148 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
11149 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
11151 tree arg00
= TREE_OPERAND (arg0
, 0);
11152 return fold_build2 (code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
11153 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
11156 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11157 when is C is a power of two, i.e. a single bit. */
11158 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11159 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
11160 && integer_zerop (arg1
)
11161 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11162 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
11163 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
11165 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
11166 tem
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (arg000
),
11167 arg000
, TREE_OPERAND (arg0
, 1));
11168 return fold_build2 (code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
11169 tem
, build_int_cst (TREE_TYPE (tem
), 0));
11172 if (integer_zerop (arg1
)
11173 && tree_expr_nonzero_p (arg0
))
11175 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
11176 return omit_one_operand (type
, res
, arg0
);
11179 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11180 if (TREE_CODE (arg0
) == NEGATE_EXPR
11181 && TREE_CODE (arg1
) == NEGATE_EXPR
)
11182 return fold_build2 (code
, type
,
11183 TREE_OPERAND (arg0
, 0),
11184 TREE_OPERAND (arg1
, 0));
11186 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
11187 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11188 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
11190 tree arg00
= TREE_OPERAND (arg0
, 0);
11191 tree arg01
= TREE_OPERAND (arg0
, 1);
11192 tree arg10
= TREE_OPERAND (arg1
, 0);
11193 tree arg11
= TREE_OPERAND (arg1
, 1);
11194 tree itype
= TREE_TYPE (arg0
);
11196 if (operand_equal_p (arg01
, arg11
, 0))
11197 return fold_build2 (code
, type
,
11198 fold_build2 (BIT_AND_EXPR
, itype
,
11199 fold_build2 (BIT_XOR_EXPR
, itype
,
11202 build_int_cst (itype
, 0));
11204 if (operand_equal_p (arg01
, arg10
, 0))
11205 return fold_build2 (code
, type
,
11206 fold_build2 (BIT_AND_EXPR
, itype
,
11207 fold_build2 (BIT_XOR_EXPR
, itype
,
11210 build_int_cst (itype
, 0));
11212 if (operand_equal_p (arg00
, arg11
, 0))
11213 return fold_build2 (code
, type
,
11214 fold_build2 (BIT_AND_EXPR
, itype
,
11215 fold_build2 (BIT_XOR_EXPR
, itype
,
11218 build_int_cst (itype
, 0));
11220 if (operand_equal_p (arg00
, arg10
, 0))
11221 return fold_build2 (code
, type
,
11222 fold_build2 (BIT_AND_EXPR
, itype
,
11223 fold_build2 (BIT_XOR_EXPR
, itype
,
11226 build_int_cst (itype
, 0));
11229 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11230 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
11232 tree arg00
= TREE_OPERAND (arg0
, 0);
11233 tree arg01
= TREE_OPERAND (arg0
, 1);
11234 tree arg10
= TREE_OPERAND (arg1
, 0);
11235 tree arg11
= TREE_OPERAND (arg1
, 1);
11236 tree itype
= TREE_TYPE (arg0
);
11238 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11239 operand_equal_p guarantees no side-effects so we don't need
11240 to use omit_one_operand on Z. */
11241 if (operand_equal_p (arg01
, arg11
, 0))
11242 return fold_build2 (code
, type
, arg00
, arg10
);
11243 if (operand_equal_p (arg01
, arg10
, 0))
11244 return fold_build2 (code
, type
, arg00
, arg11
);
11245 if (operand_equal_p (arg00
, arg11
, 0))
11246 return fold_build2 (code
, type
, arg01
, arg10
);
11247 if (operand_equal_p (arg00
, arg10
, 0))
11248 return fold_build2 (code
, type
, arg01
, arg11
);
11250 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11251 if (TREE_CODE (arg01
) == INTEGER_CST
11252 && TREE_CODE (arg11
) == INTEGER_CST
)
11253 return fold_build2 (code
, type
,
11254 fold_build2 (BIT_XOR_EXPR
, itype
, arg00
,
11255 fold_build2 (BIT_XOR_EXPR
, itype
,
11265 tem
= fold_comparison (code
, type
, op0
, op1
);
11266 if (tem
!= NULL_TREE
)
11269 /* Transform comparisons of the form X +- C CMP X. */
11270 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
11271 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11272 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
11273 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
))))
11274 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11275 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))))
11277 tree arg01
= TREE_OPERAND (arg0
, 1);
11278 enum tree_code code0
= TREE_CODE (arg0
);
11281 if (TREE_CODE (arg01
) == REAL_CST
)
11282 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
11284 is_positive
= tree_int_cst_sgn (arg01
);
11286 /* (X - c) > X becomes false. */
11287 if (code
== GT_EXPR
11288 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
11289 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
11290 return constant_boolean_node (0, type
);
11292 /* Likewise (X + c) < X becomes false. */
11293 if (code
== LT_EXPR
11294 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
11295 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
11296 return constant_boolean_node (0, type
);
11298 /* Convert (X - c) <= X to true. */
11299 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
11301 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
11302 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
11303 return constant_boolean_node (1, type
);
11305 /* Convert (X + c) >= X to true. */
11306 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
11308 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
11309 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
11310 return constant_boolean_node (1, type
);
11312 if (TREE_CODE (arg01
) == INTEGER_CST
)
11314 /* Convert X + c > X and X - c < X to true for integers. */
11315 if (code
== GT_EXPR
11316 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
11317 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
11318 return constant_boolean_node (1, type
);
11320 if (code
== LT_EXPR
11321 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
11322 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
11323 return constant_boolean_node (1, type
);
11325 /* Convert X + c <= X and X - c >= X to false for integers. */
11326 if (code
== LE_EXPR
11327 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
11328 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
11329 return constant_boolean_node (0, type
);
11331 if (code
== GE_EXPR
11332 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
11333 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
11334 return constant_boolean_node (0, type
);
11338 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11339 This transformation affects the cases which are handled in later
11340 optimizations involving comparisons with non-negative constants. */
11341 if (TREE_CODE (arg1
) == INTEGER_CST
11342 && TREE_CODE (arg0
) != INTEGER_CST
11343 && tree_int_cst_sgn (arg1
) > 0)
11345 if (code
== GE_EXPR
)
11347 arg1
= const_binop (MINUS_EXPR
, arg1
,
11348 build_int_cst (TREE_TYPE (arg1
), 1), 0);
11349 return fold_build2 (GT_EXPR
, type
, arg0
,
11350 fold_convert (TREE_TYPE (arg0
), arg1
));
11352 if (code
== LT_EXPR
)
11354 arg1
= const_binop (MINUS_EXPR
, arg1
,
11355 build_int_cst (TREE_TYPE (arg1
), 1), 0);
11356 return fold_build2 (LE_EXPR
, type
, arg0
,
11357 fold_convert (TREE_TYPE (arg0
), arg1
));
11361 /* Comparisons with the highest or lowest possible integer of
11362 the specified precision will have known values. */
11364 tree arg1_type
= TREE_TYPE (arg1
);
11365 unsigned int width
= TYPE_PRECISION (arg1_type
);
11367 if (TREE_CODE (arg1
) == INTEGER_CST
11368 && !TREE_OVERFLOW (arg1
)
11369 && width
<= 2 * HOST_BITS_PER_WIDE_INT
11370 && (INTEGRAL_TYPE_P (arg1_type
) || POINTER_TYPE_P (arg1_type
)))
11372 HOST_WIDE_INT signed_max_hi
;
11373 unsigned HOST_WIDE_INT signed_max_lo
;
11374 unsigned HOST_WIDE_INT max_hi
, max_lo
, min_hi
, min_lo
;
11376 if (width
<= HOST_BITS_PER_WIDE_INT
)
11378 signed_max_lo
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
11383 if (TYPE_UNSIGNED (arg1_type
))
11385 max_lo
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
11391 max_lo
= signed_max_lo
;
11392 min_lo
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
11398 width
-= HOST_BITS_PER_WIDE_INT
;
11399 signed_max_lo
= -1;
11400 signed_max_hi
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
11405 if (TYPE_UNSIGNED (arg1_type
))
11407 max_hi
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
11412 max_hi
= signed_max_hi
;
11413 min_hi
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
11417 if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
) == max_hi
11418 && TREE_INT_CST_LOW (arg1
) == max_lo
)
11422 return omit_one_operand (type
, integer_zero_node
, arg0
);
11425 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
11428 return omit_one_operand (type
, integer_one_node
, arg0
);
11431 return fold_build2 (NE_EXPR
, type
, arg0
, arg1
);
11433 /* The GE_EXPR and LT_EXPR cases above are not normally
11434 reached because of previous transformations. */
11439 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
11441 && TREE_INT_CST_LOW (arg1
) == max_lo
- 1)
11445 arg1
= const_binop (PLUS_EXPR
, arg1
,
11446 build_int_cst (TREE_TYPE (arg1
), 1), 0);
11447 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
11449 arg1
= const_binop (PLUS_EXPR
, arg1
,
11450 build_int_cst (TREE_TYPE (arg1
), 1), 0);
11451 return fold_build2 (NE_EXPR
, type
, arg0
, arg1
);
11455 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
11457 && TREE_INT_CST_LOW (arg1
) == min_lo
)
11461 return omit_one_operand (type
, integer_zero_node
, arg0
);
11464 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
11467 return omit_one_operand (type
, integer_one_node
, arg0
);
11470 return fold_build2 (NE_EXPR
, type
, op0
, op1
);
11475 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
11477 && TREE_INT_CST_LOW (arg1
) == min_lo
+ 1)
11481 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
11482 return fold_build2 (NE_EXPR
, type
, arg0
, arg1
);
11484 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
11485 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
11490 else if (TREE_INT_CST_HIGH (arg1
) == signed_max_hi
11491 && TREE_INT_CST_LOW (arg1
) == signed_max_lo
11492 && TYPE_UNSIGNED (arg1_type
)
11493 /* We will flip the signedness of the comparison operator
11494 associated with the mode of arg1, so the sign bit is
11495 specified by this mode. Check that arg1 is the signed
11496 max associated with this sign bit. */
11497 && width
== GET_MODE_BITSIZE (TYPE_MODE (arg1_type
))
11498 /* signed_type does not work on pointer types. */
11499 && INTEGRAL_TYPE_P (arg1_type
))
11501 /* The following case also applies to X < signed_max+1
11502 and X >= signed_max+1 because previous transformations. */
11503 if (code
== LE_EXPR
|| code
== GT_EXPR
)
11506 st0
= lang_hooks
.types
.signed_type (TREE_TYPE (arg0
));
11507 st1
= lang_hooks
.types
.signed_type (TREE_TYPE (arg1
));
11508 return fold_build2 (code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
11509 type
, fold_convert (st0
, arg0
),
11510 build_int_cst (st1
, 0));
11516 /* If we are comparing an ABS_EXPR with a constant, we can
11517 convert all the cases into explicit comparisons, but they may
11518 well not be faster than doing the ABS and one comparison.
11519 But ABS (X) <= C is a range comparison, which becomes a subtraction
11520 and a comparison, and is probably faster. */
11521 if (code
== LE_EXPR
11522 && TREE_CODE (arg1
) == INTEGER_CST
11523 && TREE_CODE (arg0
) == ABS_EXPR
11524 && ! TREE_SIDE_EFFECTS (arg0
)
11525 && (0 != (tem
= negate_expr (arg1
)))
11526 && TREE_CODE (tem
) == INTEGER_CST
11527 && !TREE_OVERFLOW (tem
))
11528 return fold_build2 (TRUTH_ANDIF_EXPR
, type
,
11529 build2 (GE_EXPR
, type
,
11530 TREE_OPERAND (arg0
, 0), tem
),
11531 build2 (LE_EXPR
, type
,
11532 TREE_OPERAND (arg0
, 0), arg1
));
11534 /* Convert ABS_EXPR<x> >= 0 to true. */
11535 if (code
== GE_EXPR
11536 && tree_expr_nonnegative_p (arg0
)
11537 && (integer_zerop (arg1
)
11538 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
11539 && real_zerop (arg1
))))
11540 return omit_one_operand (type
, integer_one_node
, arg0
);
11542 /* Convert ABS_EXPR<x> < 0 to false. */
11543 if (code
== LT_EXPR
11544 && tree_expr_nonnegative_p (arg0
)
11545 && (integer_zerop (arg1
) || real_zerop (arg1
)))
11546 return omit_one_operand (type
, integer_zero_node
, arg0
);
11548 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11549 and similarly for >= into !=. */
11550 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
11551 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
11552 && TREE_CODE (arg1
) == LSHIFT_EXPR
11553 && integer_onep (TREE_OPERAND (arg1
, 0)))
11554 return build2 (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
11555 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
11556 TREE_OPERAND (arg1
, 1)),
11557 build_int_cst (TREE_TYPE (arg0
), 0));
11559 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
11560 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
11561 && (TREE_CODE (arg1
) == NOP_EXPR
11562 || TREE_CODE (arg1
) == CONVERT_EXPR
)
11563 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
11564 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
11566 build2 (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
11567 fold_convert (TREE_TYPE (arg0
),
11568 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
11569 TREE_OPERAND (TREE_OPERAND (arg1
, 0),
11571 build_int_cst (TREE_TYPE (arg0
), 0));
11575 case UNORDERED_EXPR
:
11583 if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
11585 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
11586 if (t1
!= NULL_TREE
)
11590 /* If the first operand is NaN, the result is constant. */
11591 if (TREE_CODE (arg0
) == REAL_CST
11592 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
11593 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
11595 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
11596 ? integer_zero_node
11597 : integer_one_node
;
11598 return omit_one_operand (type
, t1
, arg1
);
11601 /* If the second operand is NaN, the result is constant. */
11602 if (TREE_CODE (arg1
) == REAL_CST
11603 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
11604 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
11606 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
11607 ? integer_zero_node
11608 : integer_one_node
;
11609 return omit_one_operand (type
, t1
, arg0
);
11612 /* Simplify unordered comparison of something with itself. */
11613 if ((code
== UNLE_EXPR
|| code
== UNGE_EXPR
|| code
== UNEQ_EXPR
)
11614 && operand_equal_p (arg0
, arg1
, 0))
11615 return constant_boolean_node (1, type
);
11617 if (code
== LTGT_EXPR
11618 && !flag_trapping_math
11619 && operand_equal_p (arg0
, arg1
, 0))
11620 return constant_boolean_node (0, type
);
11622 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11624 tree targ0
= strip_float_extensions (arg0
);
11625 tree targ1
= strip_float_extensions (arg1
);
11626 tree newtype
= TREE_TYPE (targ0
);
11628 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
11629 newtype
= TREE_TYPE (targ1
);
11631 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
11632 return fold_build2 (code
, type
, fold_convert (newtype
, targ0
),
11633 fold_convert (newtype
, targ1
));
11638 case COMPOUND_EXPR
:
11639 /* When pedantic, a compound expression can be neither an lvalue
11640 nor an integer constant expression. */
11641 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
11643 /* Don't let (0, 0) be null pointer constant. */
11644 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
11645 : fold_convert (type
, arg1
);
11646 return pedantic_non_lvalue (tem
);
11649 if ((TREE_CODE (arg0
) == REAL_CST
11650 && TREE_CODE (arg1
) == REAL_CST
)
11651 || (TREE_CODE (arg0
) == INTEGER_CST
11652 && TREE_CODE (arg1
) == INTEGER_CST
))
11653 return build_complex (type
, arg0
, arg1
);
11657 /* An ASSERT_EXPR should never be passed to fold_binary. */
11658 gcc_unreachable ();
11662 } /* switch (code) */
11665 /* Callback for walk_tree, looking for LABEL_EXPR.
11666 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
11667 Do not check the sub-tree of GOTO_EXPR. */
11670 contains_label_1 (tree
*tp
,
11671 int *walk_subtrees
,
11672 void *data ATTRIBUTE_UNUSED
)
11674 switch (TREE_CODE (*tp
))
11679 *walk_subtrees
= 0;
11686 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11687 accessible from outside the sub-tree. Returns NULL_TREE if no
11688 addressable label is found. */
11691 contains_label_p (tree st
)
11693 return (walk_tree (&st
, contains_label_1
, NULL
, NULL
) != NULL_TREE
);
11696 /* Fold a ternary expression of code CODE and type TYPE with operands
11697 OP0, OP1, and OP2. Return the folded expression if folding is
11698 successful. Otherwise, return NULL_TREE. */
11701 fold_ternary (enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
)
11704 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
;
11705 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
11707 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
11708 && TREE_CODE_LENGTH (code
) == 3);
11710 /* Strip any conversions that don't change the mode. This is safe
11711 for every expression, except for a comparison expression because
11712 its signedness is derived from its operands. So, in the latter
11713 case, only strip conversions that don't change the signedness.
11715 Note that this is done as an internal manipulation within the
11716 constant folder, in order to find the simplest representation of
11717 the arguments so that their form can be studied. In any cases,
11718 the appropriate type conversions should be put back in the tree
11719 that will get out of the constant folder. */
11734 case COMPONENT_REF
:
11735 if (TREE_CODE (arg0
) == CONSTRUCTOR
11736 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
11738 unsigned HOST_WIDE_INT idx
;
11740 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
11747 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11748 so all simple results must be passed through pedantic_non_lvalue. */
11749 if (TREE_CODE (arg0
) == INTEGER_CST
)
11751 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
11752 tem
= integer_zerop (arg0
) ? op2
: op1
;
11753 /* Only optimize constant conditions when the selected branch
11754 has the same type as the COND_EXPR. This avoids optimizing
11755 away "c ? x : throw", where the throw has a void type.
11756 Avoid throwing away that operand which contains label. */
11757 if ((!TREE_SIDE_EFFECTS (unused_op
)
11758 || !contains_label_p (unused_op
))
11759 && (! VOID_TYPE_P (TREE_TYPE (tem
))
11760 || VOID_TYPE_P (type
)))
11761 return pedantic_non_lvalue (tem
);
11764 if (operand_equal_p (arg1
, op2
, 0))
11765 return pedantic_omit_one_operand (type
, arg1
, arg0
);
11767 /* If we have A op B ? A : C, we may be able to convert this to a
11768 simpler expression, depending on the operation and the values
11769 of B and C. Signed zeros prevent all of these transformations,
11770 for reasons given above each one.
11772 Also try swapping the arguments and inverting the conditional. */
11773 if (COMPARISON_CLASS_P (arg0
)
11774 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
11775 arg1
, TREE_OPERAND (arg0
, 1))
11776 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1
))))
11778 tem
= fold_cond_expr_with_comparison (type
, arg0
, op1
, op2
);
11783 if (COMPARISON_CLASS_P (arg0
)
11784 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
11786 TREE_OPERAND (arg0
, 1))
11787 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2
))))
11789 tem
= fold_truth_not_expr (arg0
);
11790 if (tem
&& COMPARISON_CLASS_P (tem
))
11792 tem
= fold_cond_expr_with_comparison (type
, tem
, op2
, op1
);
11798 /* If the second operand is simpler than the third, swap them
11799 since that produces better jump optimization results. */
11800 if (truth_value_p (TREE_CODE (arg0
))
11801 && tree_swap_operands_p (op1
, op2
, false))
11803 /* See if this can be inverted. If it can't, possibly because
11804 it was a floating-point inequality comparison, don't do
11806 tem
= fold_truth_not_expr (arg0
);
11808 return fold_build3 (code
, type
, tem
, op2
, op1
);
11811 /* Convert A ? 1 : 0 to simply A. */
11812 if (integer_onep (op1
)
11813 && integer_zerop (op2
)
11814 /* If we try to convert OP0 to our type, the
11815 call to fold will try to move the conversion inside
11816 a COND, which will recurse. In that case, the COND_EXPR
11817 is probably the best choice, so leave it alone. */
11818 && type
== TREE_TYPE (arg0
))
11819 return pedantic_non_lvalue (arg0
);
11821 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11822 over COND_EXPR in cases such as floating point comparisons. */
11823 if (integer_zerop (op1
)
11824 && integer_onep (op2
)
11825 && truth_value_p (TREE_CODE (arg0
)))
11826 return pedantic_non_lvalue (fold_convert (type
,
11827 invert_truthvalue (arg0
)));
11829 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11830 if (TREE_CODE (arg0
) == LT_EXPR
11831 && integer_zerop (TREE_OPERAND (arg0
, 1))
11832 && integer_zerop (op2
)
11833 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
11835 /* sign_bit_p only checks ARG1 bits within A's precision.
11836 If <sign bit of A> has wider type than A, bits outside
11837 of A's precision in <sign bit of A> need to be checked.
11838 If they are all 0, this optimization needs to be done
11839 in unsigned A's type, if they are all 1 in signed A's type,
11840 otherwise this can't be done. */
11841 if (TYPE_PRECISION (TREE_TYPE (tem
))
11842 < TYPE_PRECISION (TREE_TYPE (arg1
))
11843 && TYPE_PRECISION (TREE_TYPE (tem
))
11844 < TYPE_PRECISION (type
))
11846 unsigned HOST_WIDE_INT mask_lo
;
11847 HOST_WIDE_INT mask_hi
;
11848 int inner_width
, outer_width
;
11851 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
11852 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
11853 if (outer_width
> TYPE_PRECISION (type
))
11854 outer_width
= TYPE_PRECISION (type
);
11856 if (outer_width
> HOST_BITS_PER_WIDE_INT
)
11858 mask_hi
= ((unsigned HOST_WIDE_INT
) -1
11859 >> (2 * HOST_BITS_PER_WIDE_INT
- outer_width
));
11865 mask_lo
= ((unsigned HOST_WIDE_INT
) -1
11866 >> (HOST_BITS_PER_WIDE_INT
- outer_width
));
11868 if (inner_width
> HOST_BITS_PER_WIDE_INT
)
11870 mask_hi
&= ~((unsigned HOST_WIDE_INT
) -1
11871 >> (HOST_BITS_PER_WIDE_INT
- inner_width
));
11875 mask_lo
&= ~((unsigned HOST_WIDE_INT
) -1
11876 >> (HOST_BITS_PER_WIDE_INT
- inner_width
));
11878 if ((TREE_INT_CST_HIGH (arg1
) & mask_hi
) == mask_hi
11879 && (TREE_INT_CST_LOW (arg1
) & mask_lo
) == mask_lo
)
11881 tem_type
= lang_hooks
.types
.signed_type (TREE_TYPE (tem
));
11882 tem
= fold_convert (tem_type
, tem
);
11884 else if ((TREE_INT_CST_HIGH (arg1
) & mask_hi
) == 0
11885 && (TREE_INT_CST_LOW (arg1
) & mask_lo
) == 0)
11887 tem_type
= lang_hooks
.types
.unsigned_type (TREE_TYPE (tem
));
11888 tem
= fold_convert (tem_type
, tem
);
11895 return fold_convert (type
,
11896 fold_build2 (BIT_AND_EXPR
,
11897 TREE_TYPE (tem
), tem
,
11898 fold_convert (TREE_TYPE (tem
),
11902 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11903 already handled above. */
11904 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11905 && integer_onep (TREE_OPERAND (arg0
, 1))
11906 && integer_zerop (op2
)
11907 && integer_pow2p (arg1
))
11909 tree tem
= TREE_OPERAND (arg0
, 0);
11911 if (TREE_CODE (tem
) == RSHIFT_EXPR
11912 && TREE_CODE (TREE_OPERAND (tem
, 1)) == INTEGER_CST
11913 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
11914 TREE_INT_CST_LOW (TREE_OPERAND (tem
, 1)))
11915 return fold_build2 (BIT_AND_EXPR
, type
,
11916 TREE_OPERAND (tem
, 0), arg1
);
11919 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11920 is probably obsolete because the first operand should be a
11921 truth value (that's why we have the two cases above), but let's
11922 leave it in until we can confirm this for all front-ends. */
11923 if (integer_zerop (op2
)
11924 && TREE_CODE (arg0
) == NE_EXPR
11925 && integer_zerop (TREE_OPERAND (arg0
, 1))
11926 && integer_pow2p (arg1
)
11927 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
11928 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
11929 arg1
, OEP_ONLY_CONST
))
11930 return pedantic_non_lvalue (fold_convert (type
,
11931 TREE_OPERAND (arg0
, 0)));
11933 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11934 if (integer_zerop (op2
)
11935 && truth_value_p (TREE_CODE (arg0
))
11936 && truth_value_p (TREE_CODE (arg1
)))
11937 return fold_build2 (TRUTH_ANDIF_EXPR
, type
,
11938 fold_convert (type
, arg0
),
11941 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11942 if (integer_onep (op2
)
11943 && truth_value_p (TREE_CODE (arg0
))
11944 && truth_value_p (TREE_CODE (arg1
)))
11946 /* Only perform transformation if ARG0 is easily inverted. */
11947 tem
= fold_truth_not_expr (arg0
);
11949 return fold_build2 (TRUTH_ORIF_EXPR
, type
,
11950 fold_convert (type
, tem
),
11954 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11955 if (integer_zerop (arg1
)
11956 && truth_value_p (TREE_CODE (arg0
))
11957 && truth_value_p (TREE_CODE (op2
)))
11959 /* Only perform transformation if ARG0 is easily inverted. */
11960 tem
= fold_truth_not_expr (arg0
);
11962 return fold_build2 (TRUTH_ANDIF_EXPR
, type
,
11963 fold_convert (type
, tem
),
11967 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11968 if (integer_onep (arg1
)
11969 && truth_value_p (TREE_CODE (arg0
))
11970 && truth_value_p (TREE_CODE (op2
)))
11971 return fold_build2 (TRUTH_ORIF_EXPR
, type
,
11972 fold_convert (type
, arg0
),
11978 /* Check for a built-in function. */
11979 if (TREE_CODE (op0
) == ADDR_EXPR
11980 && TREE_CODE (TREE_OPERAND (op0
, 0)) == FUNCTION_DECL
11981 && DECL_BUILT_IN (TREE_OPERAND (op0
, 0)))
11982 return fold_builtin (TREE_OPERAND (op0
, 0), op1
, false);
11985 case BIT_FIELD_REF
:
11986 if (TREE_CODE (arg0
) == VECTOR_CST
11987 && type
== TREE_TYPE (TREE_TYPE (arg0
))
11988 && host_integerp (arg1
, 1)
11989 && host_integerp (op2
, 1))
11991 unsigned HOST_WIDE_INT width
= tree_low_cst (arg1
, 1);
11992 unsigned HOST_WIDE_INT idx
= tree_low_cst (op2
, 1);
11995 && simple_cst_equal (arg1
, TYPE_SIZE (type
)) == 1
11996 && (idx
% width
) == 0
11997 && (idx
= idx
/ width
)
11998 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
12000 tree elements
= TREE_VECTOR_CST_ELTS (arg0
);
12001 while (idx
-- > 0 && elements
)
12002 elements
= TREE_CHAIN (elements
);
12004 return TREE_VALUE (elements
);
12006 return fold_convert (type
, integer_zero_node
);
12013 } /* switch (code) */
12016 /* Perform constant folding and related simplification of EXPR.
12017 The related simplifications include x*1 => x, x*0 => 0, etc.,
12018 and application of the associative law.
12019 NOP_EXPR conversions may be removed freely (as long as we
12020 are careful not to change the type of the overall expression).
12021 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12022 but we can constant-fold them if they have constant operands. */
12024 #ifdef ENABLE_FOLD_CHECKING
12025 # define fold(x) fold_1 (x)
12026 static tree
fold_1 (tree
);
12032 const tree t
= expr
;
12033 enum tree_code code
= TREE_CODE (t
);
12034 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
12037 /* Return right away if a constant. */
12038 if (kind
== tcc_constant
)
12041 if (IS_EXPR_CODE_CLASS (kind
)
12042 || IS_GIMPLE_STMT_CODE_CLASS (kind
))
12044 tree type
= TREE_TYPE (t
);
12045 tree op0
, op1
, op2
;
12047 switch (TREE_CODE_LENGTH (code
))
12050 op0
= TREE_OPERAND (t
, 0);
12051 tem
= fold_unary (code
, type
, op0
);
12052 return tem
? tem
: expr
;
12054 op0
= TREE_OPERAND (t
, 0);
12055 op1
= TREE_OPERAND (t
, 1);
12056 tem
= fold_binary (code
, type
, op0
, op1
);
12057 return tem
? tem
: expr
;
12059 op0
= TREE_OPERAND (t
, 0);
12060 op1
= TREE_OPERAND (t
, 1);
12061 op2
= TREE_OPERAND (t
, 2);
12062 tem
= fold_ternary (code
, type
, op0
, op1
, op2
);
12063 return tem
? tem
: expr
;
12072 return fold (DECL_INITIAL (t
));
12076 } /* switch (code) */
12079 #ifdef ENABLE_FOLD_CHECKING
12082 static void fold_checksum_tree (tree
, struct md5_ctx
*, htab_t
);
12083 static void fold_check_failed (tree
, tree
);
12084 void print_fold_checksum (tree
);
12086 /* When --enable-checking=fold, compute a digest of expr before
12087 and after actual fold call to see if fold did not accidentally
12088 change original expr. */
12094 struct md5_ctx ctx
;
12095 unsigned char checksum_before
[16], checksum_after
[16];
12098 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
12099 md5_init_ctx (&ctx
);
12100 fold_checksum_tree (expr
, &ctx
, ht
);
12101 md5_finish_ctx (&ctx
, checksum_before
);
12104 ret
= fold_1 (expr
);
12106 md5_init_ctx (&ctx
);
12107 fold_checksum_tree (expr
, &ctx
, ht
);
12108 md5_finish_ctx (&ctx
, checksum_after
);
12111 if (memcmp (checksum_before
, checksum_after
, 16))
12112 fold_check_failed (expr
, ret
);
12118 print_fold_checksum (tree expr
)
12120 struct md5_ctx ctx
;
12121 unsigned char checksum
[16], cnt
;
12124 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
12125 md5_init_ctx (&ctx
);
12126 fold_checksum_tree (expr
, &ctx
, ht
);
12127 md5_finish_ctx (&ctx
, checksum
);
12129 for (cnt
= 0; cnt
< 16; ++cnt
)
12130 fprintf (stderr
, "%02x", checksum
[cnt
]);
12131 putc ('\n', stderr
);
12135 fold_check_failed (tree expr ATTRIBUTE_UNUSED
, tree ret ATTRIBUTE_UNUSED
)
12137 internal_error ("fold check: original tree changed by fold");
12141 fold_checksum_tree (tree expr
, struct md5_ctx
*ctx
, htab_t ht
)
12144 enum tree_code code
;
12145 struct tree_function_decl buf
;
12150 gcc_assert ((sizeof (struct tree_exp
) + 5 * sizeof (tree
)
12151 <= sizeof (struct tree_function_decl
))
12152 && sizeof (struct tree_type
) <= sizeof (struct tree_function_decl
));
12155 slot
= htab_find_slot (ht
, expr
, INSERT
);
12159 code
= TREE_CODE (expr
);
12160 if (TREE_CODE_CLASS (code
) == tcc_declaration
12161 && DECL_ASSEMBLER_NAME_SET_P (expr
))
12163 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12164 memcpy ((char *) &buf
, expr
, tree_size (expr
));
12165 expr
= (tree
) &buf
;
12166 SET_DECL_ASSEMBLER_NAME (expr
, NULL
);
12168 else if (TREE_CODE_CLASS (code
) == tcc_type
12169 && (TYPE_POINTER_TO (expr
) || TYPE_REFERENCE_TO (expr
)
12170 || TYPE_CACHED_VALUES_P (expr
)
12171 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)))
12173 /* Allow these fields to be modified. */
12174 memcpy ((char *) &buf
, expr
, tree_size (expr
));
12175 expr
= (tree
) &buf
;
12176 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
) = 0;
12177 TYPE_POINTER_TO (expr
) = NULL
;
12178 TYPE_REFERENCE_TO (expr
) = NULL
;
12179 if (TYPE_CACHED_VALUES_P (expr
))
12181 TYPE_CACHED_VALUES_P (expr
) = 0;
12182 TYPE_CACHED_VALUES (expr
) = NULL
;
12185 md5_process_bytes (expr
, tree_size (expr
), ctx
);
12186 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
12187 if (TREE_CODE_CLASS (code
) != tcc_type
12188 && TREE_CODE_CLASS (code
) != tcc_declaration
12189 && code
!= TREE_LIST
)
12190 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
12191 switch (TREE_CODE_CLASS (code
))
12197 md5_process_bytes (TREE_STRING_POINTER (expr
),
12198 TREE_STRING_LENGTH (expr
), ctx
);
12201 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
12202 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
12205 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr
), ctx
, ht
);
12211 case tcc_exceptional
:
12215 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
12216 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
12217 expr
= TREE_CHAIN (expr
);
12218 goto recursive_label
;
12221 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
12222 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
12228 case tcc_expression
:
12229 case tcc_reference
:
12230 case tcc_comparison
:
12233 case tcc_statement
:
12234 len
= TREE_CODE_LENGTH (code
);
12235 for (i
= 0; i
< len
; ++i
)
12236 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
12238 case tcc_declaration
:
12239 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
12240 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
12241 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
12243 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
12244 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
12245 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
12246 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
12247 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
12249 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_WITH_VIS
))
12250 fold_checksum_tree (DECL_SECTION_NAME (expr
), ctx
, ht
);
12252 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
12254 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
12255 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
12256 fold_checksum_tree (DECL_ARGUMENT_FLD (expr
), ctx
, ht
);
12260 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
12261 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
12262 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
12263 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
12264 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
12265 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
12266 if (INTEGRAL_TYPE_P (expr
)
12267 || SCALAR_FLOAT_TYPE_P (expr
))
12269 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
12270 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
12272 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
12273 if (TREE_CODE (expr
) == RECORD_TYPE
12274 || TREE_CODE (expr
) == UNION_TYPE
12275 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
12276 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
12277 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
12286 /* Fold a unary tree expression with code CODE of type TYPE with an
12287 operand OP0. Return a folded expression if successful. Otherwise,
12288 return a tree expression with code CODE of type TYPE with an
12292 fold_build1_stat (enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
12295 #ifdef ENABLE_FOLD_CHECKING
12296 unsigned char checksum_before
[16], checksum_after
[16];
12297 struct md5_ctx ctx
;
12300 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
12301 md5_init_ctx (&ctx
);
12302 fold_checksum_tree (op0
, &ctx
, ht
);
12303 md5_finish_ctx (&ctx
, checksum_before
);
12307 tem
= fold_unary (code
, type
, op0
);
12309 tem
= build1_stat (code
, type
, op0 PASS_MEM_STAT
);
12311 #ifdef ENABLE_FOLD_CHECKING
12312 md5_init_ctx (&ctx
);
12313 fold_checksum_tree (op0
, &ctx
, ht
);
12314 md5_finish_ctx (&ctx
, checksum_after
);
12317 if (memcmp (checksum_before
, checksum_after
, 16))
12318 fold_check_failed (op0
, tem
);
12323 /* Fold a binary tree expression with code CODE of type TYPE with
12324 operands OP0 and OP1. Return a folded expression if successful.
12325 Otherwise, return a tree expression with code CODE of type TYPE
12326 with operands OP0 and OP1. */
12329 fold_build2_stat (enum tree_code code
, tree type
, tree op0
, tree op1
12333 #ifdef ENABLE_FOLD_CHECKING
12334 unsigned char checksum_before_op0
[16],
12335 checksum_before_op1
[16],
12336 checksum_after_op0
[16],
12337 checksum_after_op1
[16];
12338 struct md5_ctx ctx
;
12341 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
12342 md5_init_ctx (&ctx
);
12343 fold_checksum_tree (op0
, &ctx
, ht
);
12344 md5_finish_ctx (&ctx
, checksum_before_op0
);
12347 md5_init_ctx (&ctx
);
12348 fold_checksum_tree (op1
, &ctx
, ht
);
12349 md5_finish_ctx (&ctx
, checksum_before_op1
);
12353 tem
= fold_binary (code
, type
, op0
, op1
);
12355 tem
= build2_stat (code
, type
, op0
, op1 PASS_MEM_STAT
);
12357 #ifdef ENABLE_FOLD_CHECKING
12358 md5_init_ctx (&ctx
);
12359 fold_checksum_tree (op0
, &ctx
, ht
);
12360 md5_finish_ctx (&ctx
, checksum_after_op0
);
12363 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
12364 fold_check_failed (op0
, tem
);
12366 md5_init_ctx (&ctx
);
12367 fold_checksum_tree (op1
, &ctx
, ht
);
12368 md5_finish_ctx (&ctx
, checksum_after_op1
);
12371 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
12372 fold_check_failed (op1
, tem
);
12377 /* Fold a ternary tree expression with code CODE of type TYPE with
12378 operands OP0, OP1, and OP2. Return a folded expression if
12379 successful. Otherwise, return a tree expression with code CODE of
12380 type TYPE with operands OP0, OP1, and OP2. */
12383 fold_build3_stat (enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
12387 #ifdef ENABLE_FOLD_CHECKING
12388 unsigned char checksum_before_op0
[16],
12389 checksum_before_op1
[16],
12390 checksum_before_op2
[16],
12391 checksum_after_op0
[16],
12392 checksum_after_op1
[16],
12393 checksum_after_op2
[16];
12394 struct md5_ctx ctx
;
12397 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
12398 md5_init_ctx (&ctx
);
12399 fold_checksum_tree (op0
, &ctx
, ht
);
12400 md5_finish_ctx (&ctx
, checksum_before_op0
);
12403 md5_init_ctx (&ctx
);
12404 fold_checksum_tree (op1
, &ctx
, ht
);
12405 md5_finish_ctx (&ctx
, checksum_before_op1
);
12408 md5_init_ctx (&ctx
);
12409 fold_checksum_tree (op2
, &ctx
, ht
);
12410 md5_finish_ctx (&ctx
, checksum_before_op2
);
12414 tem
= fold_ternary (code
, type
, op0
, op1
, op2
);
12416 tem
= build3_stat (code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
12418 #ifdef ENABLE_FOLD_CHECKING
12419 md5_init_ctx (&ctx
);
12420 fold_checksum_tree (op0
, &ctx
, ht
);
12421 md5_finish_ctx (&ctx
, checksum_after_op0
);
12424 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
12425 fold_check_failed (op0
, tem
);
12427 md5_init_ctx (&ctx
);
12428 fold_checksum_tree (op1
, &ctx
, ht
);
12429 md5_finish_ctx (&ctx
, checksum_after_op1
);
12432 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
12433 fold_check_failed (op1
, tem
);
12435 md5_init_ctx (&ctx
);
12436 fold_checksum_tree (op2
, &ctx
, ht
);
12437 md5_finish_ctx (&ctx
, checksum_after_op2
);
12440 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
12441 fold_check_failed (op2
, tem
);
12446 /* Perform constant folding and related simplification of initializer
12447 expression EXPR. These behave identically to "fold_buildN" but ignore
12448 potential run-time traps and exceptions that fold must preserve. */
12450 #define START_FOLD_INIT \
12451 int saved_signaling_nans = flag_signaling_nans;\
12452 int saved_trapping_math = flag_trapping_math;\
12453 int saved_rounding_math = flag_rounding_math;\
12454 int saved_trapv = flag_trapv;\
12455 int saved_folding_initializer = folding_initializer;\
12456 flag_signaling_nans = 0;\
12457 flag_trapping_math = 0;\
12458 flag_rounding_math = 0;\
12460 folding_initializer = 1;
12462 #define END_FOLD_INIT \
12463 flag_signaling_nans = saved_signaling_nans;\
12464 flag_trapping_math = saved_trapping_math;\
12465 flag_rounding_math = saved_rounding_math;\
12466 flag_trapv = saved_trapv;\
12467 folding_initializer = saved_folding_initializer;
12470 fold_build1_initializer (enum tree_code code
, tree type
, tree op
)
12475 result
= fold_build1 (code
, type
, op
);
12482 fold_build2_initializer (enum tree_code code
, tree type
, tree op0
, tree op1
)
12487 result
= fold_build2 (code
, type
, op0
, op1
);
12494 fold_build3_initializer (enum tree_code code
, tree type
, tree op0
, tree op1
,
12500 result
= fold_build3 (code
, type
, op0
, op1
, op2
);
12506 #undef START_FOLD_INIT
12507 #undef END_FOLD_INIT
12509 /* Determine if first argument is a multiple of second argument. Return 0 if
12510 it is not, or we cannot easily determined it to be.
12512 An example of the sort of thing we care about (at this point; this routine
12513 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12514 fold cases do now) is discovering that
12516 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12522 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12524 This code also handles discovering that
12526 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12528 is a multiple of 8 so we don't have to worry about dealing with a
12529 possible remainder.
12531 Note that we *look* inside a SAVE_EXPR only to determine how it was
12532 calculated; it is not safe for fold to do much of anything else with the
12533 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12534 at run time. For example, the latter example above *cannot* be implemented
12535 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12536 evaluation time of the original SAVE_EXPR is not necessarily the same at
12537 the time the new expression is evaluated. The only optimization of this
12538 sort that would be valid is changing
12540 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12544 SAVE_EXPR (I) * SAVE_EXPR (J)
12546 (where the same SAVE_EXPR (J) is used in the original and the
12547 transformed version). */
12550 multiple_of_p (tree type
, tree top
, tree bottom
)
12552 if (operand_equal_p (top
, bottom
, 0))
12555 if (TREE_CODE (type
) != INTEGER_TYPE
)
12558 switch (TREE_CODE (top
))
12561 /* Bitwise and provides a power of two multiple. If the mask is
12562 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12563 if (!integer_pow2p (bottom
))
12568 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
12569 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
12573 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
12574 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
12577 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
12581 op1
= TREE_OPERAND (top
, 1);
12582 /* const_binop may not detect overflow correctly,
12583 so check for it explicitly here. */
12584 if (TYPE_PRECISION (TREE_TYPE (size_one_node
))
12585 > TREE_INT_CST_LOW (op1
)
12586 && TREE_INT_CST_HIGH (op1
) == 0
12587 && 0 != (t1
= fold_convert (type
,
12588 const_binop (LSHIFT_EXPR
,
12591 && !TREE_OVERFLOW (t1
))
12592 return multiple_of_p (type
, t1
, bottom
);
12597 /* Can't handle conversions from non-integral or wider integral type. */
12598 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
12599 || (TYPE_PRECISION (type
)
12600 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
12603 /* .. fall through ... */
12606 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
12609 if (TREE_CODE (bottom
) != INTEGER_CST
12610 || (TYPE_UNSIGNED (type
)
12611 && (tree_int_cst_sgn (top
) < 0
12612 || tree_int_cst_sgn (bottom
) < 0)))
12614 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR
,
12622 /* Return true if `t' is known to be non-negative. */
12625 tree_expr_nonnegative_p (tree t
)
12627 if (t
== error_mark_node
)
12630 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
12633 switch (TREE_CODE (t
))
12636 /* Query VRP to see if it has recorded any information about
12637 the range of this object. */
12638 return ssa_name_nonnegative_p (t
);
12641 /* We can't return 1 if flag_wrapv is set because
12642 ABS_EXPR<INT_MIN> = INT_MIN. */
12643 if (!INTEGRAL_TYPE_P (TREE_TYPE (t
)))
12645 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
)))
12650 return tree_int_cst_sgn (t
) >= 0;
12653 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
12656 if (FLOAT_TYPE_P (TREE_TYPE (t
)))
12657 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
12658 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
12660 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12661 both unsigned and at least 2 bits shorter than the result. */
12662 if (TREE_CODE (TREE_TYPE (t
)) == INTEGER_TYPE
12663 && TREE_CODE (TREE_OPERAND (t
, 0)) == NOP_EXPR
12664 && TREE_CODE (TREE_OPERAND (t
, 1)) == NOP_EXPR
)
12666 tree inner1
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
12667 tree inner2
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0));
12668 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
12669 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
12671 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
12672 TYPE_PRECISION (inner2
)) + 1;
12673 return prec
< TYPE_PRECISION (TREE_TYPE (t
));
12679 if (FLOAT_TYPE_P (TREE_TYPE (t
)))
12681 /* x * x for floating point x is always non-negative. */
12682 if (operand_equal_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1), 0))
12684 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
12685 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
12688 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12689 both unsigned and their total bits is shorter than the result. */
12690 if (TREE_CODE (TREE_TYPE (t
)) == INTEGER_TYPE
12691 && TREE_CODE (TREE_OPERAND (t
, 0)) == NOP_EXPR
12692 && TREE_CODE (TREE_OPERAND (t
, 1)) == NOP_EXPR
)
12694 tree inner1
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
12695 tree inner2
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0));
12696 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
12697 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
12698 return TYPE_PRECISION (inner1
) + TYPE_PRECISION (inner2
)
12699 < TYPE_PRECISION (TREE_TYPE (t
));
12705 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
12706 || tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
12712 case TRUNC_DIV_EXPR
:
12713 case CEIL_DIV_EXPR
:
12714 case FLOOR_DIV_EXPR
:
12715 case ROUND_DIV_EXPR
:
12716 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
12717 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
12719 case TRUNC_MOD_EXPR
:
12720 case CEIL_MOD_EXPR
:
12721 case FLOOR_MOD_EXPR
:
12722 case ROUND_MOD_EXPR
:
12724 case NON_LVALUE_EXPR
:
12726 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
12728 case COMPOUND_EXPR
:
12730 case GIMPLE_MODIFY_STMT
:
12731 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t
, 1));
12734 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t
, 1)));
12737 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 1))
12738 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 2));
12742 tree inner_type
= TREE_TYPE (TREE_OPERAND (t
, 0));
12743 tree outer_type
= TREE_TYPE (t
);
12745 if (TREE_CODE (outer_type
) == REAL_TYPE
)
12747 if (TREE_CODE (inner_type
) == REAL_TYPE
)
12748 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
12749 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
12751 if (TYPE_UNSIGNED (inner_type
))
12753 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
12756 else if (TREE_CODE (outer_type
) == INTEGER_TYPE
)
12758 if (TREE_CODE (inner_type
) == REAL_TYPE
)
12759 return tree_expr_nonnegative_p (TREE_OPERAND (t
,0));
12760 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
12761 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
12762 && TYPE_UNSIGNED (inner_type
);
12769 tree temp
= TARGET_EXPR_SLOT (t
);
12770 t
= TARGET_EXPR_INITIAL (t
);
12772 /* If the initializer is non-void, then it's a normal expression
12773 that will be assigned to the slot. */
12774 if (!VOID_TYPE_P (t
))
12775 return tree_expr_nonnegative_p (t
);
12777 /* Otherwise, the initializer sets the slot in some way. One common
12778 way is an assignment statement at the end of the initializer. */
12781 if (TREE_CODE (t
) == BIND_EXPR
)
12782 t
= expr_last (BIND_EXPR_BODY (t
));
12783 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
12784 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
12785 t
= expr_last (TREE_OPERAND (t
, 0));
12786 else if (TREE_CODE (t
) == STATEMENT_LIST
)
12791 if ((TREE_CODE (t
) == MODIFY_EXPR
12792 || TREE_CODE (t
) == GIMPLE_MODIFY_STMT
)
12793 && GENERIC_TREE_OPERAND (t
, 0) == temp
)
12794 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t
, 1));
12801 tree fndecl
= get_callee_fndecl (t
);
12802 tree arglist
= TREE_OPERAND (t
, 1);
12803 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
12804 switch (DECL_FUNCTION_CODE (fndecl
))
12806 CASE_FLT_FN (BUILT_IN_ACOS
):
12807 CASE_FLT_FN (BUILT_IN_ACOSH
):
12808 CASE_FLT_FN (BUILT_IN_CABS
):
12809 CASE_FLT_FN (BUILT_IN_COSH
):
12810 CASE_FLT_FN (BUILT_IN_ERFC
):
12811 CASE_FLT_FN (BUILT_IN_EXP
):
12812 CASE_FLT_FN (BUILT_IN_EXP10
):
12813 CASE_FLT_FN (BUILT_IN_EXP2
):
12814 CASE_FLT_FN (BUILT_IN_FABS
):
12815 CASE_FLT_FN (BUILT_IN_FDIM
):
12816 CASE_FLT_FN (BUILT_IN_HYPOT
):
12817 CASE_FLT_FN (BUILT_IN_POW10
):
12818 CASE_INT_FN (BUILT_IN_FFS
):
12819 CASE_INT_FN (BUILT_IN_PARITY
):
12820 CASE_INT_FN (BUILT_IN_POPCOUNT
):
12821 case BUILT_IN_BSWAP32
:
12822 case BUILT_IN_BSWAP64
:
12826 CASE_FLT_FN (BUILT_IN_SQRT
):
12827 /* sqrt(-0.0) is -0.0. */
12828 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t
))))
12830 return tree_expr_nonnegative_p (TREE_VALUE (arglist
));
12832 CASE_FLT_FN (BUILT_IN_ASINH
):
12833 CASE_FLT_FN (BUILT_IN_ATAN
):
12834 CASE_FLT_FN (BUILT_IN_ATANH
):
12835 CASE_FLT_FN (BUILT_IN_CBRT
):
12836 CASE_FLT_FN (BUILT_IN_CEIL
):
12837 CASE_FLT_FN (BUILT_IN_ERF
):
12838 CASE_FLT_FN (BUILT_IN_EXPM1
):
12839 CASE_FLT_FN (BUILT_IN_FLOOR
):
12840 CASE_FLT_FN (BUILT_IN_FMOD
):
12841 CASE_FLT_FN (BUILT_IN_FREXP
):
12842 CASE_FLT_FN (BUILT_IN_LCEIL
):
12843 CASE_FLT_FN (BUILT_IN_LDEXP
):
12844 CASE_FLT_FN (BUILT_IN_LFLOOR
):
12845 CASE_FLT_FN (BUILT_IN_LLCEIL
):
12846 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
12847 CASE_FLT_FN (BUILT_IN_LLRINT
):
12848 CASE_FLT_FN (BUILT_IN_LLROUND
):
12849 CASE_FLT_FN (BUILT_IN_LRINT
):
12850 CASE_FLT_FN (BUILT_IN_LROUND
):
12851 CASE_FLT_FN (BUILT_IN_MODF
):
12852 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
12853 CASE_FLT_FN (BUILT_IN_RINT
):
12854 CASE_FLT_FN (BUILT_IN_ROUND
):
12855 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
12856 CASE_FLT_FN (BUILT_IN_SINH
):
12857 CASE_FLT_FN (BUILT_IN_TANH
):
12858 CASE_FLT_FN (BUILT_IN_TRUNC
):
12859 /* True if the 1st argument is nonnegative. */
12860 return tree_expr_nonnegative_p (TREE_VALUE (arglist
));
12862 CASE_FLT_FN (BUILT_IN_FMAX
):
12863 /* True if the 1st OR 2nd arguments are nonnegative. */
12864 return tree_expr_nonnegative_p (TREE_VALUE (arglist
))
12865 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist
)));
12867 CASE_FLT_FN (BUILT_IN_FMIN
):
12868 /* True if the 1st AND 2nd arguments are nonnegative. */
12869 return tree_expr_nonnegative_p (TREE_VALUE (arglist
))
12870 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist
)));
12872 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
12873 /* True if the 2nd argument is nonnegative. */
12874 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist
)));
12876 CASE_FLT_FN (BUILT_IN_POWI
):
12877 /* True if the 1st argument is nonnegative or the second
12878 argument is an even integer. */
12879 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist
))) == INTEGER_CST
)
12881 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
12882 if ((TREE_INT_CST_LOW (arg1
) & 1) == 0)
12885 return tree_expr_nonnegative_p (TREE_VALUE (arglist
));
12887 CASE_FLT_FN (BUILT_IN_POW
):
12888 /* True if the 1st argument is nonnegative or the second
12889 argument is an even integer valued real. */
12890 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist
))) == REAL_CST
)
12895 c
= TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist
)));
12896 n
= real_to_integer (&c
);
12899 REAL_VALUE_TYPE cint
;
12900 real_from_integer (&cint
, VOIDmode
, n
,
12901 n
< 0 ? -1 : 0, 0);
12902 if (real_identical (&c
, &cint
))
12906 return tree_expr_nonnegative_p (TREE_VALUE (arglist
));
12913 /* ... fall through ... */
12916 if (truth_value_p (TREE_CODE (t
)))
12917 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12921 /* We don't know sign of `t', so be conservative and return false. */
12925 /* Return true when T is an address and is known to be nonzero.
12926 For floating point we further ensure that T is not denormal.
12927 Similar logic is present in nonzero_address in rtlanal.h. */
12930 tree_expr_nonzero_p (tree t
)
12932 tree type
= TREE_TYPE (t
);
12934 /* Doing something useful for floating point would need more work. */
12935 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
12938 switch (TREE_CODE (t
))
12941 /* Query VRP to see if it has recorded any information about
12942 the range of this object. */
12943 return ssa_name_nonzero_p (t
);
12946 return tree_expr_nonzero_p (TREE_OPERAND (t
, 0));
12949 return !integer_zerop (t
);
12952 if (TYPE_OVERFLOW_UNDEFINED (type
))
12954 /* With the presence of negative values it is hard
12955 to say something. */
12956 if (!tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
12957 || !tree_expr_nonnegative_p (TREE_OPERAND (t
, 1)))
12959 /* One of operands must be positive and the other non-negative. */
12960 return (tree_expr_nonzero_p (TREE_OPERAND (t
, 0))
12961 || tree_expr_nonzero_p (TREE_OPERAND (t
, 1)));
12966 if (TYPE_OVERFLOW_UNDEFINED (type
))
12968 return (tree_expr_nonzero_p (TREE_OPERAND (t
, 0))
12969 && tree_expr_nonzero_p (TREE_OPERAND (t
, 1)));
12975 tree inner_type
= TREE_TYPE (TREE_OPERAND (t
, 0));
12976 tree outer_type
= TREE_TYPE (t
);
12978 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
12979 && tree_expr_nonzero_p (TREE_OPERAND (t
, 0)));
12985 tree base
= get_base_address (TREE_OPERAND (t
, 0));
12990 /* Weak declarations may link to NULL. */
12991 if (VAR_OR_FUNCTION_DECL_P (base
))
12992 return !DECL_WEAK (base
);
12994 /* Constants are never weak. */
12995 if (CONSTANT_CLASS_P (base
))
13002 return (tree_expr_nonzero_p (TREE_OPERAND (t
, 1))
13003 && tree_expr_nonzero_p (TREE_OPERAND (t
, 2)));
13006 return (tree_expr_nonzero_p (TREE_OPERAND (t
, 0))
13007 && tree_expr_nonzero_p (TREE_OPERAND (t
, 1)));
13010 if (tree_expr_nonzero_p (TREE_OPERAND (t
, 0)))
13012 /* When both operands are nonzero, then MAX must be too. */
13013 if (tree_expr_nonzero_p (TREE_OPERAND (t
, 1)))
13016 /* MAX where operand 0 is positive is positive. */
13017 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
13019 /* MAX where operand 1 is positive is positive. */
13020 else if (tree_expr_nonzero_p (TREE_OPERAND (t
, 1))
13021 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1)))
13025 case COMPOUND_EXPR
:
13027 case GIMPLE_MODIFY_STMT
:
13029 return tree_expr_nonzero_p (GENERIC_TREE_OPERAND (t
, 1));
13032 case NON_LVALUE_EXPR
:
13033 return tree_expr_nonzero_p (TREE_OPERAND (t
, 0));
13036 return tree_expr_nonzero_p (TREE_OPERAND (t
, 1))
13037 || tree_expr_nonzero_p (TREE_OPERAND (t
, 0));
13040 return alloca_call_p (t
);
13048 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13049 attempt to fold the expression to a constant without modifying TYPE,
13052 If the expression could be simplified to a constant, then return
13053 the constant. If the expression would not be simplified to a
13054 constant, then return NULL_TREE. */
13057 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
13059 tree tem
= fold_binary (code
, type
, op0
, op1
);
13060 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
13063 /* Given the components of a unary expression CODE, TYPE and OP0,
13064 attempt to fold the expression to a constant without modifying
13067 If the expression could be simplified to a constant, then return
13068 the constant. If the expression would not be simplified to a
13069 constant, then return NULL_TREE. */
13072 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
13074 tree tem
= fold_unary (code
, type
, op0
);
13075 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
13078 /* If EXP represents referencing an element in a constant string
13079 (either via pointer arithmetic or array indexing), return the
13080 tree representing the value accessed, otherwise return NULL. */
13083 fold_read_from_constant_string (tree exp
)
13085 if ((TREE_CODE (exp
) == INDIRECT_REF
13086 || TREE_CODE (exp
) == ARRAY_REF
)
13087 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
13089 tree exp1
= TREE_OPERAND (exp
, 0);
13093 if (TREE_CODE (exp
) == INDIRECT_REF
)
13094 string
= string_constant (exp1
, &index
);
13097 tree low_bound
= array_ref_low_bound (exp
);
13098 index
= fold_convert (sizetype
, TREE_OPERAND (exp
, 1));
13100 /* Optimize the special-case of a zero lower bound.
13102 We convert the low_bound to sizetype to avoid some problems
13103 with constant folding. (E.g. suppose the lower bound is 1,
13104 and its mode is QI. Without the conversion,l (ARRAY
13105 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13106 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
13107 if (! integer_zerop (low_bound
))
13108 index
= size_diffop (index
, fold_convert (sizetype
, low_bound
));
13114 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
13115 && TREE_CODE (string
) == STRING_CST
13116 && TREE_CODE (index
) == INTEGER_CST
13117 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
13118 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))))
13120 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))) == 1))
13121 return fold_convert (TREE_TYPE (exp
),
13122 build_int_cst (NULL_TREE
,
13123 (TREE_STRING_POINTER (string
)
13124 [TREE_INT_CST_LOW (index
)])));
13129 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13130 an integer constant or real constant.
13132 TYPE is the type of the result. */
13135 fold_negate_const (tree arg0
, tree type
)
13137 tree t
= NULL_TREE
;
13139 switch (TREE_CODE (arg0
))
13143 unsigned HOST_WIDE_INT low
;
13144 HOST_WIDE_INT high
;
13145 int overflow
= neg_double (TREE_INT_CST_LOW (arg0
),
13146 TREE_INT_CST_HIGH (arg0
),
13148 t
= force_fit_type_double (type
, low
, high
, 1,
13149 (overflow
| TREE_OVERFLOW (arg0
))
13150 && !TYPE_UNSIGNED (type
));
13155 t
= build_real (type
, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0
)));
13159 gcc_unreachable ();
13165 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13166 an integer constant or real constant.
13168 TYPE is the type of the result. */
13171 fold_abs_const (tree arg0
, tree type
)
13173 tree t
= NULL_TREE
;
13175 switch (TREE_CODE (arg0
))
13178 /* If the value is unsigned, then the absolute value is
13179 the same as the ordinary value. */
13180 if (TYPE_UNSIGNED (type
))
13182 /* Similarly, if the value is non-negative. */
13183 else if (INT_CST_LT (integer_minus_one_node
, arg0
))
13185 /* If the value is negative, then the absolute value is
13189 unsigned HOST_WIDE_INT low
;
13190 HOST_WIDE_INT high
;
13191 int overflow
= neg_double (TREE_INT_CST_LOW (arg0
),
13192 TREE_INT_CST_HIGH (arg0
),
13194 t
= force_fit_type_double (type
, low
, high
, -1,
13195 overflow
| TREE_OVERFLOW (arg0
));
13200 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
13201 t
= build_real (type
, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0
)));
13207 gcc_unreachable ();
13213 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13214 constant. TYPE is the type of the result. */
13217 fold_not_const (tree arg0
, tree type
)
13219 tree t
= NULL_TREE
;
13221 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
13223 t
= force_fit_type_double (type
, ~TREE_INT_CST_LOW (arg0
),
13224 ~TREE_INT_CST_HIGH (arg0
), 0,
13225 TREE_OVERFLOW (arg0
));
13230 /* Given CODE, a relational operator, the target type, TYPE and two
13231 constant operands OP0 and OP1, return the result of the
13232 relational operation. If the result is not a compile time
13233 constant, then return NULL_TREE. */
13236 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
13238 int result
, invert
;
13240 /* From here on, the only cases we handle are when the result is
13241 known to be a constant. */
13243 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
13245 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
13246 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
13248 /* Handle the cases where either operand is a NaN. */
13249 if (real_isnan (c0
) || real_isnan (c1
))
13259 case UNORDERED_EXPR
:
13273 if (flag_trapping_math
)
13279 gcc_unreachable ();
13282 return constant_boolean_node (result
, type
);
13285 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
13288 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13290 To compute GT, swap the arguments and do LT.
13291 To compute GE, do LT and invert the result.
13292 To compute LE, swap the arguments, do LT and invert the result.
13293 To compute NE, do EQ and invert the result.
13295 Therefore, the code below must handle only EQ and LT. */
13297 if (code
== LE_EXPR
|| code
== GT_EXPR
)
13302 code
= swap_tree_comparison (code
);
13305 /* Note that it is safe to invert for real values here because we
13306 have already handled the one case that it matters. */
13309 if (code
== NE_EXPR
|| code
== GE_EXPR
)
13312 code
= invert_tree_comparison (code
, false);
13315 /* Compute a result for LT or EQ if args permit;
13316 Otherwise return T. */
13317 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
13319 if (code
== EQ_EXPR
)
13320 result
= tree_int_cst_equal (op0
, op1
);
13321 else if (TYPE_UNSIGNED (TREE_TYPE (op0
)))
13322 result
= INT_CST_LT_UNSIGNED (op0
, op1
);
13324 result
= INT_CST_LT (op0
, op1
);
13331 return constant_boolean_node (result
, type
);
13334 /* Build an expression for the a clean point containing EXPR with type TYPE.
13335 Don't build a cleanup point expression for EXPR which don't have side
13339 fold_build_cleanup_point_expr (tree type
, tree expr
)
13341 /* If the expression does not have side effects then we don't have to wrap
13342 it with a cleanup point expression. */
13343 if (!TREE_SIDE_EFFECTS (expr
))
13346 /* If the expression is a return, check to see if the expression inside the
13347 return has no side effects or the right hand side of the modify expression
13348 inside the return. If either don't have side effects set we don't need to
13349 wrap the expression in a cleanup point expression. Note we don't check the
13350 left hand side of the modify because it should always be a return decl. */
13351 if (TREE_CODE (expr
) == RETURN_EXPR
)
13353 tree op
= TREE_OPERAND (expr
, 0);
13354 if (!op
|| !TREE_SIDE_EFFECTS (op
))
13356 op
= TREE_OPERAND (op
, 1);
13357 if (!TREE_SIDE_EFFECTS (op
))
13361 return build1 (CLEANUP_POINT_EXPR
, type
, expr
);
13364 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13365 avoid confusing the gimplify process. */
13368 build_fold_addr_expr_with_type (tree t
, tree ptrtype
)
13370 /* The size of the object is not relevant when talking about its address. */
13371 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
13372 t
= TREE_OPERAND (t
, 0);
13374 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13375 if (TREE_CODE (t
) == INDIRECT_REF
13376 || TREE_CODE (t
) == MISALIGNED_INDIRECT_REF
)
13378 t
= TREE_OPERAND (t
, 0);
13379 if (TREE_TYPE (t
) != ptrtype
)
13380 t
= build1 (NOP_EXPR
, ptrtype
, t
);
13386 while (handled_component_p (base
))
13387 base
= TREE_OPERAND (base
, 0);
13389 TREE_ADDRESSABLE (base
) = 1;
13391 t
= build1 (ADDR_EXPR
, ptrtype
, t
);
13398 build_fold_addr_expr (tree t
)
13400 return build_fold_addr_expr_with_type (t
, build_pointer_type (TREE_TYPE (t
)));
13403 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13404 of an indirection through OP0, or NULL_TREE if no simplification is
13408 fold_indirect_ref_1 (tree type
, tree op0
)
13414 subtype
= TREE_TYPE (sub
);
13415 if (!POINTER_TYPE_P (subtype
))
13418 if (TREE_CODE (sub
) == ADDR_EXPR
)
13420 tree op
= TREE_OPERAND (sub
, 0);
13421 tree optype
= TREE_TYPE (op
);
13422 /* *&CONST_DECL -> to the value of the const decl. */
13423 if (TREE_CODE (op
) == CONST_DECL
)
13424 return DECL_INITIAL (op
);
13425 /* *&p => p; make sure to handle *&"str"[cst] here. */
13426 if (type
== optype
)
13428 tree fop
= fold_read_from_constant_string (op
);
13434 /* *(foo *)&fooarray => fooarray[0] */
13435 else if (TREE_CODE (optype
) == ARRAY_TYPE
13436 && type
== TREE_TYPE (optype
))
13438 tree type_domain
= TYPE_DOMAIN (optype
);
13439 tree min_val
= size_zero_node
;
13440 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
13441 min_val
= TYPE_MIN_VALUE (type_domain
);
13442 return build4 (ARRAY_REF
, type
, op
, min_val
, NULL_TREE
, NULL_TREE
);
13444 /* *(foo *)&complexfoo => __real__ complexfoo */
13445 else if (TREE_CODE (optype
) == COMPLEX_TYPE
13446 && type
== TREE_TYPE (optype
))
13447 return fold_build1 (REALPART_EXPR
, type
, op
);
13448 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13449 else if (TREE_CODE (optype
) == VECTOR_TYPE
13450 && type
== TREE_TYPE (optype
))
13452 tree part_width
= TYPE_SIZE (type
);
13453 tree index
= bitsize_int (0);
13454 return fold_build3 (BIT_FIELD_REF
, type
, op
, part_width
, index
);
13458 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13459 if (TREE_CODE (sub
) == PLUS_EXPR
13460 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
13462 tree op00
= TREE_OPERAND (sub
, 0);
13463 tree op01
= TREE_OPERAND (sub
, 1);
13467 op00type
= TREE_TYPE (op00
);
13468 if (TREE_CODE (op00
) == ADDR_EXPR
13469 && TREE_CODE (TREE_TYPE (op00type
)) == COMPLEX_TYPE
13470 && type
== TREE_TYPE (TREE_TYPE (op00type
)))
13472 tree size
= TYPE_SIZE_UNIT (type
);
13473 if (tree_int_cst_equal (size
, op01
))
13474 return fold_build1 (IMAGPART_EXPR
, type
, TREE_OPERAND (op00
, 0));
13478 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13479 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
13480 && type
== TREE_TYPE (TREE_TYPE (subtype
)))
13483 tree min_val
= size_zero_node
;
13484 sub
= build_fold_indirect_ref (sub
);
13485 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
13486 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
13487 min_val
= TYPE_MIN_VALUE (type_domain
);
13488 return build4 (ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
, NULL_TREE
);
13494 /* Builds an expression for an indirection through T, simplifying some
13498 build_fold_indirect_ref (tree t
)
13500 tree type
= TREE_TYPE (TREE_TYPE (t
));
13501 tree sub
= fold_indirect_ref_1 (type
, t
);
13506 return build1 (INDIRECT_REF
, type
, t
);
13509 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13512 fold_indirect_ref (tree t
)
13514 tree sub
= fold_indirect_ref_1 (TREE_TYPE (t
), TREE_OPERAND (t
, 0));
13522 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13523 whose result is ignored. The type of the returned tree need not be
13524 the same as the original expression. */
13527 fold_ignored_result (tree t
)
13529 if (!TREE_SIDE_EFFECTS (t
))
13530 return integer_zero_node
;
13533 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
13536 t
= TREE_OPERAND (t
, 0);
13540 case tcc_comparison
:
13541 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
13542 t
= TREE_OPERAND (t
, 0);
13543 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
13544 t
= TREE_OPERAND (t
, 1);
13549 case tcc_expression
:
13550 switch (TREE_CODE (t
))
13552 case COMPOUND_EXPR
:
13553 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
13555 t
= TREE_OPERAND (t
, 0);
13559 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
13560 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
13562 t
= TREE_OPERAND (t
, 0);
13575 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13576 This can only be applied to objects of a sizetype. */
13579 round_up (tree value
, int divisor
)
13581 tree div
= NULL_TREE
;
13583 gcc_assert (divisor
> 0);
13587 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13588 have to do anything. Only do this when we are not given a const,
13589 because in that case, this check is more expensive than just
13591 if (TREE_CODE (value
) != INTEGER_CST
)
13593 div
= build_int_cst (TREE_TYPE (value
), divisor
);
13595 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
13599 /* If divisor is a power of two, simplify this to bit manipulation. */
13600 if (divisor
== (divisor
& -divisor
))
13602 if (TREE_CODE (value
) == INTEGER_CST
)
13604 unsigned HOST_WIDE_INT low
= TREE_INT_CST_LOW (value
);
13605 unsigned HOST_WIDE_INT high
;
13608 if ((low
& (divisor
- 1)) == 0)
13611 overflow_p
= TREE_OVERFLOW (value
);
13612 high
= TREE_INT_CST_HIGH (value
);
13613 low
&= ~(divisor
- 1);
13622 return force_fit_type_double (TREE_TYPE (value
), low
, high
,
13629 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
13630 value
= size_binop (PLUS_EXPR
, value
, t
);
13631 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
13632 value
= size_binop (BIT_AND_EXPR
, value
, t
);
13638 div
= build_int_cst (TREE_TYPE (value
), divisor
);
13639 value
= size_binop (CEIL_DIV_EXPR
, value
, div
);
13640 value
= size_binop (MULT_EXPR
, value
, div
);
13646 /* Likewise, but round down. */
13649 round_down (tree value
, int divisor
)
13651 tree div
= NULL_TREE
;
13653 gcc_assert (divisor
> 0);
13657 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13658 have to do anything. Only do this when we are not given a const,
13659 because in that case, this check is more expensive than just
13661 if (TREE_CODE (value
) != INTEGER_CST
)
13663 div
= build_int_cst (TREE_TYPE (value
), divisor
);
13665 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
13669 /* If divisor is a power of two, simplify this to bit manipulation. */
13670 if (divisor
== (divisor
& -divisor
))
13674 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
13675 value
= size_binop (BIT_AND_EXPR
, value
, t
);
13680 div
= build_int_cst (TREE_TYPE (value
), divisor
);
13681 value
= size_binop (FLOOR_DIV_EXPR
, value
, div
);
13682 value
= size_binop (MULT_EXPR
, value
, div
);
13688 /* Returns the pointer to the base of the object addressed by EXP and
13689 extracts the information about the offset of the access, storing it
13690 to PBITPOS and POFFSET. */
13693 split_address_to_core_and_offset (tree exp
,
13694 HOST_WIDE_INT
*pbitpos
, tree
*poffset
)
13697 enum machine_mode mode
;
13698 int unsignedp
, volatilep
;
13699 HOST_WIDE_INT bitsize
;
13701 if (TREE_CODE (exp
) == ADDR_EXPR
)
13703 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
13704 poffset
, &mode
, &unsignedp
, &volatilep
,
13706 core
= build_fold_addr_expr (core
);
13712 *poffset
= NULL_TREE
;
13718 /* Returns true if addresses of E1 and E2 differ by a constant, false
13719 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13722 ptr_difference_const (tree e1
, tree e2
, HOST_WIDE_INT
*diff
)
13725 HOST_WIDE_INT bitpos1
, bitpos2
;
13726 tree toffset1
, toffset2
, tdiff
, type
;
13728 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
13729 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
13731 if (bitpos1
% BITS_PER_UNIT
!= 0
13732 || bitpos2
% BITS_PER_UNIT
!= 0
13733 || !operand_equal_p (core1
, core2
, 0))
13736 if (toffset1
&& toffset2
)
13738 type
= TREE_TYPE (toffset1
);
13739 if (type
!= TREE_TYPE (toffset2
))
13740 toffset2
= fold_convert (type
, toffset2
);
13742 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
13743 if (!cst_and_fits_in_hwi (tdiff
))
13746 *diff
= int_cst_value (tdiff
);
13748 else if (toffset1
|| toffset2
)
13750 /* If only one of the offsets is non-constant, the difference cannot
13757 *diff
+= (bitpos1
- bitpos2
) / BITS_PER_UNIT
;
13761 /* Simplify the floating point expression EXP when the sign of the
13762 result is not significant. Return NULL_TREE if no simplification
13766 fold_strip_sign_ops (tree exp
)
13770 switch (TREE_CODE (exp
))
13774 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
13775 return arg0
? arg0
: TREE_OPERAND (exp
, 0);
13779 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp
))))
13781 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
13782 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
13783 if (arg0
!= NULL_TREE
|| arg1
!= NULL_TREE
)
13784 return fold_build2 (TREE_CODE (exp
), TREE_TYPE (exp
),
13785 arg0
? arg0
: TREE_OPERAND (exp
, 0),
13786 arg1
? arg1
: TREE_OPERAND (exp
, 1));
13789 case COMPOUND_EXPR
:
13790 arg0
= TREE_OPERAND (exp
, 0);
13791 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
13793 return fold_build2 (COMPOUND_EXPR
, TREE_TYPE (exp
), arg0
, arg1
);
13797 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
13798 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 2));
13800 return fold_build3 (COND_EXPR
, TREE_TYPE (exp
), TREE_OPERAND (exp
, 0),
13801 arg0
? arg0
: TREE_OPERAND (exp
, 1),
13802 arg1
? arg1
: TREE_OPERAND (exp
, 2));
13807 const enum built_in_function fcode
= builtin_mathfn_code (exp
);
13810 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
13811 /* Strip copysign function call, return the 1st argument. */
13812 arg0
= TREE_VALUE (TREE_OPERAND (exp
, 1));
13813 arg1
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (exp
, 1)));
13814 return omit_one_operand (TREE_TYPE (exp
), arg0
, arg1
);
13817 /* Strip sign ops from the argument of "odd" math functions. */
13818 if (negate_mathfn_p (fcode
))
13820 arg0
= fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp
, 1)));
13822 return build_function_call_expr (get_callee_fndecl (exp
),
13823 build_tree_list (NULL_TREE
,